Fixes bug 1456 by returning default empty list in case no vdu-profile is present
[osm/NBI.git] / osm_nbi / descriptor_topics.py
index fd3e3f4..d6b2884 100644 (file)
 # -*- coding: utf-8 -*-
 
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import tarfile
 import yaml
 import json
+import importlib
+import copy
 # import logging
 from hashlib import md5
 from osm_common.dbbase import DbException, deep_update_rfc7396
 from http import HTTPStatus
-from validation import ValidationError, pdu_new_schema, pdu_edit_schema
-from base_topic import BaseTopic, EngineException, get_iterable
-from osm_im.vnfd import vnfd as vnfd_im
-from osm_im.nsd import nsd as nsd_im
+from time import time
+from uuid import uuid4
+from re import fullmatch
+from osm_nbi.validation import ValidationError, pdu_new_schema, pdu_edit_schema, \
+    validate_input, vnfpkgop_new_schema
+from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
+etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd")
+etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd")
+from osm_im.nst import nst as nst_im
 from pyangbind.lib.serialise import pybindJSONDecoder
 import pyangbind.lib.pybindJSON as pybindJSON
+from osm_nbi import utils
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 class DescriptorTopic(BaseTopic):
 
-    def __init__(self, db, fs, msg):
-        BaseTopic.__init__(self, db, fs, msg)
-
-    def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+
+    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
+        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+
+        def _check_unique_id_name(descriptor, position=""):
+            for desc_key, desc_item in descriptor.items():
+                if isinstance(desc_item, list) and desc_item:
+                    used_ids = []
+                    desc_item_id = None
+                    for index, list_item in enumerate(desc_item):
+                        if isinstance(list_item, dict):
+                            _check_unique_id_name(list_item, "{}.{}[{}]"
+                                                  .format(position, desc_key, index))
+                            # Base case
+                            if index == 0 and (list_item.get("id") or list_item.get("name")):
+                                desc_item_id = "id" if list_item.get("id") else "name"
+                            if desc_item_id and list_item.get(desc_item_id):
+                                if list_item[desc_item_id] in used_ids:
+                                    position = "{}.{}[{}]".format(position, desc_key, index)
+                                    raise EngineException("Error: identifier {} '{}' is not unique and repeats at '{}'"
+                                                          .format(desc_item_id, list_item[desc_item_id],
+                                                                  position), HTTPStatus.UNPROCESSABLE_ENTITY)
+                                used_ids.append(list_item[desc_item_id])
+
+        _check_unique_id_name(final_content)
         # 1. validate again with pyangbind
         # 1.1. remove internal keys
         internal_keys = {}
         for k in ("_id", "_admin"):
             if k in final_content:
                 internal_keys[k] = final_content.pop(k)
-        serialized = self._validate_input_new(final_content, force)
+        storage_params = internal_keys["_admin"].get("storage")
+        serialized = self._validate_input_new(final_content, storage_params, session["force"])
+
         # 1.2. modify final_content with a serialized version
-        final_content.clear()
-        final_content.update(serialized)
+        final_content = copy.deepcopy(serialized)
         # 1.3. restore internal keys
         for k, v in internal_keys.items():
             final_content[k] = v
+        if session["force"]:
+            return final_content
 
         # 2. check that this id is not present
         if "id" in edit_content:
-            _filter = self._get_project_filter(session, write=False, show_all=False)
+            _filter = self._get_project_filter(session)
+
             _filter["id"] = final_content["id"]
             _filter["_id.neq"] = _id
+
             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
                                                                                                final_content["id"]),
                                       HTTPStatus.CONFLICT)
 
+        return final_content
+
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
         BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
         content["_admin"]["onboardingState"] = "CREATED"
         content["_admin"]["operationalState"] = "DISABLED"
-        content["_admin"]["usageSate"] = "NOT_IN_USE"
+        content["_admin"]["usageState"] = "NOT_IN_USE"
 
-    def delete(self, session, _id, force=False, dry_run=False):
+    def delete_extra(self, session, _id, db_content, not_send_msg=None):
         """
-        Delete item by its internal _id
-        :param session: contains the used login username, working project, and admin rights
+        Deletes file system storage associated with the descriptor
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param _id: server internal id
-        :param force: indicates if deletion must be forced in case of conflict
-        :param dry_run: make checking but do not delete
-        :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
+        :param db_content: The database content of the descriptor
+        :param not_send_msg: To not send message (False) or store content (list) instead
+        :return: None if ok or raises EngineException with the problem
         """
-        # TODO add admin to filter, validate rights
-        v = BaseTopic.delete(self, session, _id, force, dry_run=True)
-        if dry_run:
-            return
-        v = self.db.del_one(self.topic, {"_id": _id})
         self.fs.file_delete(_id, ignore_non_exist=True)
-        self._send_msg("delete", {"_id": _id})
-        return v
+        self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
 
     @staticmethod
     def get_one_by_id(db, session, topic, id):
         # find owned by this project
-        _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
+        _filter = BaseTopic._get_project_filter(session)
         _filter["id"] = id
         desc_list = db.get_list(topic, _filter)
         if len(desc_list) == 1:
@@ -85,7 +130,7 @@ class DescriptorTopic(BaseTopic):
                               HTTPStatus.CONFLICT)
 
         # not found any: try to find public
-        _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
+        _filter = BaseTopic._get_project_filter(session)
         _filter["id"] = id
         desc_list = db.get_list(topic, _filter)
         if not desc_list:
@@ -96,51 +141,50 @@ class DescriptorTopic(BaseTopic):
             raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
                 topic[:-1], id), HTTPStatus.CONFLICT)
 
-    def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         """
         Creates a new almost empty DISABLED  entry into database. Due to SOL005, it does not follow normal procedure.
         Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
         (self.upload_content)
         :param rollback: list to append created items at database in case a rollback may to be done
-        :param session: contains the used login username and working project
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param indata: data to be inserted
         :param kwargs: used to override the indata descriptor
         :param headers: http request headers
-        :param force: If True avoid some dependence checks
-        :param make_public: Make the created descriptor public to all projects
-        :return: _id: identity of the inserted data.
+        :return: _id, None: identity of the inserted data; and None as there is not any operation
         """
 
-        try:
-            # _remove_envelop
-            if indata:
-                if "userDefinedData" in indata:
-                    indata = indata['userDefinedData']
-
-            # Override descriptor with query string kwargs
-            self._update_input_with_kwargs(indata, kwargs)
-            # uncomment when this method is implemented.
-            # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
-            # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
-
-            content = {"_admin": {"userDefinedData": indata}}
-            self.format_on_new(content, session["project_id"], make_public=make_public)
-            _id = self.db.create(self.topic, content)
-            rollback.append({"topic": self.topic, "_id": _id})
-            return _id
-        except ValidationError as e:
-            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
-
-    def upload_content(self, session, _id, indata, kwargs, headers, force=False):
+        # No need to capture exceptions
+        # Check Quota
+        self.check_quota(session)
+
+        # _remove_envelop
+        if indata:
+            if "userDefinedData" in indata:
+                indata = indata['userDefinedData']
+
+        # Override descriptor with query string kwargs
+        self._update_input_with_kwargs(indata, kwargs)
+        # uncomment when this method is implemented.
+        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
+        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
+
+        content = {"_admin": {"userDefinedData": indata}}
+        self.format_on_new(content, session["project_id"], make_public=session["public"])
+        _id = self.db.create(self.topic, content)
+        rollback.append({"topic": self.topic, "_id": _id})
+        self._send_msg("created", {"_id": _id})
+        return _id, None
+
+    def upload_content(self, session, _id, indata, kwargs, headers):
         """
         Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
-        :param session: session
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param _id : the nsd,vnfd is already created, this is the id
         :param indata: http body request
         :param kwargs: user query string to override parameters. NOT USED
         :param headers:  http request headers
-        :param force: to be more tolerant with validation
-        :return: True package has is completely uploaded or False if partial content has been uplodaed.
+        :return: True if package is completely uploaded or False if partial content has been uploaded
             Raise exception on error
         """
         # Check that _id exists and it is valid
@@ -169,18 +213,19 @@ class DescriptorTopic(BaseTopic):
                 total = int(content_range[3])
             else:
                 start = 0
+            temp_folder = _id + "_"  # all content is uploaded here; if ok, the folder is renamed from "<id>_" to "<id>"
 
             if start:
-                if not self.fs.file_exists(_id, 'dir'):
+                if not self.fs.file_exists(temp_folder, 'dir'):
                     raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
             else:
-                self.fs.file_delete(_id, ignore_non_exist=True)
-                self.fs.mkdir(_id)
+                self.fs.file_delete(temp_folder, ignore_non_exist=True)
+                self.fs.mkdir(temp_folder)
 
             storage = self.fs.get_params()
             storage["folder"] = _id
 
-            file_path = (_id, filename)
+            file_path = (temp_folder, filename)
             if self.fs.file_exists(file_path, 'file'):
                 file_size = self.fs.file_size(file_path)
             else:
@@ -201,9 +246,9 @@ class DescriptorTopic(BaseTopic):
                         break
                     file_pkg.write(indata_text)
             if content_range_text:
-                if indata_len != end-start:
+                if indata_len != end - start:
                     raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
-                        start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+                        start, end - 1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
                 if end != total:
                     # TODO update to UPLOADING
                     return False
@@ -240,8 +285,8 @@ class DescriptorTopic(BaseTopic):
                     raise EngineException("Not found any descriptor file at package descriptor tar.gz")
                 storage["descriptor"] = descriptor_file_name
                 storage["zipfile"] = filename
-                self.fs.file_extract(tar, _id)
-                with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
+                self.fs.file_extract(tar, temp_folder)
+                with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
                     content = descriptor_file.read()
             else:
                 content = file_pkg.read()
@@ -252,7 +297,7 @@ class DescriptorTopic(BaseTopic):
                 indata = json.load(content)
             else:
                 error_text = "Invalid yaml format "
-                indata = yaml.load(content)
+                indata = yaml.load(content, Loader=yaml.SafeLoader)
 
             current_desc["_admin"]["storage"] = storage
             current_desc["_admin"]["onboardingState"] = "ONBOARDED"
@@ -263,15 +308,15 @@ class DescriptorTopic(BaseTopic):
             # Override descriptor with query string kwargs
             if kwargs:
                 self._update_input_with_kwargs(indata, kwargs)
-            # it will call overrides method at VnfdTopic or NsdTopic
-            indata = self._validate_input_new(indata, force=force)
 
             deep_update_rfc7396(current_desc, indata)
-            self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
+            current_desc = self.check_conflict_on_edit(session, current_desc, indata, _id=_id)
+            current_desc["_admin"]["modified"] = time()
             self.db.replace(self.topic, _id, current_desc)
+            self.fs.dir_rename(temp_folder, _id)
 
             indata["_id"] = _id
-            self._send_msg("created", indata)
+            self._send_msg("edited", indata)
 
             # TODO if descriptor has changed because kwargs update content and remove cached zip
             # TODO if zip is not present creates one
@@ -297,7 +342,7 @@ class DescriptorTopic(BaseTopic):
     def get_file(self, session, _id, path=None, accept_header=None):
         """
         Return the file content of a vnfd or nsd
-        :param session: contains the used login username and working project
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param _id: Identity of the vnfd, nsd
         :param path: artifact path or "$DESCRIPTOR" or None
         :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
@@ -322,7 +367,7 @@ class DescriptorTopic(BaseTopic):
                                   "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                                   http_code=HTTPStatus.CONFLICT)
         storage = content["_admin"]["storage"]
-        if path is not None and path != "$DESCRIPTOR":   # artifacts
+        if path is not None and path != "$DESCRIPTOR":  # artifacts
             if not storage.get('pkg-dir'):
                 raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
             if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
@@ -330,7 +375,7 @@ class DescriptorTopic(BaseTopic):
                 return folder_content, "text/plain"
                 # TODO manage folders in http
             else:
-                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
+                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"), \
                     "application/octet-stream"
 
         # pkgtype   accept  ZIP  TEXT    -> result
@@ -338,10 +383,15 @@ class DescriptorTopic(BaseTopic):
         #                   no   yes     -> error
         # onefile           yes  no      -> zip
         #                   X    yes     -> text
-
-        if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
+        contain_many_files = False
+        if storage.get('pkg-dir'):
+            # check if there is more than one file in the package, ignoring checksums.txt.
+            pkg_files = self.fs.dir_ls((storage['folder'], storage['pkg-dir']))
+            if len(pkg_files) >= 3 or (len(pkg_files) == 2 and 'checksums.txt' not in pkg_files):
+                contain_many_files = True
+        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
             return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
-        elif storage.get('pkg-dir') and not accept_zip:
+        elif contain_many_files and not accept_zip:
             raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
                                   "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
         else:
@@ -351,232 +401,452 @@ class DescriptorTopic(BaseTopic):
                                       "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
             return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
 
+    def _remove_yang_prefixes_from_descriptor(self, descriptor):
+        new_descriptor = {}
+        for k, v in descriptor.items():
+            new_v = v
+            if isinstance(v, dict):
+                new_v = self._remove_yang_prefixes_from_descriptor(v)
+            elif isinstance(v, list):
+                new_v = list()
+                for x in v:
+                    if isinstance(x, dict):
+                        new_v.append(self._remove_yang_prefixes_from_descriptor(x))
+                    else:
+                        new_v.append(x)
+            new_descriptor[k.split(':')[-1]] = new_v
+        return new_descriptor
+
     def pyangbind_validation(self, item, data, force=False):
-        try:
-            if item == "vnfds":
-                myvnfd = vnfd_im()
-                pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
-                                                 path_helper=True, skip_unknown=force)
-                out = pybindJSON.dumps(myvnfd, mode="ietf")
-            elif item == "nsds":
-                mynsd = nsd_im()
-                pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
-                                                 path_helper=True, skip_unknown=force)
-                out = pybindJSON.dumps(mynsd, mode="ietf")
+        raise EngineException("Not possible to validate '{}' item".format(item),
+                              http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+
+    def _validate_input_edit(self, indata, content, force=False):
+        # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
+        if "_id" in indata:
+            indata.pop("_id")
+        if "_admin" not in indata:
+            indata["_admin"] = {}
+
+        if "operationalState" in indata:
+            if indata["operationalState"] in ("ENABLED", "DISABLED"):
+                indata["_admin"]["operationalState"] = indata.pop("operationalState")
+            else:
+                raise EngineException("State '{}' is not a valid operational state"
+                                      .format(indata["operationalState"]),
+                                      http_code=HTTPStatus.BAD_REQUEST)
+
+        # In the case of user-defined data, we need to put the data in the root of the object
+        # to preserve the currently expected behaviour
+        if "userDefinedData" in indata:
+            data = indata.pop("userDefinedData")
+            if type(data) == dict:
+                indata["_admin"]["userDefinedData"] = data
             else:
-                raise EngineException("Not possible to validate '{}' item".format(item),
-                                      http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+                raise EngineException("userDefinedData should be an object, but is '{}' instead"
+                                      .format(type(data)),
+                                      http_code=HTTPStatus.BAD_REQUEST)
 
-            desc_out = self._remove_envelop(yaml.safe_load(out))
-            return desc_out
+        if ("operationalState" in indata["_admin"] and
+                content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
+            raise EngineException("operationalState already {}".format(content["_admin"]["operationalState"]),
+                                  http_code=HTTPStatus.CONFLICT)
 
-        except Exception as e:
-            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+        return indata
 
 
 class VnfdTopic(DescriptorTopic):
     topic = "vnfds"
     topic_msg = "vnfd"
 
-    def __init__(self, db, fs, msg):
-        DescriptorTopic.__init__(self, db, fs, msg)
+    def __init__(self, db, fs, msg, auth):
+        DescriptorTopic.__init__(self, db, fs, msg, auth)
+
+    def pyangbind_validation(self, item, data, force=False):
+        if self._descriptor_data_is_in_old_format(data):
+            raise EngineException("ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+        try:
+            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
+            pybindJSONDecoder.load_ietf_json({'etsi-nfv-vnfd:vnfd': data}, None, None, obj=myvnfd,
+                                             path_helper=True, skip_unknown=force)
+            out = pybindJSON.dumps(myvnfd, mode="ietf")
+            desc_out = self._remove_envelop(yaml.safe_load(out))
+            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
+            return utils.deep_update_dict(data, desc_out)
+        except Exception as e:
+            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
+                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    @staticmethod
+    def _descriptor_data_is_in_old_format(data):
+        return ('vnfd-catalog' in data) or ('vnfd:vnfd-catalog' in data)
 
     @staticmethod
     def _remove_envelop(indata=None):
         if not indata:
             return {}
         clean_indata = indata
-        if clean_indata.get('vnfd:vnfd-catalog'):
-            clean_indata = clean_indata['vnfd:vnfd-catalog']
-        elif clean_indata.get('vnfd-catalog'):
-            clean_indata = clean_indata['vnfd-catalog']
-        if clean_indata.get('vnfd'):
-            if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
-                raise EngineException("'vnfd' must be a list of only one element")
-            clean_indata = clean_indata['vnfd'][0]
-        elif clean_indata.get('vnfd:vnfd'):
-            if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
-                raise EngineException("'vnfd:vnfd' must be a list of only one element")
-            clean_indata = clean_indata['vnfd:vnfd'][0]
+
+        if clean_indata.get('etsi-nfv-vnfd:vnfd'):
+            if not isinstance(clean_indata['etsi-nfv-vnfd:vnfd'], dict):
+                raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
+            clean_indata = clean_indata['etsi-nfv-vnfd:vnfd']
+        elif clean_indata.get('vnfd'):
+            if not isinstance(clean_indata['vnfd'], dict):
+                raise EngineException("'vnfd' must be dict")
+            clean_indata = clean_indata['vnfd']
+
         return clean_indata
 
-    def check_conflict_on_del(self, session, _id, force=False):
+    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
+        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+
+        # set type of vnfd
+        contains_pdu = False
+        contains_vdu = False
+        for vdu in get_iterable(final_content.get("vdu")):
+            if vdu.get("pdu-type"):
+                contains_pdu = True
+            else:
+                contains_vdu = True
+        if contains_pdu:
+            final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
+        elif contains_vdu:
+            final_content["_admin"]["type"] = "vnfd"
+        # if neither vdu nor pdu is present, do not fill type
+        return final_content
+
+    def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
         that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
         that uses this vnfd
-        :param session:
-        :param _id: vnfd inernal id
-        :param force: Avoid this checking
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: vnfd internal id
+        :param db_content: The database content of the _id.
         :return: None or raises EngineException with the conflict
         """
-        if force:
+        if session["force"]:
             return
-        descriptor = self.db.get_one("vnfds", {"_id": _id})
+        descriptor = db_content
         descriptor_id = descriptor.get("id")
         if not descriptor_id:  # empty vnfd not uploaded
             return
 
-        _filter = self._get_project_filter(session, write=False, show_all=False)
+        _filter = self._get_project_filter(session)
+
         # check vnfrs using this vnfd
         _filter["vnfd-id"] = _id
         if self.db.get_list("vnfrs", _filter):
-            raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
+            raise EngineException("There is at least one VNF using this descriptor", http_code=HTTPStatus.CONFLICT)
+
+        # check NSD referencing this VNFD
         del _filter["vnfd-id"]
-        # check NSD using this VNFD
         _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
         if self.db.get_list("nsds", _filter):
-            raise EngineException("There is soame NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
+            raise EngineException("There is at least one NSD referencing this descriptor",
+                                  http_code=HTTPStatus.CONFLICT)
+
+    def _validate_input_new(self, indata, storage_params, force=False):
+        indata.pop("onboardingState", None)
+        indata.pop("operationalState", None)
+        indata.pop("usageState", None)
+        indata.pop("links", None)
 
-    def _validate_input_new(self, indata, force=False):
         indata = self.pyangbind_validation("vnfds", indata, force)
         # Cross references validation in the descriptor
-        if indata.get("vdu"):
-            if not indata.get("mgmt-interface"):
-                raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
+
+        self.validate_mgmt_interface_connection_point(indata)
+
+        for vdu in get_iterable(indata.get("vdu")):
+            self.validate_vdu_internal_connection_points(vdu)
+            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
+        self._validate_vdu_charms_in_package(storage_params, indata)
+
+        self._validate_vnf_charms_in_package(storage_params, indata)
+
+        self.validate_external_connection_points(indata)
+        self.validate_internal_virtual_links(indata)
+        self.validate_monitoring_params(indata)
+        self.validate_scaling_group_descriptor(indata)
+
+        return indata
+
+    @staticmethod
+    def validate_mgmt_interface_connection_point(indata):
+        if not indata.get("vdu"):
+            return
+        if not indata.get("mgmt-cp"):
+            raise EngineException("'mgmt-cp' is a mandatory field and it is not defined",
+                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+        for cp in get_iterable(indata.get("ext-cpd")):
+            if cp["id"] == indata["mgmt-cp"]:
+                break
+        else:
+            raise EngineException("mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
+                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    @staticmethod
+    def validate_vdu_internal_connection_points(vdu):
+        int_cpds = set()
+        for cpd in get_iterable(vdu.get("int-cpd")):
+            cpd_id = cpd.get("id")
+            if cpd_id and cpd_id in int_cpds:
+                raise EngineException("vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd"
+                                      .format(vdu["id"], cpd_id),
                                       http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            if indata["mgmt-interface"].get("cp"):
-                for cp in get_iterable(indata.get("connection-point")):
-                    if cp["name"] == indata["mgmt-interface"]["cp"]:
-                        break
-                else:
-                    raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
-                                          .format(indata["mgmt-interface"]["cp"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            int_cpds.add(cpd_id)
 
+    @staticmethod
+    def validate_external_connection_points(indata):
+        all_vdus_int_cpds = set()
         for vdu in get_iterable(indata.get("vdu")):
-            for interface in get_iterable(vdu.get("interface")):
-                if interface.get("external-connection-point-ref"):
-                    for cp in get_iterable(indata.get("connection-point")):
-                        if cp["name"] == interface["external-connection-point-ref"]:
-                            break
-                    else:
-                        raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
-                                              "must match an existing connection-point"
-                                              .format(vdu["id"], interface["name"],
-                                                      interface["external-connection-point-ref"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            for int_cpd in get_iterable(vdu.get("int-cpd")):
+                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
+
+        ext_cpds = set()
+        for cpd in get_iterable(indata.get("ext-cpd")):
+            cpd_id = cpd.get("id")
+            if cpd_id and cpd_id in ext_cpds:
+                raise EngineException("ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
+                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            ext_cpds.add(cpd_id)
 
-                elif interface.get("internal-connection-point-ref"):
-                    for internal_cp in get_iterable(vdu.get("internal-connection-point")):
-                        if interface["internal-connection-point-ref"] == internal_cp.get("id"):
-                            break
-                    else:
-                        raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
-                                              "must match an existing vdu:internal-connection-point"
-                                              .format(vdu["id"], interface["name"],
-                                                      interface["internal-connection-point-ref"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        for ivld in get_iterable(indata.get("internal-vld")):
-            for icp in get_iterable(ivld.get("internal-connection-point")):
-                icp_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    for internal_cp in get_iterable(vdu.get("internal-connection-point")):
-                        if icp["id-ref"] == internal_cp["id"]:
-                            icp_mark = True
-                            break
-                    if icp_mark:
-                        break
-                else:
-                    raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
-                                          "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
+            int_cpd = cpd.get("int-cpd")
+            if int_cpd:
+                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
+                    raise EngineException("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(cpd_id),
                                           http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            if ivld.get("ip-profile-ref"):
-                for ip_prof in get_iterable(indata.get("ip-profiles")):
-                    if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")):
-                        break
-                else:
-                    raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
-                        ivld["id"], ivld["ip-profile-ref"]),
+            # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
+
+    def _validate_vdu_charms_in_package(self, storage_params, indata):
+        for df in indata["df"]:
+            if "lcm-operations-configuration" in df and "operate-vnf-op-config" in df["lcm-operations-configuration"]:
+                configs = df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", [])
+                vdus = df.get("vdu-profile", [])
+                for vdu in vdus:
+                    for config in configs:
+                        if config["id"] == vdu["id"] and utils.find_in_list(
+                            config.get("execution-environment-list", []),
+                            lambda ee: "juju" in ee
+                        ):
+                            if not self._validate_package_folders(storage_params, 'charms'):
+                                raise EngineException("Charm defined in vnf[id={}] but not present in "
+                                                      "package".format(indata["id"]))
+
+    def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
+        if not vdu.get("cloud-init-file"):
+            return
+        if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
+            raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
+                                  "package".format(indata["id"], vdu["id"]))
+
+    def _validate_vnf_charms_in_package(self, storage_params, indata):
+        # Get VNF configuration through new container
+        for deployment_flavor in indata.get('df', []):
+            if "lcm-operations-configuration" not in deployment_flavor:
+                return
+            if "operate-vnf-op-config" not in deployment_flavor["lcm-operations-configuration"]:
+                return
+            for day_1_2_config in deployment_flavor["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]:
+                if day_1_2_config["id"] == indata["id"]:
+                    if utils.find_in_list(
+                        day_1_2_config.get("execution-environment-list", []),
+                        lambda ee: "juju" in ee
+                    ):
+                        if not self._validate_package_folders(storage_params, 'charms'):
+                            raise EngineException("Charm defined in vnf[id={}] but not present in "
+                                                  "package".format(indata["id"]))
+
+    def _validate_package_folders(self, storage_params, folder, file=None):
+        if not storage_params or not storage_params.get("pkg-dir"):
+            return False
+        else:
+            if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
+                f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+            else:
+                f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+            if file:
+                return self.fs.file_exists("{}/{}".format(f, file), 'file')
+            else:
+                if self.fs.file_exists(f, 'dir'):
+                    if self.fs.dir_ls(f):
+                        return True
+            return False
+
+    @staticmethod
+    def validate_internal_virtual_links(indata):
+        all_ivld_ids = set()
+        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
+            ivld_id = ivld.get("id")
+            if ivld_id and ivld_id in all_ivld_ids:
+                raise EngineException("Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
+                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            else:
+                all_ivld_ids.add(ivld_id)
+
+        for vdu in get_iterable(indata.get("vdu")):
+            for int_cpd in get_iterable(vdu.get("int-cpd")):
+                int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
+                if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
+                    raise EngineException(
+                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+                        "int-virtual-link-desc".format(vdu["id"], int_cpd["id"], int_cpd_ivld_id),
                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        for mp in get_iterable(indata.get("monitoring-param")):
-            if mp.get("vdu-monitoring-param"):
-                mp_vmp_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    for vmp in get_iterable(vdu.get("monitoring-param")):
-                        if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
-                                mp["vdu-monitoring-param"]["vdu-ref"]:
-                            mp_vmp_mark = True
-                            break
-                    if mp_vmp_mark:
-                        break
-                else:
-                    raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
-                                          "defined at vdu[id='{}'] or vdu does not exist"
-                                          .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
-                                                  mp["vdu-monitoring-param"]["vdu-ref"]),
+
+        for df in get_iterable(indata.get("df")):
+            for vlp in get_iterable(df.get("virtual-link-profile")):
+                vlp_ivld_id = vlp.get("id")
+                if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
+                    raise EngineException("df[id='{}']:virtual-link-profile='{}' must match an existing "
+                                          "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                                           http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            elif mp.get("vdu-metric"):
-                mp_vm_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    if vdu.get("vdu-configuration"):
-                        for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
-                            if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
-                                    mp["vdu-metric"]["vdu-ref"]:
-                                mp_vm_mark = True
-                                break
-                        if mp_vm_mark:
-                            break
-                else:
-                    raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
-                                          "vdu[id='{}'] or vdu does not exist"
-                                          .format(mp["vdu-metric"]["vdu-metric-name-ref"],
-                                                  mp["vdu-metric"]["vdu-ref"]),
+
+    @staticmethod
+    def validate_monitoring_params(indata):
+        all_monitoring_params = set()
+        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
+            for mp in get_iterable(ivld.get("monitoring-parameters")):
+                mp_id = mp.get("id")
+                if mp_id and mp_id in all_monitoring_params:
+                    raise EngineException("Duplicated monitoring-parameter id in "
+                                          "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']"
+                                          .format(ivld["id"], mp_id),
                                           http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                else:
+                    all_monitoring_params.add(mp_id)
 
-        for sgd in get_iterable(indata.get("scaling-group-descriptor")):
-            for sp in get_iterable(sgd.get("scaling-policy")):
-                for sc in get_iterable(sp.get("scaling-criteria")):
-                    for mp in get_iterable(indata.get("monitoring-param")):
-                        if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
-                            break
-                    else:
-                        raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
-                                              "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
-                                              .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            for sgd_vdu in get_iterable(sgd.get("vdu")):
-                sgd_vdu_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    if vdu["id"] == sgd_vdu["vdu-id-ref"]:
-                        sgd_vdu_mark = True
-                        break
-                if sgd_vdu_mark:
-                    break
-            else:
-                raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
-                                      .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            for sca in get_iterable(sgd.get("scaling-config-action")):
-                if not indata.get("vnf-configuration"):
-                    raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
-                                          "scaling-group-descriptor[name='{}']:scaling-config-action"
-                                          .format(sgd["name"]),
+        for vdu in get_iterable(indata.get("vdu")):
+            for mp in get_iterable(vdu.get("monitoring-parameter")):
+                mp_id = mp.get("id")
+                if mp_id and mp_id in all_monitoring_params:
+                    raise EngineException("Duplicated monitoring-parameter id in "
+                                          "vdu[id='{}']:monitoring-parameter[id='{}']"
+                                          .format(vdu["id"], mp_id),
                                           http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-                for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
-                    if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
-                        break
                 else:
-                    raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
-                                          "primitive-name-ref='{}' does not match any "
-                                          "vnf-configuration:config-primitive:name"
-                                          .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
+                    all_monitoring_params.add(mp_id)
+
+        for df in get_iterable(indata.get("df")):
+            for mp in get_iterable(df.get("monitoring-parameter")):
+                mp_id = mp.get("id")
+                if mp_id and mp_id in all_monitoring_params:
+                    raise EngineException("Duplicated monitoring-parameter id in "
+                                          "df[id='{}']:monitoring-parameter[id='{}']"
+                                          .format(df["id"], mp_id),
                                           http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
-        return indata
+                else:
+                    all_monitoring_params.add(mp_id)
 
-    def _validate_input_edit(self, indata, force=False):
-        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
-        return indata
+    @staticmethod
+    def validate_scaling_group_descriptor(indata):
+        all_monitoring_params = set()
+        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
+            for mp in get_iterable(ivld.get("monitoring-parameters")):
+                all_monitoring_params.add(mp.get("id"))
+
+        for vdu in get_iterable(indata.get("vdu")):
+            for mp in get_iterable(vdu.get("monitoring-parameter")):
+                all_monitoring_params.add(mp.get("id"))
+
+        for df in get_iterable(indata.get("df")):
+            for mp in get_iterable(df.get("monitoring-parameter")):
+                all_monitoring_params.add(mp.get("id"))
+
+        for df in get_iterable(indata.get("df")):
+            for sa in get_iterable(df.get("scaling-aspect")):
+                for sp in get_iterable(sa.get("scaling-policy")):
+                    for sc in get_iterable(sp.get("scaling-criteria")):
+                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
+                        if sc_monitoring_param and sc_monitoring_param not in all_monitoring_params:
+                            raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                                                  "[name='{}']:scaling-criteria[name='{}']: "
+                                                  "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
+                                                  .format(df["id"], sa["id"], sp["name"], sc["name"],
+                                                          sc_monitoring_param),
+                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+                for sca in get_iterable(sa.get("scaling-config-action")):
+                    if "lcm-operations-configuration" not in df \
+                        or "operate-vnf-op-config" not in df["lcm-operations-configuration"] \
+                        or not utils.find_in_list(
+                            df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", []),
+                            lambda config: config["id"] == indata["id"]):
+                        raise EngineException("'day1-2 configuration' not defined in the descriptor but it is "
+                                              "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
+                                              .format(df["id"], sa["id"]),
+                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    for configuration in get_iterable(
+                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", [])
+                    ):
+                        for primitive in get_iterable(configuration.get("config-primitive")):
+                            if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
+                                break
+                        else:
+                            raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                                                  "config-primitive-name-ref='{}' does not match any "
+                                                  "day1-2 configuration:config-primitive:name"
+                                                  .format(df["id"], sa["id"], sca["vnf-config-primitive-name-ref"]),
+                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    def delete_extra(self, session, _id, db_content, not_send_msg=None):
+        """
+        Deletes associate file system storage (via super)
+        Deletes associated vnfpkgops from database.
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param db_content: The database content of the descriptor
+        :return: None
+        :raises: FsException in case of error while deleting associated storage
+        """
+        super().delete_extra(session, _id, db_content, not_send_msg)
+        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
+
+    def sol005_projection(self, data):
+        data["onboardingState"] = data["_admin"]["onboardingState"]
+        data["operationalState"] = data["_admin"]["operationalState"]
+        data["usageState"] = data["_admin"]["usageState"]
+
+        links = {}
+        links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
+        links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
+        links["packageContent"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])}
+        data["_links"] = links
+
+        return super().sol005_projection(data)
 
 
 class NsdTopic(DescriptorTopic):
     topic = "nsds"
     topic_msg = "nsd"
 
-    def __init__(self, db, fs, msg):
-        DescriptorTopic.__init__(self, db, fs, msg)
+    def __init__(self, db, fs, msg, auth):
+        DescriptorTopic.__init__(self, db, fs, msg, auth)
+
+    def pyangbind_validation(self, item, data, force=False):
+        if self._descriptor_data_is_in_old_format(data):
+            raise EngineException("ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+        try:
+            nsd_vnf_profiles = data.get('df', [{}])[0].get('vnf-profile', [])
+            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
+            pybindJSONDecoder.load_ietf_json({'nsd': {'nsd': [data]}}, None, None, obj=mynsd,
+                                             path_helper=True, skip_unknown=force)
+            out = pybindJSON.dumps(mynsd, mode="ietf")
+            desc_out = self._remove_envelop(yaml.safe_load(out))
+            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
+            if nsd_vnf_profiles:
+                desc_out['df'][0]['vnf-profile'] = nsd_vnf_profiles
+            return desc_out
+        except Exception as e:
+            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
+                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    @staticmethod
+    def _descriptor_data_is_in_old_format(data):
+        return ('nsd-catalog' in data) or ('nsd:nsd-catalog' in data)
 
     @staticmethod
     def _remove_envelop(indata=None):
@@ -584,89 +854,414 @@ class NsdTopic(DescriptorTopic):
             return {}
         clean_indata = indata
 
-        if clean_indata.get('nsd:nsd-catalog'):
-            clean_indata = clean_indata['nsd:nsd-catalog']
-        elif clean_indata.get('nsd-catalog'):
-            clean_indata = clean_indata['nsd-catalog']
+        if clean_indata.get('nsd'):
+            clean_indata = clean_indata['nsd']
+        elif clean_indata.get('etsi-nfv-nsd:nsd'):
+            clean_indata = clean_indata['etsi-nfv-nsd:nsd']
         if clean_indata.get('nsd'):
             if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
                 raise EngineException("'nsd' must be a list of only one element")
             clean_indata = clean_indata['nsd'][0]
-        elif clean_indata.get('nsd:nsd'):
-            if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
-                raise EngineException("'nsd:nsd' must be a list of only one element")
-            clean_indata = clean_indata['nsd:nsd'][0]
         return clean_indata
 
-    def _validate_input_new(self, indata, force=False):
+    def _validate_input_new(self, indata, storage_params, force=False):
+        indata.pop("nsdOnboardingState", None)
+        indata.pop("nsdOperationalState", None)
+        indata.pop("nsdUsageState", None)
+
+        indata.pop("links", None)
+
         indata = self.pyangbind_validation("nsds", indata, force)
+        # Cross references validation in the descriptor
         # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
+        for vld in get_iterable(indata.get("virtual-link-desc")):
+            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
+
+        self.validate_vnf_profiles_vnfd_id(indata)
+
         return indata
 
-    def _validate_input_edit(self, indata, force=False):
+    @staticmethod
+    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
+        if not vld.get("mgmt-network"):
+            return
+        vld_id = vld.get("id")
+        for df in get_iterable(indata.get("df")):
+            for vlp in get_iterable(df.get("virtual-link-profile")):
+                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
+                    if vlp.get("virtual-link-protocol-data"):
+                        raise EngineException("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
+                                              "protocol-data You cannot set a virtual-link-protocol-data "
+                                              "when mgmt-network is True"
+                                              .format(df["id"], vlp["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    @staticmethod
+    def validate_vnf_profiles_vnfd_id(indata):
+        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
+        for df in get_iterable(indata.get("df")):
+            for vnf_profile in get_iterable(df.get("vnf-profile")):
+                vnfd_id = vnf_profile.get("vnfd-id")
+                if vnfd_id and vnfd_id not in all_vnfd_ids:
+                    raise EngineException("Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
+                                          "does not match any vnfd-id".format(df["id"], vnf_profile["id"], vnfd_id),
+                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    def _validate_input_edit(self, indata, content, force=False):
         # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
+        """
+        indata looks as follows:
+            - In the new case (conformant)
+                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23', 
+                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
+            - In the old case (backwards-compatible)
+                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
+        """
+        if "_admin" not in indata:
+            indata["_admin"] = {}
+
+        if "nsdOperationalState" in indata:
+            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
+                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
+            else:
+                raise EngineException("State '{}' is not a valid operational state"
+                                      .format(indata["nsdOperationalState"]),
+                                      http_code=HTTPStatus.BAD_REQUEST)
+
+        # In the case of user defined data, we need to put the data in the root of the object
+        # to preserve current expected behaviour
+        if "userDefinedData" in indata:
+            data = indata.pop("userDefinedData")
+            if type(data) == dict:
+                indata["_admin"]["userDefinedData"] = data
+            else:
+                raise EngineException("userDefinedData should be an object, but is '{}' instead"
+                                      .format(type(data)),
+                                      http_code=HTTPStatus.BAD_REQUEST)
+        if ("operationalState" in indata["_admin"] and
+                content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
+            raise EngineException("nsdOperationalState already {}".format(content["_admin"]["operationalState"]),
+                                  http_code=HTTPStatus.CONFLICT)
         return indata
 
     def _check_descriptor_dependencies(self, session, descriptor):
         """
-        Check that the dependent descriptors exist on a new descriptor or edition
-        :param session: client session information
+        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
+        connection points are ok
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param descriptor: descriptor to be inserted or edit
         :return: None or raises exception
         """
-        if not descriptor.get("constituent-vnfd"):
+        if session["force"]:
             return
-        for vnf in descriptor["constituent-vnfd"]:
-            vnfd_id = vnf["vnfd-id-ref"]
-            filter_q = self._get_project_filter(session, write=False, show_all=True)
-            filter_q["id"] = vnfd_id
-            if not self.db.get_list("vnfds", filter_q):
-                raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
-                                      "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
+        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
+
+        # Cross references validation in the descriptor and vnfd connection point validation
+        for df in get_iterable(descriptor.get("df")):
+            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
+
+    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
+        vnfds_index = {}
+        if descriptor.get("vnfd-id") and not session["force"]:
+            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
+                query_filter = self._get_project_filter(session)
+                query_filter["id"] = vnfd_id
+                vnf_list = self.db.get_list("vnfds", query_filter)
+                if not vnf_list:
+                    raise EngineException("Descriptor error at 'vnfd-id'='{}' references a non "
+                                          "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
+                vnfds_index[vnfd_id] = vnf_list[0]
+        return vnfds_index
 
-    def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
-        super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
+    @staticmethod
+    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
+        for vnf_profile in get_iterable(df.get("vnf-profile")):
+            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
+            all_vnfd_ext_cpds = set()
+            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
+                if ext_cpd.get('id'):
+                    all_vnfd_ext_cpds.add(ext_cpd.get('id'))
+
+            for virtual_link in get_iterable(vnf_profile.get("virtual-link-connectivity")):
+                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
+                    vl_cpd_id = vl_cpd.get('constituent-cpd-id')
+                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
+                        raise EngineException("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+                                              "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+                                              "non existing ext-cpd:id inside vnfd '{}'"
+                                              .format(df["id"], vnf_profile["id"],
+                                                      virtual_link["virtual-link-profile-id"], vl_cpd_id, vnfd["id"]),
+                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
+        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+
+        self._check_descriptor_dependencies(session, final_content)
 
-        if not force:
-            self._check_descriptor_dependencies(session, final_content)
+        return final_content
 
-    def check_conflict_on_del(self, session, _id, force=False):
+    def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
         that NSD can be public and be used by other projects.
-        :param session:
-        :param _id: vnfd inernal id
-        :param force: Avoid this checking
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: nsd internal id
+        :param db_content: The database content of the _id
         :return: None or raises EngineException with the conflict
         """
-        if force:
+        if session["force"]:
             return
-        _filter = self._get_project_filter(session, write=False, show_all=False)
-        _filter["nsdId"] = _id
+        descriptor = db_content
+        descriptor_id = descriptor.get("id")
+        if not descriptor_id:  # empty nsd not uploaded
+            return
+
+        # check NSD used by NS
+        _filter = self._get_project_filter(session)
+        _filter["nsd-id"] = _id
         if self.db.get_list("nsrs", _filter):
-            raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
+            raise EngineException("There is at least one NS using this descriptor", http_code=HTTPStatus.CONFLICT)
+
+        # check NSD referenced by NST
+        del _filter["nsd-id"]
+        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
+        if self.db.get_list("nsts", _filter):
+            raise EngineException("There is at least one NetSlice Template referencing this descriptor",
+                                  http_code=HTTPStatus.CONFLICT)
+
+    def sol005_projection(self, data):
+        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
+        data["nsdOperationalState"] = data["_admin"]["operationalState"]
+        data["nsdUsageState"] = data["_admin"]["usageState"]
+
+        links = {}
+        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
+        links["nsd_content"] = {"href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])}
+        data["_links"] = links
+
+        return super().sol005_projection(data)
+
+
class NstTopic(DescriptorTopic):
    """Topic handling Network Slice Templates (NST) descriptors."""
    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """
        Validate "data" against the NST pyangbind information model.
        :param item: topic name (unused here, kept for the common signature)
        :param data: candidate nst descriptor
        :param force: when True, unknown attributes are skipped instead of failing
        :return: the validated descriptor without its envelope
        :raises EngineException: 422 when the model validation fails
        """
        try:
            nst_model = nst_im()
            pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=nst_model,
                                             path_helper=True, skip_unknown=force)
            serialized = pybindJSON.dumps(nst_model, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    @staticmethod
    def _remove_envelop(indata=None):
        """
        Strip the 'nst'/'nst:nst' envelope and return the inner descriptor dict.
        Returns {} for empty input; returns indata unchanged when no envelope is present.
        """
        if not indata:
            return {}
        for envelope in ('nst', 'nst:nst'):
            content = indata.get(envelope)
            if content:
                if not isinstance(content, list) or len(content) != 1:
                    raise EngineException("'{}' must be a list only one element".format(envelope))
                return content[0]
        return indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Drop read-only SOL005 state attributes and run the pyangbind validation."""
        for state_attr in ("onboardingState", "operationalState", "usageState"):
            indata.pop(state_attr, None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        for subnet in get_iterable(descriptor.get("netslice-subnet")):
            nsd_id = subnet["nsd-ref"]
            query = self._get_project_filter(session)
            query["id"] = nsd_id
            if not self.db.get_list("nsds", query):
                raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                                      "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run generic edit checks, then verify all referenced nsds exist."""
        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Conflict when some Netslice Instance was created from this template
        nsi_filter = self._get_project_filter(session)
        nsi_filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", nsi_filter):
            raise EngineException("there is at least one Netslice Instance using this descriptor",
                                  http_code=HTTPStatus.CONFLICT)

    def sol005_projection(self, data):
        """Expose the _admin states and _links in the SOL005 representation."""
        admin = data["_admin"]
        data["onboardingState"] = admin["onboardingState"]
        data["operationalState"] = admin["operationalState"]
        data["usageState"] = admin["usageState"]

        data["_links"] = {
            "self": {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])},
            "nst": {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])},
        }

        return super().sol005_projection(data)
 
 
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Units (PDU)."""
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the _admin section of a freshly created PDU record."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"].update({
            "onboardingState": "CREATED",
            "operationalState": "ENABLED",
            "usageState": "NOT_IN_USE",
        })

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # Conflict when some vnfr vdu record is bound to this PDU
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException("There is at least one VNF using this PDU", http_code=HTTPStatus.CONFLICT)
+
+
class VnfPkgOpTopic(BaseTopic):
    """Topic for VNF package operations (kdu upgrade/rollback requests). Create/list only."""
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        # Operations are immutable once created
        raise EngineException("Method 'edit' not allowed for topic '{}'".format(self.topic),
                              HTTPStatus.METHOD_NOT_ALLOWED)

    def delete(self, session, _id, dry_run=False):
        # Operations are kept as a permanent audit trail
        raise EngineException("Method 'delete' not allowed for topic '{}'".format(self.topic),
                              HTTPStatus.METHOD_NOT_ALLOWED)

    def delete_list(self, session, filter_q=None):
        raise EngineException("Method 'delete_list' not allowed for topic '{}'".format(self.topic),
                              HTTPStatus.METHOD_NOT_ALLOWED)

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
             op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)

        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # Locate the requested kdu inside the vnfd (first one with a matching name)
        kdu = next((k for k in vnfd.get("kdu", []) if k["name"] == kdu_name), None)
        if kdu is None:
            raise EngineException("Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name))

        helm_chart = kdu.get("helm-chart")
        juju_bundle = kdu.get("juju-bundle")
        if helm_chart:
            indata["helm-chart"] = helm_chart
            artifact_ref = helm_chart
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            artifact_ref = juju_bundle
        else:
            raise EngineException("Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']"
                                  .format(vnfpkg_id, kdu_name))

        # A "<repo>/<artifact>" reference implies a registered external k8s repository
        match = fullmatch(r"([^/]*)/([^/]*)", artifact_ref)
        repo_name = match.group(1) if match else None
        k8srepo_id = k8srepo_url = None
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url

        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            }
        }
        self.format_on_new(vnfpkgop_desc, session["project_id"], make_public=session["public"])
        # statusEnteredTime/startTime mirror the _admin creation timestamp
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None