[RIFT 16087] Backend changes to decouple storage semantics from user interface. Chang... 93/1693/1 pm_phase2
authorsinhan <nandan.sinha@riftio.com>
Wed, 26 Apr 2017 03:13:29 +0000 (03:13 +0000)
committersinhan <nandan.sinha@riftio.com>
Wed, 26 Apr 2017 03:13:29 +0000 (03:13 +0000)
Signed-off-by: sinhan <nandan.sinha@riftio.com>
rwlaunchpad/plugins/rwlaunchpadtasklet/rift/package/archive.py
rwlaunchpad/plugins/rwlaunchpadtasklet/rift/package/package.py
rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/export.py
rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/onboard.py
rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/uploader.py
rwlaunchpad/plugins/rwpkgmgr/rift/tasklets/rwpkgmgr/downloader/copy.py
rwlaunchpad/plugins/rwpkgmgr/rift/tasklets/rwpkgmgr/downloader/url.py
rwlaunchpad/plugins/rwpkgmgr/rift/tasklets/rwpkgmgr/proxy/filesystem.py
rwlaunchpad/plugins/rwpkgmgr/rift/tasklets/rwpkgmgr/rpc.py
rwlaunchpad/plugins/rwpkgmgr/rift/tasklets/rwpkgmgr/subscriber/download_status.py
rwlaunchpad/plugins/yang/rw-pkg-mgmt.yang

index fffce99..245f69c 100644 (file)
@@ -53,13 +53,14 @@ class TarPackageArchive(object):
         self.load_archive()
 
     @classmethod
-    def from_package(cls, log, pkg, tar_file_hdl):
+    def from_package(cls, log, pkg, tar_file_hdl, top_level_dir=None):
         """ Creates a TarPackageArchive from a existing Package
 
         Arguments:
             log - logger
             pkg - a DescriptorPackage instance
             tar_file_hdl - a writeable file handle to write tar archive data
+            top_level_dir - (opt.) top level dir under which the archive will be extracted
 
         Returns:
             A TarPackageArchive instance
@@ -73,8 +74,10 @@ class TarPackageArchive(object):
             tar_info.gname = "rift"
 
         archive = TarPackageArchive(log, tar_file_hdl, mode='w:gz')
+
         for pkg_file in pkg.files:
-            tar_info = tarfile.TarInfo(name=pkg_file)
+            filename = "%s/%s" % (top_level_dir, pkg_file) if top_level_dir else pkg_file
+            tar_info = tarfile.TarInfo(name=filename)
             tar_info.type = tarfile.REGTYPE
             tar_info.mode = pkg.get_file_mode(pkg_file)
             set_common_tarinfo_fields(tar_info)
@@ -83,7 +86,8 @@ class TarPackageArchive(object):
                 archive.tarfile.addfile(tar_info, pkg_file_hdl)
 
         for pkg_dir in pkg.dirs:
-            tar_info = tarfile.TarInfo(name=pkg_dir)
+            dirname = "%s/%s" % (top_level_dir, pkg_dir) if top_level_dir else pkg_dir
+            tar_info = tarfile.TarInfo(name=dirname)
             tar_info.type = tarfile.DIRTYPE
             tar_info.mode = 0o775
             set_common_tarinfo_fields(tar_info)
index dc31b68..b416281 100644 (file)
@@ -171,6 +171,16 @@ class DescriptorPackage(object):
 
         return self.descriptor_msg.name
 
+    @property
+    def descriptor_version(self):
+        desc_msg = self.descriptor_msg
+        return desc_msg.version if desc_msg.has_field("version") else ''
+
+    @property
+    def descriptor_vendor(self):
+        desc_msg = self.descriptor_msg
+        return desc_msg.vendor if desc_msg.has_field("vendor") else ''
+
     @classmethod
     def get_descriptor_patterns(cls):
         """ Returns a tuple of descriptor regex and Package Types  """
@@ -354,6 +364,7 @@ class DescriptorPackage(object):
             # Copy the contents of the file to the correct path
             # For folder creation (or nested folders), dest_file appears w/ trailing "/" like: dir1/ or dir1/dir2/
             # For regular file upload, dest_file appears as dir1/abc.txt
+
             dest_dir_path = os.path.dirname(dest_file)
             if not os.path.isdir(dest_dir_path):
                 os.makedirs(dest_dir_path)
@@ -482,7 +493,7 @@ class DescriptorPackage(object):
             raise PackageError("Empty file name added")
 
         if rel_path not in self._package_file_mode_map:
-            raise PackageError("File %s does not in package" % rel_path)
+            raise PackageError("File %s does not exist in package" % rel_path)
 
         del self._package_file_mode_map[rel_path]
 
@@ -530,6 +541,32 @@ class VnfdPackage(DescriptorPackage):
     def serializer(self):
         return VnfdPackage.SERIALIZER
 
+class PackageConstructValidator(object): 
+
+    def __init__(self, log):
+        self._log = log
+
+    def validate(self, package):
+        """ Validate presence of descriptor file (.yaml) at the top level in the 
+        package folder structure. 
+
+        Arguments: 
+            package - The Descriptor Package being validated. 
+        Returns: 
+            None
+        Raises:
+            PackageValidationError - The package validation failed for some
+              generic reason.
+        """
+
+        desc_file = package.descriptor_file
+        prefix, desc_file = package.prefix.rstrip('/'), desc_file.rstrip('/')
+
+        if os.path.dirname(desc_file) != prefix: 
+            msg = "Descriptor file {} not found in expected location {}".format(desc_file, prefix)
+            self._log.error(msg)
+            raise PackageValidationError(msg)
+
 
 class PackageChecksumValidator(object):
     """  This class uses the checksums.txt file in the package
@@ -540,6 +577,7 @@ class PackageChecksumValidator(object):
 
     def __init__(self, log):
         self._log = log
+        self.validated_file_checksums = {}
 
     @classmethod
     def get_package_checksum_file(cls, package):
@@ -549,6 +587,10 @@ class PackageChecksumValidator(object):
 
         return checksum_file
 
+    @property
+    def checksums(self):
+        return self.validated_file_checksums
+
     def validate(self, package):
         """ Validate file checksums match that in the checksums.txt
 
@@ -564,7 +606,6 @@ class PackageChecksumValidator(object):
             PackageFileChecksumError - A file within the package did not match the
               checksum within checksums.txt
         """
-        validated_file_checksums = {}
 
         try:
             checksum_file = PackageChecksumValidator.get_package_checksum_file(package)
@@ -600,9 +641,7 @@ class PackageChecksumValidator(object):
                 self._log.error(msg)
                 raise PackageFileChecksumError(pkg_file)
 
-            validated_file_checksums[pkg_file] = file_checksum
-
-        return validated_file_checksums
+            self.validated_file_checksums[pkg_file] = file_checksum
 
 
 class TarPackageArchive(object):
index ff6a373..e404852 100644 (file)
@@ -21,6 +21,8 @@ import os.path
 import stat
 import time
 import uuid
+import collections
+import json
 
 import tornado.web
 
@@ -82,12 +84,12 @@ class DescriptorPackageArchiveExporter(object):
     def __init__(self, log):
         self._log = log
 
-    def _create_archive_from_package(self, archive_hdl, package, open_fn):
+    def _create_archive_from_package(self, archive_hdl, package, open_fn, top_level_dir=None):
         orig_open = package.open
         try:
             package.open = open_fn
             archive = rift.package.archive.TarPackageArchive.from_package(
-                    self._log, package, archive_hdl
+                    self._log, package, archive_hdl, top_level_dir
                     )
             return archive
         finally:
@@ -154,7 +156,7 @@ class DescriptorPackageArchiveExporter(object):
 
             return open_fn(rel_path)
 
-        archive = self._create_archive_from_package(archive_hdl, package, open_wrapper)
+        archive = self._create_archive_from_package(archive_hdl, package, open_wrapper, new_desc_msg.name)
 
         return archive
 
@@ -195,7 +197,7 @@ class DescriptorPackageArchiveExporter(object):
 
 
 class ExportRpcHandler(mano_dts.AbstractRpcHandler):
-    def __init__(self, log, dts, loop, application, store_map, exporter, catalog_map):
+    def __init__(self, log, dts, loop, application, store_map, exporter, onboarder, catalog_map):
         """
         Args:
             application: UploaderApplication
@@ -208,6 +210,7 @@ class ExportRpcHandler(mano_dts.AbstractRpcHandler):
         self.application = application
         self.store_map = store_map
         self.exporter = exporter
+        self.onboarder = onboarder
         self.catalog_map = catalog_map
         self.log = log
 
@@ -256,15 +259,18 @@ class ExportRpcHandler(mano_dts.AbstractRpcHandler):
         # Get the format for exporting
         format_ = msg.export_format.lower()
 
-        filename = None
+        # Initial value of the exported filename 
+        self.filename = "{name}_{ver}".format(
+                name=desc_msg.name, 
+                ver=desc_msg.version)
 
         if grammar == 'tosca':
-            filename = "{}.zip".format(transaction_id)
             self.export_tosca(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
+            filename = "{}.zip".format(self.filename)
             log.message(message.FilenameMessage(filename))
         else:
-            filename = "{}.tar.gz".format(transaction_id)
             self.export_rift(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
+            filename = "{}.tar.gz".format(self.filename)
             log.message(message.FilenameMessage(filename))
 
         log.message(ExportSuccess())
@@ -279,8 +285,8 @@ class ExportRpcHandler(mano_dts.AbstractRpcHandler):
                     "nsd": convert.RwNsdSerializer,
                     },
                 "mano": {
-                    "vnfd": convert.VnfdSerializer,
-                    "nsd": convert.NsdSerializer,
+                    "vnfd": convert.RwVnfdSerializer,
+                    "nsd": convert.RwNsdSerializer,
                     }
                 }
 
@@ -314,11 +320,35 @@ class ExportRpcHandler(mano_dts.AbstractRpcHandler):
                     log, hdl
                     )
 
+        # Try to get the updated descriptor from the api endpoint so that we have 
+        # the updated descriptor file in the exported archive and the name of the archive 
+        # tar matches the name in the yaml descriptor file. Proceed with the current 
+        # file if there's an error
+        #
+        json_desc_msg = src_serializer.to_json_string(desc_msg)
+        desc_name, desc_version = desc_msg.name, desc_msg.version
+        try: 
+            d = collections.defaultdict(dict)
+            sub_dict = self.onboarder.get_updated_descriptor(desc_msg)
+            root_key, sub_key = "{0}:{0}-catalog".format(desc_type), "{0}:{0}".format(desc_type)
+            # root the dict under "vnfd:vnfd-catalog" 
+            d[root_key] = sub_dict
+            
+            json_desc_msg = json.dumps(d)
+            desc_name, desc_version = sub_dict[sub_key]['name'], sub_dict[sub_key]['version']
+
+        except Exception as e:
+            msg = "Exception {} raised - {}".format(e.__class__.__name__, str(e)) 
+            self.log.debug(msg)
+
+        # exported filename based on the updated descriptor name
+        self.filename = "{}_{}".format(desc_name, desc_version)
+
         self.exporter.export_package(
                 package=package,
                 export_dir=self.application.export_dir,
-                file_id=transaction_id,
-                json_desc_str=src_serializer.to_json_string(desc_msg),
+                file_id = self.filename,
+                json_desc_str=json_desc_msg,
                 dest_serializer=dest_serializer,
                 )
 
index b12e192..636880f 100644 (file)
@@ -162,3 +162,57 @@ class DescriptorOnboarder(object):
             self._log.error(msg)
             raise OnboardError(msg) from e
 
+    def get_updated_descriptor(self, descriptor_msg, auth=None): 
+        """ Get updated descriptor file 
+
+        Arguments:
+            descriptor_msg - A descriptor proto-gi msg
+            auth - the authorization header
+
+        Raises:
+            OnboardError - The descriptor retrieval failed
+        """
+
+        if type(descriptor_msg) not in DescriptorOnboarder.DESC_SERIALIZER_MAP:
+            raise TypeError("Invalid descriptor message type")
+
+        endpoint = DescriptorOnboarder.DESC_ENDPOINT_MAP[type(descriptor_msg)]
+
+        url = "{}://{}:{}/api/config/{}/{}".format(
+                "https" if self._use_ssl else "http",
+                self._host,
+                self.port,
+                endpoint,
+                descriptor_msg.id
+                )
+
+        hdrs = self._get_headers(auth)
+        hdrs.update({'Accept': 'application/json'})
+        request_args = dict(
+            url=url,
+            headers=hdrs,
+            auth=DescriptorOnboarder.AUTH,
+            verify=False,
+            cert=(self._ssl_cert, self._ssl_key) if self._use_ssl else None,
+            timeout=self.timeout,
+        )
+
+        response = None
+        try:
+            response = requests.get(**request_args)
+            response.raise_for_status()
+        except requests.exceptions.ConnectionError as e:
+            msg = "Could not connect to restconf endpoint: %s" % str(e)
+            self._log.error(msg)
+            raise OnboardError(msg) from e
+        except requests.exceptions.HTTPError as e:
+            msg = "GET request to %s error: %s" % (request_args["url"], response.text)
+            self._log.error(msg)
+            raise OnboardError(msg) from e
+        except requests.exceptions.Timeout as e:
+            msg = "Timed out connecting to restconf endpoint: %s" % str(e)
+            self._log.error(msg)
+            raise OnboardError(msg) from e
+
+        return response.json()
+
index c908bb3..ed3e683 100644 (file)
@@ -606,16 +606,22 @@ class OnboardPackage(downloader.DownloaderProtocol):
                         OnboardError("Cloud-Init file reference in VNFD does not match with cloud-init file"))
 
     def validate_package(self, package):
-        checksum_validator = rift.package.package.PackageChecksumValidator(self.log)
+        validators = (
+                rift.package.package.PackageChecksumValidator(self.log),
+                rift.package.package.PackageConstructValidator(self.log),
+                )
 
-        try:
-            file_checksums = checksum_validator.validate(package)
-        except rift.package.package.PackageFileChecksumError as e:
-            raise MessageException(OnboardChecksumMismatch(e.filename)) from e
-        except rift.package.package.PackageValidationError as e:
-            raise MessageException(OnboardUnreadablePackage()) from e
+        # Run the validators for checksum and package construction for imported pkgs
+        for validator in validators:
+            try:
+                validator.validate(package)
 
-        return file_checksums
+            except rift.package.package.PackageFileChecksumError as e:
+                raise MessageException(OnboardChecksumMismatch(e.filename)) from e
+            except rift.package.package.PackageValidationError as e:
+                raise MessageException(OnboardUnreadablePackage()) from e
+
+        return validators[0].checksums
 
     def onboard_descriptors(self, package):
         descriptor_msg = package.descriptor_msg
@@ -705,6 +711,7 @@ class UploaderApplication(tornado.web.Application):
                     self,
                     store_map=self.package_store_map,
                     exporter=self.exporter,
+                    onboarder=self.onboarder, 
                     catalog_map=catalog_map
                     )
 
index b1f11ec..c296c91 100644 (file)
@@ -128,16 +128,14 @@ class PackageFileCopier:
         store = self.proxy._get_store(self.package_type)
         src_path = store._get_package_dir(self.src_package_id)
         self.src_package = store.get_package(self.src_package_id) 
-        src_desc_name = self.src_package.descriptor_name
-        src_copy_path = os.path.join(src_path, src_desc_name)
 
-        self.dest_copy_path = os.path.join(store.DEFAULT_ROOT_DIR, 
-                self.dest_package_id
-                self.dest_package_name)
+        self.dest_copy_path = os.path.join(
+                store.DEFAULT_ROOT_DIR,
+                self.dest_package_id) 
         self.log.debug("Copying contents from {src} to {dest}".
-                format(src=src_copy_path, dest=self.dest_copy_path))
+                format(src=src_path, dest=self.dest_copy_path))
 
-        shutil.copytree(src_copy_path, self.dest_copy_path)
+        shutil.copytree(src_path, self.dest_copy_path)
 
     def _create_descriptor_file(self):
         """ Update descriptor file for the newly copied descriptor catalog.
index d495620..88155fa 100644 (file)
@@ -38,6 +38,8 @@ class PackageFileDownloader(downloader.UrlDownloader):
             rpc_input.package_id,
             rpc_input.package_path,
             rpc_input.package_type,
+            rpc_input.vnfd_file_type, 
+            rpc_input.nsd_file_type,
             auth=auth,
             proxy=proxy,
             file_obj=file_obj,
@@ -50,6 +52,8 @@ class PackageFileDownloader(downloader.UrlDownloader):
                  package_id,
                  package_path,
                  package_type,
+                 vnfd_file_type, 
+                 nsd_file_type,
                  proxy,
                  file_obj=None,
                  delete_on_fail=True,
@@ -67,6 +71,7 @@ class PackageFileDownloader(downloader.UrlDownloader):
         self.package_id = package_id
         self.package_type = package_type
         self.package_path = package_path
+        self.package_file_type = vnfd_file_type.lower() if vnfd_file_type else nsd_file_type.lower()
         self.proxy = proxy
 
     def convert_to_yang(self):
@@ -106,7 +111,8 @@ class PackageFileDownloader(downloader.UrlDownloader):
                 self.meta.filepath,
                 self.package_type,
                 self.package_id,
-                self.package_path)
+                self.package_path, 
+                self.package_file_type)
 
         except Exception as e:
             self.log.exception(e)
index a303424..6cfc0fa 100644 (file)
@@ -78,10 +78,14 @@ class FileSystemProxy(AbstractPackageManagerProxy):
 
         return self.SCHEMA[package_type]
 
-    def package_file_add(self, new_file, package_type, package_id, package_path):
+    def package_file_add(self, new_file, package_type, package_id, package_path, package_file_type):
         # Get the schema from thr package path
         # the first part will always be the vnfd/nsd name
         mode = 0o664
+
+        # for files other than README, create the package path from the asset type
+        package_path = package_file_type + "/" + package_path \
+            if package_file_type != "readme" else package_path
         components = package_path.split("/")
         if len(components) > 2:
             schema = components[1]
@@ -94,7 +98,7 @@ class FileSystemProxy(AbstractPackageManagerProxy):
 
         # Construct abs path of the destination obj
         path = store._get_package_dir(package_id)
-        dest_file = os.path.join(path, package_path)
+        dest_file = os.path.join(path, package.prefix, package_path)
 
         try:
             package.insert_file(new_file, dest_file, package_path, mode=mode)
@@ -104,11 +108,15 @@ class FileSystemProxy(AbstractPackageManagerProxy):
 
         return True
 
-    def package_file_delete(self, package_type, package_id, package_path):
+    def package_file_delete(self, package_type, package_id, package_path, package_file_type):
         package_type = package_type.lower()
         store = self._get_store(package_type)
         package = store.get_package(package_id)
 
+        # for files other than README, create the package path from the asset type
+        package_path = package_file_type + "/" + package_path \
+            if package_file_type != "readme" else package_path
+
         # package_path has to be relative, so strip off the starting slash if
         # provided incorrectly.
         if package_path[0] == "/":
@@ -116,7 +124,7 @@ class FileSystemProxy(AbstractPackageManagerProxy):
 
         # Construct abs path of the destination obj
         path = store._get_package_dir(package_id)
-        dest_file = os.path.join(path, package_path)
+        dest_file = os.path.join(path, package.prefix, package_path)
 
         try:
             package.delete_file(dest_file, package_path)
index a71f108..dc0b27a 100644 (file)
@@ -192,10 +192,13 @@ class PackageDeleteOperationsRpcHandler(mano_dts.AbstractRpcHandler):
         rpc_op = RPC_PACKAGE_DELETE_ENDPOINT.from_dict({"status": str(True)})
 
         try:
+            package_file_type = msg.vnfd_file_type.lower() \
+                    if msg.vnfd_file_type else msg.nsd_file_type.lower()
             self.proxy.package_file_delete(
                 msg.package_type,
                 msg.package_id,
-                msg.package_path)
+                msg.package_path, 
+                package_file_type)
         except Exception as e:
             self.log.exception(e)
             rpc_op.status = str(False)
index b7bed38..042efa6 100644 (file)
@@ -101,7 +101,7 @@ def actionCreate(descriptor, msg):
         descriptor.log.debug("Skpping folder creation, {} already present".format(download_dir))
         return
     else: 
-        download_dir = os.path.join(download_dir, desc_name) 
+        # Folder structure is based on top-level package-id directory
         if not os.path.exists(download_dir):
             os.makedirs(download_dir)
             descriptor.log.debug("Created directory {}".format(download_dir))
index 5fbd621..b863caf 100644 (file)
@@ -76,6 +76,37 @@ module rw-pkg-mgmt
     }
   }
 
+  typedef package-file-type {
+    type enumeration {
+      enum ICONS;
+      enum CHARMS;
+      enum SCRIPTS;
+      enum IMAGES;
+      enum CLOUD_INIT;
+      enum README;
+    }
+  }
+
+  typedef vnfd-file-type {
+    type enumeration {
+      enum ICONS;
+      enum CHARMS;
+      enum SCRIPTS;
+      enum IMAGES;
+      enum CLOUD_INIT;
+      enum README;
+    }
+  }
+
+  typedef nsd-file-type {
+    type enumeration {
+      enum VNF_CONFIG;
+      enum NS_CONFIG;
+      enum ICONS;
+      enum SCRIPTS;
+    }
+  }
+
   typedef export-schema {
     type enumeration {
       enum RIFT;
@@ -364,6 +395,23 @@ module rw-pkg-mgmt
     input {
       uses package-file-identifer;
       uses external-url-data;
+
+      choice catalog-type {
+          mandatory true;
+          case VNFD {
+            leaf vnfd-file-type { 
+                description "Type of vnfd file being added to the package"; 
+                type vnfd-file-type; 
+            }
+          }
+          case NSD {
+            leaf nsd-file-type { 
+                description "Type of nsd file being added to the package"; 
+                type nsd-file-type; 
+            }
+          }
+      }
+      
     }
 
     output {
@@ -379,6 +427,21 @@ module rw-pkg-mgmt
 
     input {
       uses package-file-identifer;
+      choice catalog-type {
+          case VNFD {
+            leaf vnfd-file-type { 
+                description "Type of file being removed from the vnfd package"; 
+                type vnfd-file-type; 
+            }
+          }
+          case NSD {
+            leaf nsd-file-type { 
+                description "Type of file being removed from the nsd package"; 
+                type nsd-file-type; 
+            }
+          }
+      }
+      
     }
 
     output {