feature: sol004 and sol007 21/11421/1
author    bravof <fbravo@whitestack.com>
Fri, 29 Oct 2021 19:32:38 +0000 (16:32 -0300)
committer garciadeblas <gerardo.garciadeblas@telefonica.com>
Fri, 26 Nov 2021 14:41:23 +0000 (15:41 +0100)
Change-Id: I451f7e53ae48ecd9e3762c2e007d33cfac5d8292
Signed-off-by: bravof <fbravo@whitestack.com>
Signed-off-by: garciadeblas <gerardo.garciadeblas@telefonica.com>
osmclient/common/package_handling.py [new file with mode: 0644]
osmclient/common/package_tool.py
osmclient/common/sol004_package.py [new file with mode: 0644]
osmclient/common/sol007_package.py [new file with mode: 0644]
osmclient/common/sol_package.py [new file with mode: 0644]
osmclient/common/utils.py
osmclient/scripts/osm.py
osmclient/sol005/k8scluster.py
osmclient/sol005/osmrepo.py
osmclient/sol005/package.py
tox.ini

diff --git a/osmclient/common/package_handling.py b/osmclient/common/package_handling.py
new file mode 100644 (file)
index 0000000..40a2fc5
--- /dev/null
@@ -0,0 +1,70 @@
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import osmclient.common.utils as utils
+import os
+
+SOL004_TOSCA = "SOL004_TOSCA"
+SOL004 = "SOL004"
+SOL007_TOSCA = "SOL007_TOSCA"
+SOL007 = "SOL007"
+OSM_OLD = "OSM_OLD"
+
+
+def get_package_type(package_folder):
+    """
+    Detects the package's structure and returns its type:
+    SOL004 or SOL004_TOSCA
+    SOL007 or SOL007_TOSCA
+    OSM_OLD
+    """
+
+    package_files = os.listdir(package_folder)
+    if "Definitions" in package_files and "TOSCA-Metadata" in package_files:
+        descriptors = [
+            definition
+            for definition in os.listdir(package_folder + "/Definitions")
+            if definition.endswith(".yaml") or definition.endswith(".yml")
+        ]
+        if len(descriptors) < 1:
+            raise Exception(
+                "No descriptor found in this package; OSM expects at least one"
+            )
+        with open(
+            os.path.join(package_folder, "Definitions", descriptors[0])
+        ) as descriptor:
+            pkg_type = utils.get_key_val_from_descriptor(descriptor)
+        if pkg_type["type"] == "nsd":
+            return SOL007_TOSCA
+        else:
+            return SOL004_TOSCA
+    else:
+        manifests = [afile for afile in package_files if afile.endswith(".mf")]
+        if len(manifests) < 1:
+            # No manifest found, probably old OSM package structure
+            return OSM_OLD
+        else:
+            descriptors = [
+                definition
+                for definition in package_files
+                if definition.endswith(".yaml") or definition.endswith(".yml")
+            ]
+            if len(descriptors) < 1:
+                raise Exception(
+                    "No descriptor found on this package, OSM was expecting at least 1"
+                )
+            with open(os.path.join(package_folder, descriptors[0])) as descriptor:
+                pkg_type = utils.get_key_val_from_descriptor(descriptor)
+                if pkg_type["type"] == "nsd":
+                    return SOL007
+                else:
+                    return SOL004
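
A minimal usage sketch of the new helper (the package folder path is hypothetical; constants and function as added above):

    import osmclient.common.package_handling as package_handling

    package_folder = "./native_charm_vnf"  # hypothetical local package folder
    pkg_type = package_handling.get_package_type(package_folder)

    if pkg_type == package_handling.OSM_OLD:
        print("Legacy OSM layout: will be packaged as .tar.gz")
    else:
        # SOL004, SOL004_TOSCA, SOL007 and SOL007_TOSCA are packaged as .zip
        print("ETSI layout ({}): will be packaged as .zip".format(pkg_type))
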
diff --git a/osmclient/common/package_tool.py b/osmclient/common/package_tool.py
index 436053c..cf09bac 100644 (file)
@@ -23,12 +23,14 @@ import shutil
 import subprocess
 import tarfile
 import time
-
 from jinja2 import Environment, PackageLoader
 from osm_im.validation import Validation as validation_im
 from osm_im.validation import ValidationException
 from osm_im import im_translation
+from osmclient.common import package_handling as package_handling
 from osmclient.common.exceptions import ClientException
+from .sol004_package import SOL004Package
+from .sol007_package import SOL007Package
 import yaml
 
 
@@ -365,8 +367,16 @@ class PackageTool(object):
                 raise ClientException(
                     "No descriptor file found in: {}".format(package_folder)
                 )
-        charm_list = self.build_all_charms(package_folder, skip_charm_build)
-        return self.build_tarfile(package_folder, charm_list)
+
+        is_sol004_007 = (
+            package_handling.get_package_type(package_folder)
+            != package_handling.OSM_OLD
+        )
+
+        charm_list = self.build_all_charms(
+            package_folder, skip_charm_build, is_sol004_007
+        )
+        return self.build_compressed_file(package_folder, charm_list, is_sol004_007)
 
     def calculate_checksum(self, package_folder):
         """
@@ -497,7 +507,7 @@ class PackageTool(object):
 
         return missing_paths
 
-    def build_all_charms(self, package_folder, skip_charm_build):
+    def build_all_charms(self, package_folder, skip_charm_build, sol004_007=True):
         """
         **Read the descriptor file, check that the charms referenced are in the folder and compiles them**
 
@@ -508,7 +518,13 @@ class PackageTool(object):
         self._logger.debug("")
         charms_set = set()
         descriptor_file = False
-        descriptors_paths = [f for f in glob.glob(package_folder + "/*.yaml")]
+        package_type = package_handling.get_package_type(package_folder)
+        if sol004_007 and package_type.find("TOSCA") >= 0:
+            descriptors_paths = [
+                f for f in glob.glob(package_folder + "/Definitions/*.yaml")
+            ]
+        else:
+            descriptors_paths = [f for f in glob.glob(package_folder + "/*.yaml")]
         for file in descriptors_paths:
             if file.endswith("nfd.yaml"):
                 descriptor_file = True
@@ -526,24 +542,32 @@ class PackageTool(object):
         if charms_set and not skip_charm_build:
             for charmName in charms_set:
                 if os.path.isdir(
-                    "{}/charms/layers/{}".format(package_folder, charmName)
+                    "{}/{}charms/layers/{}".format(
+                        package_folder, "Scripts/" if sol004_007 else "", charmName
+                    )
                 ):
                     print(
-                        "Building charm {}/charms/layers/{}".format(
-                            package_folder, charmName
+                        "Building charm {}/{}charms/layers/{}".format(
+                            package_folder, "Scripts/" if sol004_007 else "", charmName
                         )
                     )
-                    self.charm_build(package_folder, charmName)
+                    self.charm_build(package_folder, charmName, sol004_007)
                     print("Charm built: {}".format(charmName))
                 elif os.path.isdir(
-                    "{}/charms/ops/{}".format(package_folder, charmName)
+                    "{}/{}charms/ops/{}".format(
+                        package_folder, "Scripts/" if sol004_007 else "", charmName
+                    )
                 ):
                     self.charmcraft_build(package_folder, charmName)
                 else:
                     if not os.path.isdir(
-                        "{}/charms/{}".format(package_folder, charmName)
+                        "{}/{}charms/{}".format(
+                            package_folder, "Scripts/" if sol004_007 else "", charmName
+                        )
                     ) and not os.path.isfile(
-                        "{}/charms/{}".format(package_folder, charmName)
+                        "{}/{}charms/{}".format(
+                            package_folder, "Scripts/" if sol004_007 else "", charmName
+                        )
                     ):
                         raise ClientException(
                             "The charm: {} referenced in the descriptor file "
@@ -556,7 +580,7 @@ class PackageTool(object):
 
     def discover_folder_structure(self, base_directory, name, override):
         """
-        **Discover files and folders structure for OSM descriptors given a base_directory and name**
+        **Discover the file and folder structure for SOL004/SOL007 descriptors, given a base_directory and name**
 
         :params:
             - base_directory: is the location of the package to be created
@@ -569,14 +593,16 @@ class PackageTool(object):
         files_folders = {
             "folders": [
                 ("{}_ns".format(prefix), "ns"),
-                ("{}_ns/icons".format(prefix), "ns"),
-                ("{}_ns/charms".format(prefix), "ns"),
+                ("{}_ns/Licenses".format(prefix), "ns"),
+                ("{}_ns/Files/icons".format(prefix), "ns"),
+                ("{}_ns/Scripts/charms".format(prefix), "ns"),
                 ("{}_vnf".format(name), "vnf"),
-                ("{}_vnf/charms".format(prefix), "vnf"),
-                ("{}_vnf/cloud_init".format(prefix), "vnf"),
-                ("{}_vnf/images".format(prefix), "vnf"),
-                ("{}_vnf/icons".format(prefix), "vnf"),
-                ("{}_vnf/scripts".format(prefix), "vnf"),
+                ("{}_vnf/Licenses".format(prefix), "vnf"),
+                ("{}_vnf/Scripts/charms".format(prefix), "vnf"),
+                ("{}_vnf/Scripts/cloud_init".format(prefix), "vnf"),
+                ("{}_vnf/Files/images".format(prefix), "vnf"),
+                ("{}_vnf/Files/icons".format(prefix), "vnf"),
+                ("{}_vnf/Scripts/scripts".format(prefix), "vnf"),
                 ("{}_nst".format(prefix), "nst"),
                 ("{}_nst/icons".format(prefix), "nst"),
             ],
@@ -585,7 +611,7 @@ class PackageTool(object):
                 ("{}_ns/README.md".format(prefix), "ns", "readme"),
                 ("{}_vnf/{}_vnfd.yaml".format(prefix, name), "vnf", "descriptor"),
                 (
-                    "{}_vnf/cloud_init/cloud-config.txt".format(prefix),
+                    "{}_vnf/Scripts/cloud_init/cloud-config.txt".format(prefix),
                     "vnf",
                     "cloud_init",
                 ),
@@ -598,21 +624,33 @@ class PackageTool(object):
         # print("Missing files and folders: {}".format(missing_files_folders))
         return missing_files_folders
 
-    def charm_build(self, charms_folder, build_name):
+    def charm_build(self, charms_folder, build_name, sol004_007=True):
         """
         Build the charms inside the package.
         params: package_folder is the name of the folder where is the charms to compile.
                 build_name is the name of the layer or interface
         """
         self._logger.debug("")
-        os.environ["JUJU_REPOSITORY"] = "{}/charms".format(charms_folder)
+
+        if sol004_007:
+            os.environ["JUJU_REPOSITORY"] = "{}/Scripts/charms".format(charms_folder)
+        else:
+            os.environ["JUJU_REPOSITORY"] = "{}/charms".format(charms_folder)
+
         os.environ["CHARM_LAYERS_DIR"] = "{}/layers".format(
             os.environ["JUJU_REPOSITORY"]
         )
         os.environ["CHARM_INTERFACES_DIR"] = "{}/interfaces".format(
             os.environ["JUJU_REPOSITORY"]
         )
-        os.environ["CHARM_BUILD_DIR"] = "{}/charms/builds".format(charms_folder)
+
+        if sol004_007:
+            os.environ["CHARM_BUILD_DIR"] = "{}/Scripts/charms/builds".format(
+                charms_folder
+            )
+        else:
+            os.environ["CHARM_BUILD_DIR"] = "{}/charms/builds".format(charms_folder)
+
         if not os.path.exists(os.environ["CHARM_BUILD_DIR"]):
             os.makedirs(os.environ["CHARM_BUILD_DIR"])
         src_folder = "{}/{}".format(os.environ["CHARM_LAYERS_DIR"], build_name)
@@ -628,7 +666,7 @@ class PackageTool(object):
                 build_name is the name of the layer or interface
         """
         self._logger.debug("Building charm {}".format(charm_name))
-        src_folder = f"{package_folder}/charms/ops/{charm_name}"
+        src_folder = f"{package_folder}/Scripts/charms/ops/{charm_name}"
         current_directory = os.getcwd()
         os.chdir(src_folder)
         try:
@@ -643,6 +681,62 @@ class PackageTool(object):
         finally:
             os.chdir(current_directory)
 
+    def build_compressed_file(self, package_folder, charm_list=None, sol004_007=True):
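+        """
+        Builds a .zip (SOL004/SOL007) or a .tar.gz (legacy OSM) package from package_folder
+        params: package_folder is the name of the folder to be packaged
+                charm_list is the list of built charms to include
+                sol004_007 selects the ETSI .zip layout when True
+        returns: the generated package file name
+        """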
+        if sol004_007:
+            return self.build_zipfile(package_folder, charm_list)
+        else:
+            return self.build_tarfile(package_folder, charm_list)
+
+    def build_zipfile(self, package_folder, charm_list=None):
+        """
+        Creates a zip file given a package_folder
+        params: package_folder is the name of the folder to be packaged
+        returns: .zip name
+        """
+        self._logger.debug("")
+        cwd = None
+        try:
+            directory_name, package_name = self.create_temp_dir_sol004_007(
+                package_folder, charm_list
+            )
+            cwd = os.getcwd()
+            os.chdir(directory_name)
+            package_type = package_handling.get_package_type(package_folder)
+            self._logger.debug("Package type: {}".format(package_type))
+
+            if (
+                package_handling.SOL007 == package_type
+                or package_handling.SOL007_TOSCA == package_type
+            ):
+                the_package = SOL007Package(package_folder)
+            elif (
+                package_handling.SOL004 == package_type
+                or package_handling.SOL004_TOSCA == package_type
+            ):
+                the_package = SOL004Package(package_folder)
+
+            the_package.create_or_update_metadata_file()
+
+            the_zip_package = shutil.make_archive(
+                os.path.join(cwd, package_name),
+                "zip",
+                os.path.join(directory_name, package_name),
+            )
+
+            print("Package created: {}".format(the_zip_package))
+
+            return the_zip_package
+
+        except Exception as exc:
+            raise ClientException(
+                "failure during build of zip file (create temp dir, calculate checksum, "
+                "zip file): {}".format(exc)
+            )
+        finally:
+            if cwd:
+                os.chdir(cwd)
+            shutil.rmtree(os.path.join(package_folder, "tmp"))
+
     def build_tarfile(self, package_folder, charm_list=None):
         """
         Creates a .tar.gz file given a package_folder
@@ -744,6 +838,75 @@ class PackageTool(object):
                     self._logger.debug("DONE")
         return directory_name, package_name
 
+    def copy_tree(self, s, d, ignore):
+        self._logger.debug("Copying tree: {} -> {}".format(s, d))
+        shutil.copytree(s, d, symlinks=True, ignore=ignore)
+        self._logger.debug("DONE")
+
+    def create_temp_dir_sol004_007(self, package_folder, charm_list=None):
+        """
+        Creates a temporary folder and copies the contents of package_folder into it for packaging
+        """
+        self._logger.debug("")
+        ignore_patterns = ".gitignore"
+        ignore = shutil.ignore_patterns(ignore_patterns)
+        directory_name = os.path.abspath(package_folder)
+        package_name = os.path.basename(directory_name)
+        directory_name += "/tmp"
+        os.makedirs("{}/{}".format(directory_name, package_name), exist_ok=True)
+        self._logger.debug("Makedirs DONE: {}/{}".format(directory_name, package_name))
+        for item in os.listdir(package_folder):
+            self._logger.debug("Item: {}".format(item))
+            if item != "tmp":
+                s = os.path.join(package_folder, item)
+                d = os.path.join(os.path.join(directory_name, package_name), item)
+                if os.path.isdir(s):
+                    if item == "Scripts":
+                        os.makedirs(d, exist_ok=True)
+                        scripts_folder = s
+                        for script_item in os.listdir(scripts_folder):
+                            scripts_destination_folder = os.path.join(d, script_item)
+                            if script_item == "charms":
+                                s_builds = os.path.join(
+                                    scripts_folder, script_item, "builds"
+                                )
+                                for charm in charm_list:
+                                    self._logger.debug("Copying charm {}".format(charm))
+                                    if charm in os.listdir(
+                                        os.path.join(scripts_folder, script_item)
+                                    ):
+                                        s_charm = os.path.join(
+                                            scripts_folder, script_item, charm
+                                        )
+                                    elif charm in os.listdir(s_builds):
+                                        s_charm = os.path.join(s_builds, charm)
+                                    else:
+                                        raise ClientException(
+                                            "The charm {} referenced in the descriptor file "
+                                            "could not be found in {}/charms or in {}/charms/builds".format(
+                                                charm, package_folder, package_folder
+                                            )
+                                        )
+                                    d_temp = os.path.join(
+                                        scripts_destination_folder, charm
+                                    )
+                                    self.copy_tree(s_charm, d_temp, ignore)
+                            else:
+                                self.copy_tree(
+                                    os.path.join(scripts_folder, script_item),
+                                    scripts_destination_folder,
+                                    ignore,
+                                )
+                    else:
+                        self.copy_tree(s, d, ignore)
+                else:
+                    if item in ignore_patterns:
+                        continue
+                    self._logger.debug("Copying file: {} -> {}".format(s, d))
+                    shutil.copy2(s, d)
+                    self._logger.debug("DONE")
+        return directory_name, package_name
+
     def charms_search(self, descriptor_file, desc_type):
         self._logger.debug(
             "descriptor_file: {}, desc_type: {}".format(descriptor_file, desc_type)
diff --git a/osmclient/common/sol004_package.py b/osmclient/common/sol004_package.py
new file mode 100644 (file)
index 0000000..b585bc4
--- /dev/null
@@ -0,0 +1,116 @@
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python module for interacting with ETSI GS NFV-SOL004 compliant packages.
+
+This module provides a SOL004Package class for validating and interacting with
+ETSI SOL004 packages. A valid SOL004 package may have its files arranged according
+to one of the following two structures:
+
+SOL004 with metadata directory    SOL004 without metadata directory
+
+native_charm_vnf/                 native_charm_vnf/
+├── TOSCA-Metadata                ├── native_charm_vnfd.mf
+│   └── TOSCA.meta                ├── native_charm_vnfd.yaml
+├── manifest.mf                   ├── ChangeLog.txt
+├── Definitions                   ├── Licenses
+│   └── native_charm_vnfd.yaml    │   └── license.lic
+├── Files                         ├── Files
+│   ├── icons                     │   └── icons
+│   │   └── osm.png               │       └── osm.png
+│   ├── Licenses                  └── Scripts
+│   │   └── license.lic               ├── cloud_init
+│   └── changelog.txt                 │   └── cloud-config.txt
+└── Scripts                           └── charms
+    ├── cloud_init                        └── simple
+    │   └── cloud-config.txt                  ├── config.yaml
+    └── charms                                ├── hooks
+        └── simple                            │   ├── install
+            ├── config.yaml                  ...
+            ├── hooks                         │
+            │   ├── install                   └── src
+           ...                                    └── charm.py
+            └── src
+                └── charm.py
+"""
+
+import yaml
+import datetime
+import os
+from .sol_package import SOLPackage
+
+
+class SOL004PackageException(Exception):
+    pass
+
+
+class SOL004Package(SOLPackage):
+    _MANIFEST_VNFD_ID = "vnfd_id"
+    _MANIFEST_VNFD_PRODUCT_NAME = "vnfd_product_name"
+    _MANIFEST_VNFD_PROVIDER_ID = "vnfd_provider_id"
+    _MANIFEST_VNFD_SOFTWARE_VERSION = "vnfd_software_version"
+    _MANIFEST_VNFD_PACKAGE_VERSION = "vnfd_package_version"
+    _MANIFEST_VNFD_RELEASE_DATE_TIME = "vnfd_release_date_time"
+    _MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS = (
+        "compatible_specification_versions"
+    )
+    _MANIFEST_VNFM_INFO = "vnfm_info"
+
+    _MANIFEST_ALL_FIELDS = [
+        _MANIFEST_VNFD_ID,
+        _MANIFEST_VNFD_PRODUCT_NAME,
+        _MANIFEST_VNFD_PROVIDER_ID,
+        _MANIFEST_VNFD_SOFTWARE_VERSION,
+        _MANIFEST_VNFD_PACKAGE_VERSION,
+        _MANIFEST_VNFD_RELEASE_DATE_TIME,
+        _MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS,
+        _MANIFEST_VNFM_INFO,
+    ]
+
+    def __init__(self, package_path=""):
+        super().__init__(package_path)
+
+    def generate_manifest_data_from_descriptor(self):
+        descriptor_path = os.path.join(
+            self._package_path, self.get_descriptor_location()
+        )
+        with open(descriptor_path, "r") as descriptor:
+            try:
+                vnfd_data = yaml.safe_load(descriptor)["vnfd"]
+            except yaml.YAMLError as e:
+                print("Error reading descriptor {}: {}".format(descriptor_path, e))
+                return
+
+            self._manifest_metadata = {}
+            self._manifest_metadata[self._MANIFEST_VNFD_ID] = vnfd_data.get(
+                "id", "default-id"
+            )
+            self._manifest_metadata[self._MANIFEST_VNFD_PRODUCT_NAME] = vnfd_data.get(
+                "product-name", "default-product-name"
+            )
+            self._manifest_metadata[self._MANIFEST_VNFD_PROVIDER_ID] = vnfd_data.get(
+                "provider", "OSM"
+            )
+            self._manifest_metadata[
+                self._MANIFEST_VNFD_SOFTWARE_VERSION
+            ] = vnfd_data.get("version", "1.0")
+            self._manifest_metadata[self._MANIFEST_VNFD_PACKAGE_VERSION] = "1.0.0"
+            self._manifest_metadata[self._MANIFEST_VNFD_RELEASE_DATE_TIME] = (
+                datetime.datetime.now().astimezone().isoformat()
+            )
+            self._manifest_metadata[
+                self._MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS
+            ] = "2.7.1"
+            self._manifest_metadata[self._MANIFEST_VNFM_INFO] = "OSM"
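
A hedged sketch of driving this class directly (folder path hypothetical; the manifest path is resolved relative to the working directory, hence the chdir):

    import os
    from osmclient.common.sol004_package import SOL004Package

    package_folder = os.path.abspath("./native_charm_vnf")  # hypothetical SOL004 folder
    os.chdir(package_folder)

    # Regenerate the manifest: vnfd_* metadata plus per-file SHA-512 hash blocks
    SOL004Package(package_folder).create_or_update_metadata_file()

    # Re-read the package and verify every file against the manifest hashes
    SOL004Package(package_folder).validate_package_hashes()
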
diff --git a/osmclient/common/sol007_package.py b/osmclient/common/sol007_package.py
new file mode 100644 (file)
index 0000000..3c1df3a
--- /dev/null
@@ -0,0 +1,110 @@
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python module for interacting with ETSI GS NFV-SOL007 compliant packages.
+
+This module provides a SOL007Package class for validating and interacting with
+ETSI SOL007 packages. A valid SOL007 package may have its files arranged according
+to one of the following two structures:
+
+SOL007 with metadata directory    SOL007 without metadata directory
+
+native_charm_vnf/                 native_charm_vnf/
+├── TOSCA-Metadata                ├── native_charm_nsd.mf
+│   └── TOSCA.meta                ├── native_charm_nsd.yaml
+├── manifest.mf                   ├── ChangeLog.txt
+├── Definitions                   ├── Licenses
+│   └── native_charm_nsd.yaml     │   └── license.lic
+├── Files                         ├── Files
+│   ├── icons                     │   └── icons
+│   │   └── osm.png               │       └── osm.png
+│   ├── Licenses                  └── Scripts
+│   │   └── license.lic               ├── cloud_init
+│   └── changelog.txt                 │   └── cloud-config.txt
+└── Scripts                           └── charms
+    ├── cloud_init                        └── simple
+    │   └── cloud-config.txt                  ├── config.yaml
+    └── charms                                ├── hooks
+        └── simple                            │   ├── install
+            ├── config.yaml                  ...
+            ├── hooks                         │
+            │   ├── install                   └── src
+           ...                                    └── charm.py
+            └── src
+                └── charm.py
+"""
+
+import yaml
+import datetime
+import os
+from .sol_package import SOLPackage
+
+
+class SOL007PackageException(Exception):
+    pass
+
+
+class SOL007Package(SOLPackage):
+    _MANIFEST_NSD_INVARIANT_ID = "nsd_invariant_id"
+    _MANIFEST_NSD_NAME = "nsd_name"
+    _MANIFEST_NSD_DESIGNER = "nsd_designer"
+    _MANIFEST_NSD_FILE_STRUCTURE_VERSION = "nsd_file_structure_version"
+    _MANIFEST_NSD_RELEASE_DATE_TIME = "nsd_release_date_time"
+    _MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS = (
+        "compatible_specification_versions"
+    )
+
+    _MANIFEST_ALL_FIELDS = [
+        _MANIFEST_NSD_INVARIANT_ID,
+        _MANIFEST_NSD_NAME,
+        _MANIFEST_NSD_DESIGNER,
+        _MANIFEST_NSD_FILE_STRUCTURE_VERSION,
+        _MANIFEST_NSD_RELEASE_DATE_TIME,
+        _MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS,
+    ]
+
+    def __init__(self, package_path=""):
+        super().__init__(package_path)
+
+    def generate_manifest_data_from_descriptor(self):
+        descriptor_path = os.path.join(
+            self._package_path, self.get_descriptor_location()
+        )
+        with open(descriptor_path, "r") as descriptor:
+            try:
+                nsd_data = yaml.safe_load(descriptor)["nsd"]
+            except yaml.YAMLError as e:
+                print("Error reading descriptor {}: {}".format(descriptor_path, e))
+                return
+
+            self._manifest_metadata = {}
+            self._manifest_metadata[self._MANIFEST_NSD_INVARIANT_ID] = nsd_data.get(
+                "id", "default-id"
+            )
+            self._manifest_metadata[self._MANIFEST_NSD_NAME] = nsd_data.get(
+                "name", "default-name"
+            )
+            self._manifest_metadata[self._MANIFEST_NSD_DESIGNER] = nsd_data.get(
+                "designer", "OSM"
+            )
+            self._manifest_metadata[
+                self._MANIFEST_NSD_FILE_STRUCTURE_VERSION
+            ] = nsd_data.get("version", "1.0")
+            self._manifest_metadata[self._MANIFEST_NSD_RELEASE_DATE_TIME] = (
+                datetime.datetime.now().astimezone().isoformat()
+            )
+            self._manifest_metadata[
+                self._MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS
+            ] = "2.7.1"
diff --git a/osmclient/common/sol_package.py b/osmclient/common/sol_package.py
new file mode 100644 (file)
index 0000000..62c65c3
--- /dev/null
@@ -0,0 +1,281 @@
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import yaml
+import hashlib
+
+
+class SOLPackageException(Exception):
+    pass
+
+
+class SOLPackage:
+    _METADATA_FILE_PATH = "TOSCA-Metadata/TOSCA.meta"
+    _METADATA_DESCRIPTOR_FIELD = "Entry-Definitions"
+    _METADATA_MANIFEST_FIELD = "ETSI-Entry-Manifest"
+    _METADATA_CHANGELOG_FIELD = "ETSI-Entry-Change-Log"
+    _METADATA_LICENSES_FIELD = "ETSI-Entry-Licenses"
+    _METADATA_DEFAULT_CHANGELOG_PATH = "ChangeLog.txt"
+    _METADATA_DEFAULT_LICENSES_PATH = "Licenses"
+    _MANIFEST_FILE_PATH_FIELD = "Source"
+    _MANIFEST_FILE_HASH_ALGORITHM_FIELD = "Algorithm"
+    _MANIFEST_FILE_HASH_DIGEST_FIELD = "Hash"
+
+    _MANIFEST_ALL_FIELDS = []
+
+    def __init__(self, package_path=""):
+        self._package_path = package_path
+
+        self._package_metadata = self._parse_package_metadata()
+
+        try:
+            self._manifest_data = self._parse_manifest_data()
+        except Exception:
+            self._manifest_data = None
+
+        try:
+            self._manifest_metadata = self._parse_manifest_metadata()
+        except Exception:
+            self._manifest_metadata = None
+
+    def _parse_package_metadata(self):
+        try:
+            return self._parse_package_metadata_with_metadata_dir()
+        except FileNotFoundError:
+            return self._parse_package_metadata_without_metadata_dir()
+
+    def _parse_package_metadata_with_metadata_dir(self):
+        try:
+            return self._parse_file_in_blocks(self._METADATA_FILE_PATH)
+        except FileNotFoundError as e:
+            raise e
+        except (Exception, OSError) as e:
+            raise SOLPackageException(
+                "Error parsing {}: {}".format(self._METADATA_FILE_PATH, e)
+            )
+
+    def _parse_package_metadata_without_metadata_dir(self):
+        package_root_files = {f for f in os.listdir(self._package_path)}
+        package_root_yamls = [
+            f for f in package_root_files if f.endswith(".yml") or f.endswith(".yaml")
+        ]
+        if len(package_root_yamls) != 1:
+            error_msg = "Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}"
+            raise SOLPackageException(error_msg.format(len(package_root_yamls)))
+
+        base_manifest = [
+            {
+                SOLPackage._METADATA_DESCRIPTOR_FIELD: package_root_yamls[0],
+                SOLPackage._METADATA_MANIFEST_FIELD: "{}.mf".format(
+                    os.path.splitext(package_root_yamls[0])[0]
+                ),
+                SOLPackage._METADATA_CHANGELOG_FIELD: SOLPackage._METADATA_DEFAULT_CHANGELOG_PATH,
+                SOLPackage._METADATA_LICENSES_FIELD: SOLPackage._METADATA_DEFAULT_LICENSES_PATH,
+            }
+        ]
+
+        return base_manifest
+
+    def _parse_manifest_data(self):
+        manifest_path = None
+        for tosca_meta in self._package_metadata:
+            if SOLPackage._METADATA_MANIFEST_FIELD in tosca_meta:
+                manifest_path = tosca_meta[SOLPackage._METADATA_MANIFEST_FIELD]
+                break
+        else:
+            error_msg = "Error parsing {}: no {} field on path".format(
+                self._METADATA_FILE_PATH, self._METADATA_MANIFEST_FIELD
+            )
+            raise SOLPackageException(error_msg)
+
+        try:
+            return self._parse_file_in_blocks(manifest_path)
+
+        except (Exception, OSError) as e:
+            raise SOLPackageException("Error parsing {}: {}".format(manifest_path, e))
+
+    def _parse_manifest_metadata(self):
+        try:
+            base_manifest = {}
+            manifest_path = os.path.join(
+                self._package_path, self.get_manifest_location()
+            )
+            with open(manifest_path, "r") as manifest_file:
+                for line in manifest_file:
+                    fields_in_line = line.split(":", maxsplit=1)
+                    if len(fields_in_line) != 2:
+                        continue
+                    fields_in_line[0] = fields_in_line[0].strip()
+                    fields_in_line[1] = fields_in_line[1].strip()
+                    if fields_in_line[0] in self._MANIFEST_ALL_FIELDS:
+                        base_manifest[fields_in_line[0]] = fields_in_line[1]
+            return base_manifest
+        except (Exception, OSError) as e:
+            raise SOLPackageException(
+                "Error parsing manifest metadata: {}".format(e)
+            )
+
+    def _get_package_file_full_path(self, file_relative_path):
+        return os.path.join(self._package_path, file_relative_path)
+
+    def _parse_file_in_blocks(self, file_relative_path):
+        file_path = self._get_package_file_full_path(file_relative_path)
+        with open(file_path) as f:
+            blocks = f.read().split("\n\n")
+        parsed_blocks = map(yaml.safe_load, blocks)
+        return [block for block in parsed_blocks if block is not None]
+
+    def _get_package_file_manifest_data(self, file_relative_path):
+        for file_data in self._manifest_data:
+            if (
+                file_data.get(SOLPackage._MANIFEST_FILE_PATH_FIELD, "")
+                == file_relative_path
+            ):
+                return file_data
+
+        error_msg = (
+            "Error parsing {} manifest data: file not found on manifest file".format(
+                file_relative_path
+            )
+        )
+        raise SOLPackageException(error_msg)
+
+    def get_package_file_hash_digest_from_manifest(self, file_relative_path):
+        """Returns the hash digest of a file inside this package as specified on the manifest file."""
+        file_manifest_data = self._get_package_file_manifest_data(file_relative_path)
+        try:
+            return file_manifest_data[SOLPackage._MANIFEST_FILE_HASH_DIGEST_FIELD]
+        except Exception as e:
+            raise SOLPackageException(
+                "Error parsing {} hash digest: {}".format(file_relative_path, e)
+            )
+
+    def get_package_file_hash_algorithm_from_manifest(self, file_relative_path):
+        """Returns the hash algorithm of a file inside this package as specified on the manifest file."""
+        file_manifest_data = self._get_package_file_manifest_data(file_relative_path)
+        try:
+            return file_manifest_data[SOLPackage._MANIFEST_FILE_HASH_ALGORITHM_FIELD]
+        except Exception as e:
+            raise SOLPackageException(
+                "Error parsing {} hash digest: {}".format(file_relative_path, e)
+            )
+
+    @staticmethod
+    def _get_hash_function_from_hash_algorithm(hash_algorithm):
+        function_to_algorithm = {"SHA-256": hashlib.sha256, "SHA-512": hashlib.sha512}
+        if hash_algorithm not in function_to_algorithm:
+            error_msg = (
+                "Error checking hash function: hash algorithm {} not supported".format(
+                    hash_algorithm
+                )
+            )
+            raise SOLPackageException(error_msg)
+        return function_to_algorithm[hash_algorithm]
+
+    def _calculate_file_hash(self, file_relative_path, hash_algorithm):
+        file_path = self._get_package_file_full_path(file_relative_path)
+        hash_function = self._get_hash_function_from_hash_algorithm(hash_algorithm)
+        try:
+            with open(file_path, "rb") as f:
+                return hash_function(f.read()).hexdigest()
+        except Exception as e:
+            raise SOLPackageException(
+                "Error hashing {}: {}".format(file_relative_path, e)
+            )
+
+    def validate_package_file_hash(self, file_relative_path):
+        """Validates the integrity of a file using the hash algorithm and digest on the package manifest."""
+        hash_algorithm = self.get_package_file_hash_algorithm_from_manifest(
+            file_relative_path
+        )
+        file_hash = self._calculate_file_hash(file_relative_path, hash_algorithm)
+        expected_file_hash = self.get_package_file_hash_digest_from_manifest(
+            file_relative_path
+        )
+        if file_hash != expected_file_hash:
+            error_msg = "Error validating {} hash: calculated hash {} is different than manifest hash {}"
+            raise SOLPackageException(
+                error_msg.format(file_relative_path, file_hash, expected_file_hash)
+            )
+
+    def validate_package_hashes(self):
+        """Validates the integrity of all files listed on the package manifest."""
+        for file_data in self._manifest_data:
+            if SOLPackage._MANIFEST_FILE_PATH_FIELD in file_data:
+                file_relative_path = file_data[SOLPackage._MANIFEST_FILE_PATH_FIELD]
+                self.validate_package_file_hash(file_relative_path)
+
+    def create_or_update_metadata_file(self):
+        """
+        Creates or updates the manifest file with the manifest metadata and the hashes calculated for each of the package's files
+        """
+        if not self._manifest_metadata:
+            self.generate_manifest_data_from_descriptor()
+
+        self.write_manifest_data_into_file()
+
+    def generate_manifest_data_from_descriptor(self):
+        pass
+
+    def write_manifest_data_into_file(self):
+        with open(self.get_manifest_location(), "w") as metadata_file:
+            # Write manifest metadata
+            for metadata_entry in self._manifest_metadata:
+                metadata_file.write(
+                    "{}: {}\n".format(
+                        metadata_entry, self._manifest_metadata[metadata_entry]
+                    )
+                )
+
+            # Write package's files hashes
+            file_hashes = {}
+            for root, dirs, files in os.walk(self._package_path):
+                for a_file in files:
+                    file_path = os.path.join(root, a_file)
+                    file_relative_path = file_path[len(self._package_path) :]
+                    if file_relative_path.startswith("/"):
+                        file_relative_path = file_relative_path[1:]
+                    file_hashes[file_relative_path] = self._calculate_file_hash(
+                        file_relative_path, "SHA-512"
+                    )
+
+            for file, hash in file_hashes.items():
+                file_block = "Source: {}\nAlgorithm: SHA-512\nHash: {}\n\n".format(
+                    file, hash
+                )
+                metadata_file.write(file_block)
+
+    def get_descriptor_location(self):
+        """Returns this package descriptor location as a relative path from the package root."""
+        for tosca_meta in self._package_metadata:
+            if SOLPackage._METADATA_DESCRIPTOR_FIELD in tosca_meta:
+                return tosca_meta[SOLPackage._METADATA_DESCRIPTOR_FIELD]
+
+        error_msg = "Error: no {} entry found on {}".format(
+            SOLPackage._METADATA_DESCRIPTOR_FIELD, SOLPackage._METADATA_FILE_PATH
+        )
+        raise SOLPackageException(error_msg)
+
+    def get_manifest_location(self):
+        """Return the VNF/NS manifest location as a relative path from the package root."""
+        for tosca_meta in self._package_metadata:
+            if SOLPackage._METADATA_MANIFEST_FIELD in tosca_meta:
+                return tosca_meta[SOLPackage._METADATA_MANIFEST_FIELD]
+
+        raise SOLPackageException("No manifest file defined for this package")
diff --git a/osmclient/common/utils.py b/osmclient/common/utils.py
index aed7d29..5336577 100644 (file)
@@ -18,6 +18,7 @@ import time
 from uuid import UUID
 import hashlib
 import tarfile
+from zipfile import ZipFile
 import re
 import yaml
 
@@ -54,8 +55,35 @@ def md5(fname):
 
 
 def get_key_val_from_pkg(descriptor_file):
-    # method opens up a package and finds the name of the resulting
-    # descriptor (vnfd or nsd name)
+    if descriptor_file.split(".")[-1] == "zip":
+        return get_key_val_from_pkg_sol004(descriptor_file)
+    else:
+        return get_key_val_from_pkg_old(descriptor_file)
+
+
+def get_key_val_from_pkg_sol004(package_file):
+    """Method opens up a package and finds the name of the resulting
+    descriptor (vnfd or nsd name), using SOL004 spec
+    """
+    with ZipFile(package_file) as zipfile:
+        yamlfile = None
+        for filedata in zipfile.infolist():
+            if (
+                re.match(".*.yaml", filedata.filename)
+                and filedata.filename.find("Scripts") < 0
+            ):
+                yamlfile = filedata.filename
+                break
+        if yamlfile is None:
+            return None
+
+        return get_key_val_from_descriptor(zipfile.open(yamlfile))
+
+
+def get_key_val_from_pkg_old(descriptor_file):
+    """Method opens up a package and finds the name of the resulting
+    descriptor (vnfd or nsd name)
+    """
     tar = tarfile.open(descriptor_file)
     yamlfile = None
     for member in tar.getmembers():
@@ -65,13 +93,21 @@ def get_key_val_from_pkg(descriptor_file):
     if yamlfile is None:
         return None
 
-    dict = yaml.safe_load(tar.extractfile(yamlfile))
+    result = get_key_val_from_descriptor(tar.extractfile(yamlfile))
+
+    tar.close()
+    return result
+
+
+def get_key_val_from_descriptor(descriptor):
+    dict = yaml.safe_load(descriptor)
     result = {}
     for k in dict:
         if "nsd" in k:
             result["type"] = "nsd"
         else:
             result["type"] = "vnfd"
+
     if "type" not in result:
         for k1, v1 in list(dict.items()):
             if not k1.endswith("-catalog"):
@@ -88,6 +124,4 @@ def get_key_val_from_pkg(descriptor_file):
                         # strip off preceeding chars before :
                         key_name = k3.split(":").pop()
                         result[key_name] = v3
-
-    tar.close()
     return result
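
With this change, get_key_val_from_pkg() dispatches on the file extension: .zip packages are read through the SOL004/SOL007 path, anything else through the legacy tar.gz path. A minimal sketch (package file name hypothetical):

    from osmclient.common import utils

    info = utils.get_key_val_from_pkg("native_charm_vnf.zip")  # hypothetical package
    if info is None:
        print("No descriptor found inside the package")
    elif info["type"] == "nsd":
        print("NS package")
    else:
        print("VNF package")
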
diff --git a/osmclient/scripts/osm.py b/osmclient/scripts/osm.py
index 30aa33f..2182346 100755 (executable)
@@ -2603,7 +2603,9 @@ def pdu_create(
 
     check_client_version(ctx.obj, ctx.command.name)
 
-    pdu = create_pdu_dictionary(name, pdu_type, interface, description, vim_account, descriptor_file)
+    pdu = create_pdu_dictionary(
+        name, pdu_type, interface, description, vim_account, descriptor_file
+    )
     ctx.obj.pdu.create(pdu)
 
 
@@ -2651,11 +2653,15 @@ def pdu_update(
     if not newname:
         newname = name
 
-    pdu = create_pdu_dictionary(newname, pdu_type, interface, description, vim_account, descriptor_file, update)
+    pdu = create_pdu_dictionary(
+        newname, pdu_type, interface, description, vim_account, descriptor_file, update
+    )
     ctx.obj.pdu.update(name, pdu)
 
 
-def create_pdu_dictionary(name, pdu_type, interface, description, vim_account, descriptor_file, update=False):
+def create_pdu_dictionary(
+    name, pdu_type, interface, description, vim_account, descriptor_file, update=False
+):
 
     logger.debug("")
     pdu = {}
@@ -2698,6 +2704,7 @@ def create_pdu_dictionary(name, pdu_type, interface, description, vim_account, d
         pdu["interfaces"] = ifaces_list
     return pdu
 
+
 ####################
 # UPDATE operations
 ####################
@@ -5808,7 +5815,10 @@ def role_show(ctx, name):
     "--netslice-vlds", default=1, help="(NST) Number of netslice vlds. Default 1"
 )
 @click.option(
-    "--old", default=False, is_flag=True, help="Flag to create a descriptor using the previous OSM format (pre SOL006, OSM<9)"
+    "--old",
+    default=False,
+    is_flag=True,
+    help="Flag to create a descriptor using the previous OSM format (pre SOL006, OSM<9)",
 )
 @click.pass_context
 def package_create(
diff --git a/osmclient/sol005/k8scluster.py b/osmclient/sol005/k8scluster.py
index a0b95e0..ff6822e 100644 (file)
@@ -47,7 +47,9 @@ class K8scluster(object):
         self._logger.debug("")
         self._client.get_token()
         # Endpoint to get operation status
-        apiUrlStatus = "{}{}{}".format(self._apiName, self._apiVersion, self._apiResource)
+        apiUrlStatus = "{}{}{}".format(
+            self._apiName, self._apiVersion, self._apiResource
+        )
         # Wait for status for VIM instance creation/deletion
         if isinstance(wait_time, bool):
             wait_time = WaitForStatus.TIMEOUT_VIM_OPERATION
@@ -70,8 +72,8 @@ class K8scluster(object):
             endpoint=self._apiBase, postfields_dict=k8s_cluster
         )
 
-        self._logger.debug('HTTP CODE: {}'.format(http_code))
-        self._logger.debug('RESP: {}'.format(resp))
+        self._logger.debug("HTTP CODE: {}".format(http_code))
+        self._logger.debug("RESP: {}".format(resp))
 
         if resp:
             resp = json.loads(resp)
@@ -107,8 +109,8 @@ class K8scluster(object):
             wait_id = cluster["_id"]
             self._wait(wait_id, wait)
 
-        self._logger.debug('HTTP CODE: {}'.format(http_code))
-        self._logger.debug('RESP: {}'.format(resp))
+        self._logger.debug("HTTP CODE: {}".format(http_code))
+        self._logger.debug("RESP: {}".format(resp))
 
         if http_code in (200, 201, 202, 204):
             print("Updated")
@@ -119,7 +121,9 @@ class K8scluster(object):
                     msg = json.loads(resp)
                 except ValueError:
                     msg = resp
-            raise ClientException("failed to update K8s cluster {} - {}".format(name, msg))
+            raise ClientException(
+                "failed to update K8s cluster {} - {}".format(name, msg)
+            )
 
     def get_id(self, name):
         """Returns a K8s cluster id from a K8s cluster name"""
@@ -140,8 +144,8 @@ class K8scluster(object):
             "{}/{}{}".format(self._apiBase, cluster_id, querystring)
         )
 
-        self._logger.debug('HTTP CODE: {}'.format(http_code))
-        self._logger.debug('RESP: {}'.format(resp))
+        self._logger.debug("HTTP CODE: {}".format(http_code))
+        self._logger.debug("RESP: {}".format(resp))
 
         if http_code == 202:
             if wait:
diff --git a/osmclient/sol005/osmrepo.py b/osmclient/sol005/osmrepo.py
index 3568e8c..02318de 100644 (file)
@@ -96,7 +96,7 @@ class OSMRepo(Repo):
                     "Error cannot read from repository {} '{}': {}".format(
                         repository["name"], repository["url"], e
                     ),
-                    exc_info=True
+                    exc_info=True,
                 )
                 continue
 
@@ -188,10 +188,16 @@ class OSMRepo(Repo):
         artifacts = []
         directories = []
         for f in listdir(origin):
-            if isfile(join(origin, f)) and f.endswith('.tar.gz'):
+            if isfile(join(origin, f)) and f.endswith(".tar.gz"):
                 artifacts.append(f)
-            elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
-                directories.append(f)  # TODO: Document that nested directories are not supported
+            elif (
+                isdir(join(origin, f))
+                and f != destination.split("/")[-1]
+                and not f.startswith(".")
+            ):
+                directories.append(
+                    f
+                )  # TODO: Document that nested directories are not supported
             else:
                 self._logger.debug(f"Ignoring {f}")
         for artifact in artifacts:
@@ -247,27 +253,33 @@ class OSMRepo(Repo):
             if descriptor_dict.get("vnfd-catalog", False):
                 aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
             elif descriptor_dict.get("vnfd:vnfd-catalog"):
-                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
+                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
+                    "vnfd", [{}]
+                )[0]
             elif descriptor_dict.get("vnfd"):
                 aux_dict = descriptor_dict["vnfd"]
                 if aux_dict.get("vnfd"):
-                    aux_dict = aux_dict['vnfd'][0]
+                    aux_dict = aux_dict["vnfd"][0]
             else:
                 msg = f"Unexpected descriptor format {descriptor_dict}"
                 self._logger.error(msg)
                 raise ValueError(msg)
-            self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
+            self._logger.debug(
+                f"Extracted descriptor info for {package_type}: {aux_dict}"
+            )
             images = []
-            for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
-                images.append(vdu.get("image", vdu.get('name')))
+            for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
+                images.append(vdu.get("image", vdu.get("name")))
             fields["images"] = images
         elif package_type == "ns":
             if descriptor_dict.get("nsd-catalog", False):
                 aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
             elif descriptor_dict.get("nsd:nsd-catalog"):
-                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
+                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
+                    0
+                ]
             elif descriptor_dict.get("nsd"):
-                aux_dict = descriptor_dict['nsd']
+                aux_dict = descriptor_dict["nsd"]
                 if aux_dict.get("nsd"):
                     aux_dict = descriptor_dict["nsd"]["nsd"][0]
             else:
@@ -279,16 +291,18 @@ class OSMRepo(Repo):
                 for vnf in aux_dict.get("constituent-vnfd", ()):
                     vnfs.append(vnf.get("vnfd-id-ref"))
             else:
-                vnfs = aux_dict.get('vnfd-id')
+                vnfs = aux_dict.get("vnfd-id")
             self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
             fields["vnfd-id-ref"] = vnfs
-        elif package_type == 'nst':
+        elif package_type == "nst":
             if descriptor_dict.get("nst-catalog", False):
                 aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
             elif descriptor_dict.get("nst:nst-catalog"):
-                aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
+                aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
+                    0
+                ]
             elif descriptor_dict.get("nst"):
-                aux_dict = descriptor_dict['nst']
+                aux_dict = descriptor_dict["nst"]
                 if aux_dict.get("nst"):
                     aux_dict = descriptor_dict["nst"]["nst"][0]
             nsds = []
diff --git a/osmclient/sol005/package.py b/osmclient/sol005/package.py
index 1fa0e24..79125d8 100644 (file)
@@ -94,7 +94,11 @@ class Package(object):
             # endpoint = '/nsds' if pkg_type['type'] == 'nsd' else '/vnfds'
             # print('Endpoint: {}'.format(endpoint))
             headers = self._client._headers
-            headers["Content-Type"] = "application/gzip"
+
+            if filename.endswith(".tar.gz"):
+                headers["Content-Type"] = "application/gzip"
+            else:
+                headers["Content-Type"] = "application/zip"
             # headers['Content-Type'] = 'application/binary'
             # Next three lines are to be removed in next version
             # headers['Content-Filename'] = basename(filename)
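
The upload now keys the Content-Type header on the archive extension; the equivalent logic in isolation (file names hypothetical):

    def content_type_for(filename):
        # .tar.gz keeps the legacy gzip type; SOL004/SOL007 .zip packages use zip
        return "application/gzip" if filename.endswith(".tar.gz") else "application/zip"

    assert content_type_for("native_charm_vnf.tar.gz") == "application/gzip"
    assert content_type_for("native_charm_vnf.zip") == "application/zip"
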
diff --git a/tox.ini b/tox.ini
index 083443b..2dc8110 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -114,7 +114,8 @@ ignore =
         E123,
         E125,
         E226,
-        E241
+        E241,
+        E203
 exclude =
         .git,
         __pycache__,