--- /dev/null
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import osmclient.common.utils as utils
+import os
+
+SOL004_TOSCA = "SOL004_TOSCA"
+SOL004 = "SOL004"
+SOL007_TOSCA = "SOL007_TOSCA"
+SOL007 = "SOL007"
+OSM_OLD = "OSM_OLD"
+
+
+def get_package_type(package_folder):
+ """
+    Detects the package's structure and returns its type:
+    SOL004_TOSCA
+    SOL004
+    SOL007_TOSCA
+    SOL007
+    OSM_OLD
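+
+    Doctest-style sketch (hedged; folder names are hypothetical):
+
+        >>> get_package_type("native_charm_vnf")  # has TOSCA-Metadata/ and Definitions/
+        'SOL004_TOSCA'
+        >>> get_package_type("old_vnf")  # no .mf manifest in the root folder
+        'OSM_OLD'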
+ """
+
+ package_files = os.listdir(package_folder)
+ if "Definitions" in package_files and "TOSCA-Metadata" in package_files:
+ descriptors = [
+ definition
+ for definition in os.listdir(package_folder + "/Definitions")
+ if definition.endswith(".yaml") or definition.endswith(".yml")
+ ]
+ if len(descriptors) < 1:
+ raise Exception(
+ "No descriptor found on this package, OSM was expecting at least 1"
+ )
+        with open(
+            os.path.join(package_folder, "Definitions", descriptors[0])
+        ) as descriptor:
+            pkg_type = utils.get_key_val_from_descriptor(descriptor)
+        if pkg_type["type"] == "nsd":
+            return SOL007_TOSCA
+        else:
+            return SOL004_TOSCA
+ else:
+ manifests = [afile for afile in package_files if afile.endswith(".mf")]
+ if len(manifests) < 1:
+ # No manifest found, probably old OSM package structure
+ return OSM_OLD
+ else:
+ descriptors = [
+ definition
+ for definition in package_files
+ if definition.endswith(".yaml") or definition.endswith(".yml")
+ ]
+ if len(descriptors) < 1:
+ raise Exception(
+ "No descriptor found on this package, OSM was expecting at least 1"
+ )
+ with open(os.path.join(package_folder, descriptors[0])) as descriptor:
+ pkg_type = utils.get_key_val_from_descriptor(descriptor)
+ if pkg_type["type"] == "nsd":
+ return SOL007
+ else:
+ return SOL004
import subprocess
import tarfile
import time
-
from jinja2 import Environment, PackageLoader
from osm_im.validation import Validation as validation_im
from osm_im.validation import ValidationException
from osm_im import im_translation
+from osmclient.common import package_handling as package_handling
from osmclient.common.exceptions import ClientException
+from .sol004_package import SOL004Package
+from .sol007_package import SOL007Package
import yaml
raise ClientException(
"No descriptor file found in: {}".format(package_folder)
)
- charm_list = self.build_all_charms(package_folder, skip_charm_build)
- return self.build_tarfile(package_folder, charm_list)
+
+ is_sol004_007 = (
+ package_handling.get_package_type(package_folder)
+ != package_handling.OSM_OLD
+ )
+
+ charm_list = self.build_all_charms(
+ package_folder, skip_charm_build, is_sol004_007
+ )
+ return self.build_compressed_file(package_folder, charm_list, is_sol004_007)
def calculate_checksum(self, package_folder):
"""
return missing_paths
- def build_all_charms(self, package_folder, skip_charm_build):
+ def build_all_charms(self, package_folder, skip_charm_build, sol004_007=True):
"""
        **Reads the descriptor file, checks that the referenced charms are present in the folder, and compiles them**
self._logger.debug("")
charms_set = set()
descriptor_file = False
- descriptors_paths = [f for f in glob.glob(package_folder + "/*.yaml")]
+ package_type = package_handling.get_package_type(package_folder)
+        if sol004_007 and "TOSCA" in package_type:
+ descriptors_paths = [
+ f for f in glob.glob(package_folder + "/Definitions/*.yaml")
+ ]
+ else:
+ descriptors_paths = [f for f in glob.glob(package_folder + "/*.yaml")]
for file in descriptors_paths:
if file.endswith("nfd.yaml"):
descriptor_file = True
if charms_set and not skip_charm_build:
for charmName in charms_set:
if os.path.isdir(
- "{}/charms/layers/{}".format(package_folder, charmName)
+ "{}/{}charms/layers/{}".format(
+ package_folder, "Scripts/" if sol004_007 else "", charmName
+ )
):
print(
- "Building charm {}/charms/layers/{}".format(
- package_folder, charmName
+ "Building charm {}/{}charms/layers/{}".format(
+ package_folder, "Scripts/" if sol004_007 else "", charmName
)
)
- self.charm_build(package_folder, charmName)
+ self.charm_build(package_folder, charmName, sol004_007)
print("Charm built: {}".format(charmName))
elif os.path.isdir(
- "{}/charms/ops/{}".format(package_folder, charmName)
+ "{}/{}charms/ops/{}".format(
+ package_folder, "Scripts/" if sol004_007 else "", charmName
+ )
):
self.charmcraft_build(package_folder, charmName)
else:
if not os.path.isdir(
- "{}/charms/{}".format(package_folder, charmName)
+ "{}/{}charms/{}".format(
+ package_folder, "Scripts/" if sol004_007 else "", charmName
+ )
) and not os.path.isfile(
- "{}/charms/{}".format(package_folder, charmName)
+ "{}/{}charms/{}".format(
+ package_folder, "Scripts/" if sol004_007 else "", charmName
+ )
):
raise ClientException(
"The charm: {} referenced in the descriptor file "
def discover_folder_structure(self, base_directory, name, override):
"""
- **Discover files and folders structure for OSM descriptors given a base_directory and name**
+        **Discovers the file and folder structure for SOL004/SOL007 descriptors, given a base_directory and name**
:params:
- base_directory: is the location of the package to be created
files_folders = {
"folders": [
("{}_ns".format(prefix), "ns"),
- ("{}_ns/icons".format(prefix), "ns"),
- ("{}_ns/charms".format(prefix), "ns"),
+ ("{}_ns/Licenses".format(prefix), "ns"),
+ ("{}_ns/Files/icons".format(prefix), "ns"),
+ ("{}_ns/Scripts/charms".format(prefix), "ns"),
("{}_vnf".format(name), "vnf"),
- ("{}_vnf/charms".format(prefix), "vnf"),
- ("{}_vnf/cloud_init".format(prefix), "vnf"),
- ("{}_vnf/images".format(prefix), "vnf"),
- ("{}_vnf/icons".format(prefix), "vnf"),
- ("{}_vnf/scripts".format(prefix), "vnf"),
+ ("{}_vnf/Licenses".format(prefix), "vnf"),
+ ("{}_vnf/Scripts/charms".format(prefix), "vnf"),
+ ("{}_vnf/Scripts/cloud_init".format(prefix), "vnf"),
+ ("{}_vnf/Files/images".format(prefix), "vnf"),
+ ("{}_vnf/Files/icons".format(prefix), "vnf"),
+ ("{}_vnf/Scripts/scripts".format(prefix), "vnf"),
("{}_nst".format(prefix), "nst"),
("{}_nst/icons".format(prefix), "nst"),
],
("{}_ns/README.md".format(prefix), "ns", "readme"),
("{}_vnf/{}_vnfd.yaml".format(prefix, name), "vnf", "descriptor"),
(
- "{}_vnf/cloud_init/cloud-config.txt".format(prefix),
+ "{}_vnf/Scripts/cloud_init/cloud-config.txt".format(prefix),
"vnf",
"cloud_init",
),
# print("Missing files and folders: {}".format(missing_files_folders))
return missing_files_folders
- def charm_build(self, charms_folder, build_name):
+ def charm_build(self, charms_folder, build_name, sol004_007=True):
"""
Build the charms inside the package.
        params: charms_folder is the name of the folder where the charms to be compiled are located.
build_name is the name of the layer or interface
"""
self._logger.debug("")
- os.environ["JUJU_REPOSITORY"] = "{}/charms".format(charms_folder)
+
+ if sol004_007:
+ os.environ["JUJU_REPOSITORY"] = "{}/Scripts/charms".format(charms_folder)
+ else:
+ os.environ["JUJU_REPOSITORY"] = "{}/charms".format(charms_folder)
+
os.environ["CHARM_LAYERS_DIR"] = "{}/layers".format(
os.environ["JUJU_REPOSITORY"]
)
os.environ["CHARM_INTERFACES_DIR"] = "{}/interfaces".format(
os.environ["JUJU_REPOSITORY"]
)
- os.environ["CHARM_BUILD_DIR"] = "{}/charms/builds".format(charms_folder)
+
+ if sol004_007:
+ os.environ["CHARM_BUILD_DIR"] = "{}/Scripts/charms/builds".format(
+ charms_folder
+ )
+ else:
+ os.environ["CHARM_BUILD_DIR"] = "{}/charms/builds".format(charms_folder)
+
if not os.path.exists(os.environ["CHARM_BUILD_DIR"]):
os.makedirs(os.environ["CHARM_BUILD_DIR"])
src_folder = "{}/{}".format(os.environ["CHARM_LAYERS_DIR"], build_name)
build_name is the name of the layer or interface
"""
self._logger.debug("Building charm {}".format(charm_name))
- src_folder = f"{package_folder}/charms/ops/{charm_name}"
+ src_folder = f"{package_folder}/Scripts/charms/ops/{charm_name}"
current_directory = os.getcwd()
os.chdir(src_folder)
try:
finally:
os.chdir(current_directory)
+ def build_compressed_file(self, package_folder, charm_list=None, sol004_007=True):
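+        """Compresses the package folder: a .zip for SOL004/SOL007 packages, a
+        .tar.gz for old-style OSM packages, as selected by the sol004_007 flag.
+        """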
+ if sol004_007:
+ return self.build_zipfile(package_folder, charm_list)
+ else:
+ return self.build_tarfile(package_folder, charm_list)
+
+ def build_zipfile(self, package_folder, charm_list=None):
+ """
+ Creates a zip file given a package_folder
+ params: package_folder is the name of the folder to be packaged
+ returns: .zip name
+ """
+ self._logger.debug("")
+ cwd = None
+ try:
+ directory_name, package_name = self.create_temp_dir_sol004_007(
+ package_folder, charm_list
+ )
+ cwd = os.getcwd()
+ os.chdir(directory_name)
+ package_type = package_handling.get_package_type(package_folder)
+            self._logger.debug("Package type: {}".format(package_type))
+
+ if (
+ package_handling.SOL007 == package_type
+ or package_handling.SOL007_TOSCA == package_type
+ ):
+ the_package = SOL007Package(package_folder)
+            elif (
+                package_handling.SOL004 == package_type
+                or package_handling.SOL004_TOSCA == package_type
+            ):
+                the_package = SOL004Package(package_folder)
+            else:
+                raise ClientException(
+                    "Unexpected package type {}".format(package_type)
+                )
+
+ the_package.create_or_update_metadata_file()
+
+ the_zip_package = shutil.make_archive(
+ os.path.join(cwd, package_name),
+ "zip",
+ os.path.join(directory_name, package_name),
+ )
+
+ print("Package created: {}".format(the_zip_package))
+
+ return the_zip_package
+
+ except Exception as exc:
+ raise ClientException(
+ "failure during build of zip file (create temp dir, calculate checksum, "
+ "zip file): {}".format(exc)
+ )
+        finally:
+            if cwd:
+                os.chdir(cwd)
+            tmp_dir = os.path.join(package_folder, "tmp")
+            if os.path.isdir(tmp_dir):
+                shutil.rmtree(tmp_dir)
+
def build_tarfile(self, package_folder, charm_list=None):
"""
Creates a .tar.gz file given a package_folder
self._logger.debug("DONE")
return directory_name, package_name
+ def copy_tree(self, s, d, ignore):
+ self._logger.debug("Copying tree: {} -> {}".format(s, d))
+ shutil.copytree(s, d, symlinks=True, ignore=ignore)
+ self._logger.debug("DONE")
+
+ def create_temp_dir_sol004_007(self, package_folder, charm_list=None):
+ """
+        Creates a temporary folder under package_folder and copies the
+        package files into it, keeping only the charms listed in charm_list
+ """
+ self._logger.debug("")
+        ignore_patterns = (".gitignore",)
+        ignore = shutil.ignore_patterns(*ignore_patterns)
+ directory_name = os.path.abspath(package_folder)
+ package_name = os.path.basename(directory_name)
+ directory_name += "/tmp"
+ os.makedirs("{}/{}".format(directory_name, package_name), exist_ok=True)
+ self._logger.debug("Makedirs DONE: {}/{}".format(directory_name, package_name))
+ for item in os.listdir(package_folder):
+ self._logger.debug("Item: {}".format(item))
+ if item != "tmp":
+ s = os.path.join(package_folder, item)
+ d = os.path.join(os.path.join(directory_name, package_name), item)
+ if os.path.isdir(s):
+ if item == "Scripts":
+ os.makedirs(d, exist_ok=True)
+ scripts_folder = s
+ for script_item in os.listdir(scripts_folder):
+ scripts_destination_folder = os.path.join(d, script_item)
+ if script_item == "charms":
+ s_builds = os.path.join(
+ scripts_folder, script_item, "builds"
+ )
+ for charm in charm_list:
+ self._logger.debug("Copying charm {}".format(charm))
+ if charm in os.listdir(
+ os.path.join(scripts_folder, script_item)
+ ):
+ s_charm = os.path.join(
+ scripts_folder, script_item, charm
+ )
+ elif charm in os.listdir(s_builds):
+ s_charm = os.path.join(s_builds, charm)
+ else:
+ raise ClientException(
+ "The charm {} referenced in the descriptor file "
+ "could not be found in {}/charms or in {}/charms/builds".format(
+ charm, package_folder, package_folder
+ )
+ )
+ d_temp = os.path.join(
+ scripts_destination_folder, charm
+ )
+ self.copy_tree(s_charm, d_temp, ignore)
+ else:
+ self.copy_tree(
+ os.path.join(scripts_folder, script_item),
+ scripts_destination_folder,
+ ignore,
+ )
+ else:
+ self.copy_tree(s, d, ignore)
+ else:
+ if item in ignore_patterns:
+ continue
+ self._logger.debug("Copying file: {} -> {}".format(s, d))
+ shutil.copy2(s, d)
+ self._logger.debug("DONE")
+ return directory_name, package_name
+
def charms_search(self, descriptor_file, desc_type):
self._logger.debug(
"descriptor_file: {}, desc_type: {}".format(descriptor_file, desc_type)
--- /dev/null
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python module for interacting with ETSI GS NFV-SOL004 compliant packages.
+
+This module provides a SOL004Package class for validating and interacting with
+ETSI SOL004 packages. A valid SOL004 package may have its files arranged according
+to one of the following two structures:
+
+SOL004 with metadata directory SOL004 without metadata directory
+
+native_charm_vnf/ native_charm_vnf/
+├── TOSCA-Metadata ├── native_charm_vnfd.mf
+│ └── TOSCA.meta ├── native_charm_vnfd.yaml
+├── manifest.mf ├── ChangeLog.txt
+├── Definitions ├── Licenses
+│ └── native_charm_vnfd.yaml │ └── license.lic
+├── Files ├── Files
+│ ├── icons │ └── icons
+│ │ └── osm.png │ └── osm.png
+│ ├── Licenses └── Scripts
+│ │ └── license.lic ├── cloud_init
+│ └── changelog.txt │ └── cloud-config.txt
+└── Scripts └── charms
+ ├── cloud_init └── simple
+ │ └── cloud-config.txt ├── config.yaml
+ └── charms ├── hooks
+ └── simple │ ├── install
+ ├── config.yaml ...
+ ├── hooks │
+ │ ├── install └── src
+ ... └── charm.py
+ └── src
+ └── charm.py
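+
+A minimal usage sketch (hedged; the folder name is hypothetical and the import
+path assumes this module lives in osmclient/common):
+
+    from osmclient.common.sol004_package import SOL004Package
+
+    package = SOL004Package("native_charm_vnf")
+    package.create_or_update_metadata_file()  # regenerate manifest and hashes
+    package.validate_package_hashes()  # verify every digest in the manifest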
+"""
+
+import yaml
+import datetime
+import os
+from .sol_package import SOLPackage
+
+
+class SOL004PackageException(Exception):
+ pass
+
+
+class SOL004Package(SOLPackage):
+ _MANIFEST_VNFD_ID = "vnfd_id"
+ _MANIFEST_VNFD_PRODUCT_NAME = "vnfd_product_name"
+ _MANIFEST_VNFD_PROVIDER_ID = "vnfd_provider_id"
+ _MANIFEST_VNFD_SOFTWARE_VERSION = "vnfd_software_version"
+ _MANIFEST_VNFD_PACKAGE_VERSION = "vnfd_package_version"
+ _MANIFEST_VNFD_RELEASE_DATE_TIME = "vnfd_release_date_time"
+ _MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS = (
+ "compatible_specification_versions"
+ )
+ _MANIFEST_VNFM_INFO = "vnfm_info"
+
+ _MANIFEST_ALL_FIELDS = [
+ _MANIFEST_VNFD_ID,
+ _MANIFEST_VNFD_PRODUCT_NAME,
+ _MANIFEST_VNFD_PROVIDER_ID,
+ _MANIFEST_VNFD_SOFTWARE_VERSION,
+ _MANIFEST_VNFD_PACKAGE_VERSION,
+ _MANIFEST_VNFD_RELEASE_DATE_TIME,
+ _MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS,
+ _MANIFEST_VNFM_INFO,
+ ]
+
+ def __init__(self, package_path=""):
+ super().__init__(package_path)
+
+ def generate_manifest_data_from_descriptor(self):
+ descriptor_path = os.path.join(
+ self._package_path, self.get_descriptor_location()
+ )
+ with open(descriptor_path, "r") as descriptor:
+ try:
+ vnfd_data = yaml.safe_load(descriptor)["vnfd"]
+ except yaml.YAMLError as e:
+ print("Error reading descriptor {}: {}".format(descriptor_path, e))
+ return
+
+ self._manifest_metadata = {}
+ self._manifest_metadata[self._MANIFEST_VNFD_ID] = vnfd_data.get(
+ "id", "default-id"
+ )
+ self._manifest_metadata[self._MANIFEST_VNFD_PRODUCT_NAME] = vnfd_data.get(
+ "product-name", "default-product-name"
+ )
+ self._manifest_metadata[self._MANIFEST_VNFD_PROVIDER_ID] = vnfd_data.get(
+ "provider", "OSM"
+ )
+ self._manifest_metadata[
+ self._MANIFEST_VNFD_SOFTWARE_VERSION
+ ] = vnfd_data.get("version", "1.0")
+ self._manifest_metadata[self._MANIFEST_VNFD_PACKAGE_VERSION] = "1.0.0"
+ self._manifest_metadata[self._MANIFEST_VNFD_RELEASE_DATE_TIME] = (
+ datetime.datetime.now().astimezone().isoformat()
+ )
+ self._manifest_metadata[
+ self._MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS
+ ] = "2.7.1"
+ self._manifest_metadata[self._MANIFEST_VNFM_INFO] = "OSM"
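+
+    # Illustrative shape of the generated metadata (the values shown are the
+    # fallbacks used above when a field is missing from the descriptor):
+    #   {"vnfd_id": "default-id", "vnfd_product_name": "default-product-name",
+    #    "vnfd_provider_id": "OSM", "vnfd_software_version": "1.0",
+    #    "vnfd_package_version": "1.0.0", "vnfm_info": "OSM", ...}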
--- /dev/null
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Python module for interacting with ETSI GS NFV-SOL007 compliant packages.
+
+This module provides a SOL007Package class for validating and interacting with
+ETSI SOL007 packages. A valid SOL007 package may have its files arranged according
+to one of the following two structures:
+
+SOL007 with metadata directory SOL007 without metadata directory
+
+native_charm_ns/                                   native_charm_ns/
+├── TOSCA-Metadata ├── native_charm_nsd.mf
+│ └── TOSCA.meta ├── native_charm_nsd.yaml
+├── manifest.mf ├── ChangeLog.txt
+├── Definitions ├── Licenses
+│ └── native_charm_nsd.yaml │ └── license.lic
+├── Files ├── Files
+│ ├── icons │ └── icons
+│ │ └── osm.png │ └── osm.png
+│ ├── Licenses └── Scripts
+│ │ └── license.lic ├── cloud_init
+│ └── changelog.txt │ └── cloud-config.txt
+└── Scripts └── charms
+ ├── cloud_init └── simple
+ │ └── cloud-config.txt ├── config.yaml
+ └── charms ├── hooks
+ └── simple │ ├── install
+ ├── config.yaml ...
+ ├── hooks │
+ │ ├── install └── src
+ ... └── charm.py
+ └── src
+ └── charm.py
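+
+A minimal usage sketch (hedged; the folder name is hypothetical and the import
+path assumes this module lives in osmclient/common):
+
+    from osmclient.common.sol007_package import SOL007Package
+
+    package = SOL007Package("native_charm_ns")
+    package.create_or_update_metadata_file()  # regenerate manifest and hashes
+    package.validate_package_hashes()  # verify every digest in the manifest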
+"""
+
+import yaml
+import datetime
+import os
+from .sol_package import SOLPackage
+
+
+class SOL007PackageException(Exception):
+ pass
+
+
+class SOL007Package(SOLPackage):
+ _MANIFEST_NSD_INVARIANT_ID = "nsd_invariant_id"
+ _MANIFEST_NSD_NAME = "nsd_name"
+ _MANIFEST_NSD_DESIGNER = "nsd_designer"
+ _MANIFEST_NSD_FILE_STRUCTURE_VERSION = "nsd_file_structure_version"
+ _MANIFEST_NSD_RELEASE_DATE_TIME = "nsd_release_date_time"
+ _MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS = (
+ "compatible_specification_versions"
+ )
+
+ _MANIFEST_ALL_FIELDS = [
+ _MANIFEST_NSD_INVARIANT_ID,
+ _MANIFEST_NSD_NAME,
+ _MANIFEST_NSD_DESIGNER,
+ _MANIFEST_NSD_FILE_STRUCTURE_VERSION,
+ _MANIFEST_NSD_RELEASE_DATE_TIME,
+ _MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS,
+ ]
+
+ def __init__(self, package_path=""):
+ super().__init__(package_path)
+
+ def generate_manifest_data_from_descriptor(self):
+ descriptor_path = os.path.join(
+ self._package_path, self.get_descriptor_location()
+ )
+ with open(descriptor_path, "r") as descriptor:
+ try:
+ nsd_data = yaml.safe_load(descriptor)["nsd"]
+ except yaml.YAMLError as e:
+ print("Error reading descriptor {}: {}".format(descriptor_path, e))
+ return
+
+ self._manifest_metadata = {}
+ self._manifest_metadata[self._MANIFEST_NSD_INVARIANT_ID] = nsd_data.get(
+ "id", "default-id"
+ )
+ self._manifest_metadata[self._MANIFEST_NSD_NAME] = nsd_data.get(
+ "name", "default-name"
+ )
+ self._manifest_metadata[self._MANIFEST_NSD_DESIGNER] = nsd_data.get(
+ "designer", "OSM"
+ )
+ self._manifest_metadata[
+ self._MANIFEST_NSD_FILE_STRUCTURE_VERSION
+ ] = nsd_data.get("version", "1.0")
+ self._manifest_metadata[self._MANIFEST_NSD_RELEASE_DATE_TIME] = (
+ datetime.datetime.now().astimezone().isoformat()
+ )
+ self._manifest_metadata[
+ self._MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS
+ ] = "2.7.1"
--- /dev/null
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import yaml
+import hashlib
+
+
+class SOLPackageException(Exception):
+ pass
+
+
+class SOLPackage:
+ _METADATA_FILE_PATH = "TOSCA-Metadata/TOSCA.meta"
+ _METADATA_DESCRIPTOR_FIELD = "Entry-Definitions"
+ _METADATA_MANIFEST_FIELD = "ETSI-Entry-Manifest"
+ _METADATA_CHANGELOG_FIELD = "ETSI-Entry-Change-Log"
+ _METADATA_LICENSES_FIELD = "ETSI-Entry-Licenses"
+ _METADATA_DEFAULT_CHANGELOG_PATH = "ChangeLog.txt"
+ _METADATA_DEFAULT_LICENSES_PATH = "Licenses"
+ _MANIFEST_FILE_PATH_FIELD = "Source"
+ _MANIFEST_FILE_HASH_ALGORITHM_FIELD = "Algorithm"
+ _MANIFEST_FILE_HASH_DIGEST_FIELD = "Hash"
+
+ _MANIFEST_ALL_FIELDS = []
+
+ def __init__(self, package_path=""):
+ self._package_path = package_path
+
+ self._package_metadata = self._parse_package_metadata()
+
+ try:
+ self._manifest_data = self._parse_manifest_data()
+ except Exception:
+ self._manifest_data = None
+
+ try:
+ self._manifest_metadata = self._parse_manifest_metadata()
+ except Exception:
+ self._manifest_metadata = None
+
+ def _parse_package_metadata(self):
+ try:
+ return self._parse_package_metadata_with_metadata_dir()
+ except FileNotFoundError:
+ return self._parse_package_metadata_without_metadata_dir()
+
+ def _parse_package_metadata_with_metadata_dir(self):
+ try:
+ return self._parse_file_in_blocks(self._METADATA_FILE_PATH)
+        except FileNotFoundError:
+            raise
+        except Exception as e:
+ raise SOLPackageException(
+ "Error parsing {}: {}".format(self._METADATA_FILE_PATH, e)
+ )
+
+ def _parse_package_metadata_without_metadata_dir(self):
+        package_root_files = set(os.listdir(self._package_path))
+ package_root_yamls = [
+ f for f in package_root_files if f.endswith(".yml") or f.endswith(".yaml")
+ ]
+ if len(package_root_yamls) != 1:
+ error_msg = "Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}"
+ raise SOLPackageException(error_msg.format(len(package_root_yamls)))
+
+ base_manifest = [
+ {
+ SOLPackage._METADATA_DESCRIPTOR_FIELD: package_root_yamls[0],
+ SOLPackage._METADATA_MANIFEST_FIELD: "{}.mf".format(
+ os.path.splitext(package_root_yamls[0])[0]
+ ),
+ SOLPackage._METADATA_CHANGELOG_FIELD: SOLPackage._METADATA_DEFAULT_CHANGELOG_PATH,
+ SOLPackage._METADATA_LICENSES_FIELD: SOLPackage._METADATA_DEFAULT_LICENSES_PATH,
+ }
+ ]
+
+ return base_manifest
+
+ def _parse_manifest_data(self):
+ manifest_path = None
+ for tosca_meta in self._package_metadata:
+ if SOLPackage._METADATA_MANIFEST_FIELD in tosca_meta:
+ manifest_path = tosca_meta[SOLPackage._METADATA_MANIFEST_FIELD]
+ break
+ else:
+ error_msg = "Error parsing {}: no {} field on path".format(
+ self._METADATA_FILE_PATH, self._METADATA_MANIFEST_FIELD
+ )
+ raise SOLPackageException(error_msg)
+
+ try:
+ return self._parse_file_in_blocks(manifest_path)
+
+        except Exception as e:
+ raise SOLPackageException("Error parsing {}: {}".format(manifest_path, e))
+
+    def _parse_manifest_metadata(self):
+        base_manifest = {}
+        manifest_path = os.path.join(self._package_path, self.get_manifest_location())
+        try:
+            # Collect the known "key: value" metadata fields from the manifest
+            with open(manifest_path, "r") as manifest_file:
+                for line in manifest_file:
+                    fields_in_line = line.split(":", maxsplit=1)
+                    if len(fields_in_line) < 2:
+                        continue
+                    field_name = fields_in_line[0].strip()
+                    field_value = fields_in_line[1].strip()
+                    if field_name in self._MANIFEST_ALL_FIELDS:
+                        base_manifest[field_name] = field_value
+            return base_manifest
+        except Exception as e:
+            raise SOLPackageException(
+                "Error parsing {}: {}".format(manifest_path, e)
+            )
+
+ def _get_package_file_full_path(self, file_relative_path):
+ return os.path.join(self._package_path, file_relative_path)
+
+ def _parse_file_in_blocks(self, file_relative_path):
+ file_path = self._get_package_file_full_path(file_relative_path)
+ with open(file_path) as f:
+ blocks = f.read().split("\n\n")
+ parsed_blocks = map(yaml.safe_load, blocks)
+ return [block for block in parsed_blocks if block is not None]
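+
+    # Both TOSCA.meta and the manifest are parsed by _parse_file_in_blocks as
+    # sequences of YAML blocks separated by blank lines, e.g. (illustrative
+    # values only):
+    #
+    #   vnfd_id: hackfest_basic-vnf
+    #
+    #   Source: Files/icons/osm.png
+    #   Algorithm: SHA-512
+    #   Hash: 7c69...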
+
+ def _get_package_file_manifest_data(self, file_relative_path):
+ for file_data in self._manifest_data:
+ if (
+ file_data.get(SOLPackage._MANIFEST_FILE_PATH_FIELD, "")
+ == file_relative_path
+ ):
+ return file_data
+
+ error_msg = (
+ "Error parsing {} manifest data: file not found on manifest file".format(
+ file_relative_path
+ )
+ )
+ raise SOLPackageException(error_msg)
+
+ def get_package_file_hash_digest_from_manifest(self, file_relative_path):
+ """Returns the hash digest of a file inside this package as specified on the manifest file."""
+ file_manifest_data = self._get_package_file_manifest_data(file_relative_path)
+ try:
+ return file_manifest_data[SOLPackage._MANIFEST_FILE_HASH_DIGEST_FIELD]
+ except Exception as e:
+ raise SOLPackageException(
+ "Error parsing {} hash digest: {}".format(file_relative_path, e)
+ )
+
+ def get_package_file_hash_algorithm_from_manifest(self, file_relative_path):
+ """Returns the hash algorithm of a file inside this package as specified on the manifest file."""
+ file_manifest_data = self._get_package_file_manifest_data(file_relative_path)
+ try:
+ return file_manifest_data[SOLPackage._MANIFEST_FILE_HASH_ALGORITHM_FIELD]
+ except Exception as e:
+ raise SOLPackageException(
+ "Error parsing {} hash digest: {}".format(file_relative_path, e)
+ )
+
+ @staticmethod
+ def _get_hash_function_from_hash_algorithm(hash_algorithm):
+ function_to_algorithm = {"SHA-256": hashlib.sha256, "SHA-512": hashlib.sha512}
+ if hash_algorithm not in function_to_algorithm:
+ error_msg = (
+ "Error checking hash function: hash algorithm {} not supported".format(
+ hash_algorithm
+ )
+ )
+ raise SOLPackageException(error_msg)
+ return function_to_algorithm[hash_algorithm]
+
+ def _calculate_file_hash(self, file_relative_path, hash_algorithm):
+ file_path = self._get_package_file_full_path(file_relative_path)
+ hash_function = self._get_hash_function_from_hash_algorithm(hash_algorithm)
+ try:
+ with open(file_path, "rb") as f:
+ return hash_function(f.read()).hexdigest()
+ except Exception as e:
+ raise SOLPackageException(
+ "Error hashing {}: {}".format(file_relative_path, e)
+ )
+
+ def validate_package_file_hash(self, file_relative_path):
+ """Validates the integrity of a file using the hash algorithm and digest on the package manifest."""
+ hash_algorithm = self.get_package_file_hash_algorithm_from_manifest(
+ file_relative_path
+ )
+ file_hash = self._calculate_file_hash(file_relative_path, hash_algorithm)
+ expected_file_hash = self.get_package_file_hash_digest_from_manifest(
+ file_relative_path
+ )
+ if file_hash != expected_file_hash:
+ error_msg = "Error validating {} hash: calculated hash {} is different than manifest hash {}"
+ raise SOLPackageException(
+ error_msg.format(file_relative_path, file_hash, expected_file_hash)
+ )
+
+ def validate_package_hashes(self):
+ """Validates the integrity of all files listed on the package manifest."""
+ for file_data in self._manifest_data:
+ if SOLPackage._MANIFEST_FILE_PATH_FIELD in file_data:
+ file_relative_path = file_data[SOLPackage._MANIFEST_FILE_PATH_FIELD]
+ self.validate_package_file_hash(file_relative_path)
+
+ def create_or_update_metadata_file(self):
+ """
+        Creates or updates the manifest file with the package metadata and the
+        hash calculated for each of the package's files
+ """
+ if not self._manifest_metadata:
+ self.generate_manifest_data_from_descriptor()
+
+ self.write_manifest_data_into_file()
+
+ def generate_manifest_data_from_descriptor(self):
+ pass
+
+ def write_manifest_data_into_file(self):
+ with open(self.get_manifest_location(), "w") as metadata_file:
+ # Write manifest metadata
+ for metadata_entry in self._manifest_metadata:
+ metadata_file.write(
+ "{}: {}\n".format(
+ metadata_entry, self._manifest_metadata[metadata_entry]
+ )
+ )
+
+ # Write package's files hashes
+ file_hashes = {}
+ for root, dirs, files in os.walk(self._package_path):
+ for a_file in files:
+ file_path = os.path.join(root, a_file)
+ file_relative_path = file_path[len(self._package_path) :]
+ if file_relative_path.startswith("/"):
+ file_relative_path = file_relative_path[1:]
+ file_hashes[file_relative_path] = self._calculate_file_hash(
+ file_relative_path, "SHA-512"
+ )
+
+            for file_name, file_digest in file_hashes.items():
+                file_block = "Source: {}\nAlgorithm: SHA-512\nHash: {}\n\n".format(
+                    file_name, file_digest
+                )
+ metadata_file.write(file_block)
+
+ def get_descriptor_location(self):
+ """Returns this package descriptor location as a relative path from the package root."""
+ for tosca_meta in self._package_metadata:
+ if SOLPackage._METADATA_DESCRIPTOR_FIELD in tosca_meta:
+ return tosca_meta[SOLPackage._METADATA_DESCRIPTOR_FIELD]
+
+ error_msg = "Error: no {} entry found on {}".format(
+ SOLPackage._METADATA_DESCRIPTOR_FIELD, SOLPackage._METADATA_FILE_PATH
+ )
+ raise SOLPackageException(error_msg)
+
+ def get_manifest_location(self):
+ """Return the VNF/NS manifest location as a relative path from the package root."""
+ for tosca_meta in self._package_metadata:
+ if SOLPackage._METADATA_MANIFEST_FIELD in tosca_meta:
+ return tosca_meta[SOLPackage._METADATA_MANIFEST_FIELD]
+
+ raise SOLPackageException("No manifest file defined for this package")
from uuid import UUID
import hashlib
import tarfile
+from zipfile import ZipFile
import re
import yaml
def get_key_val_from_pkg(descriptor_file):
- # method opens up a package and finds the name of the resulting
- # descriptor (vnfd or nsd name)
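+    """Dispatch on the package file extension: .zip packages are read as
+    SOL004/SOL007 archives; anything else is handled as a legacy OSM tarball.
+    """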
+    if descriptor_file.endswith(".zip"):
+ return get_key_val_from_pkg_sol004(descriptor_file)
+ else:
+ return get_key_val_from_pkg_old(descriptor_file)
+
+
+def get_key_val_from_pkg_sol004(package_file):
+ """Method opens up a package and finds the name of the resulting
+ descriptor (vnfd or nsd name), using SOL004 spec
+ """
+ with ZipFile(package_file) as zipfile:
+ yamlfile = None
+ for filedata in zipfile.infolist():
+ if (
+ re.match(".*.yaml", filedata.filename)
+ and filedata.filename.find("Scripts") < 0
+ ):
+ yamlfile = filedata.filename
+ break
+ if yamlfile is None:
+ return None
+
+ return get_key_val_from_descriptor(zipfile.open(yamlfile))
+
+
+def get_key_val_from_pkg_old(descriptor_file):
+ """Method opens up a package and finds the name of the resulting
+ descriptor (vnfd or nsd name)
+ """
tar = tarfile.open(descriptor_file)
yamlfile = None
for member in tar.getmembers():
if yamlfile is None:
return None
- dict = yaml.safe_load(tar.extractfile(yamlfile))
+ result = get_key_val_from_descriptor(tar.extractfile(yamlfile))
+
+ tar.close()
+ return result
+
+
+def get_key_val_from_descriptor(descriptor):
+    desc_dict = yaml.safe_load(descriptor)
    result = {}
    for k in desc_dict:
        if "nsd" in k:
            result["type"] = "nsd"
        else:
            result["type"] = "vnfd"
+
    if "type" not in result:
        for k1, v1 in list(desc_dict.items()):
if not k1.endswith("-catalog"):
                        # strip off preceding chars before :
key_name = k3.split(":").pop()
result[key_name] = v3
-
- tar.close()
return result
check_client_version(ctx.obj, ctx.command.name)
- pdu = create_pdu_dictionary(name, pdu_type, interface, description, vim_account, descriptor_file)
+ pdu = create_pdu_dictionary(
+ name, pdu_type, interface, description, vim_account, descriptor_file
+ )
ctx.obj.pdu.create(pdu)
if not newname:
newname = name
- pdu = create_pdu_dictionary(newname, pdu_type, interface, description, vim_account, descriptor_file, update)
+ pdu = create_pdu_dictionary(
+ newname, pdu_type, interface, description, vim_account, descriptor_file, update
+ )
ctx.obj.pdu.update(name, pdu)
-def create_pdu_dictionary(name, pdu_type, interface, description, vim_account, descriptor_file, update=False):
+def create_pdu_dictionary(
+ name, pdu_type, interface, description, vim_account, descriptor_file, update=False
+):
logger.debug("")
pdu = {}
pdu["interfaces"] = ifaces_list
return pdu
+
####################
# UPDATE operations
####################
"--netslice-vlds", default=1, help="(NST) Number of netslice vlds. Default 1"
)
@click.option(
- "--old", default=False, is_flag=True, help="Flag to create a descriptor using the previous OSM format (pre SOL006, OSM<9)"
+ "--old",
+ default=False,
+ is_flag=True,
+ help="Flag to create a descriptor using the previous OSM format (pre SOL006, OSM<9)",
)
@click.pass_context
def package_create(
self._logger.debug("")
self._client.get_token()
# Endpoint to get operation status
- apiUrlStatus = "{}{}{}".format(self._apiName, self._apiVersion, self._apiResource)
+ apiUrlStatus = "{}{}{}".format(
+ self._apiName, self._apiVersion, self._apiResource
+ )
# Wait for status for VIM instance creation/deletion
if isinstance(wait_time, bool):
wait_time = WaitForStatus.TIMEOUT_VIM_OPERATION
endpoint=self._apiBase, postfields_dict=k8s_cluster
)
- self._logger.debug('HTTP CODE: {}'.format(http_code))
- self._logger.debug('RESP: {}'.format(resp))
+ self._logger.debug("HTTP CODE: {}".format(http_code))
+ self._logger.debug("RESP: {}".format(resp))
if resp:
resp = json.loads(resp)
wait_id = cluster["_id"]
self._wait(wait_id, wait)
- self._logger.debug('HTTP CODE: {}'.format(http_code))
- self._logger.debug('RESP: {}'.format(resp))
+ self._logger.debug("HTTP CODE: {}".format(http_code))
+ self._logger.debug("RESP: {}".format(resp))
if http_code in (200, 201, 202, 204):
print("Updated")
msg = json.loads(resp)
except ValueError:
msg = resp
- raise ClientException("failed to update K8s cluster {} - {}".format(name, msg))
+ raise ClientException(
+ "failed to update K8s cluster {} - {}".format(name, msg)
+ )
def get_id(self, name):
"""Returns a K8s cluster id from a K8s cluster name"""
"{}/{}{}".format(self._apiBase, cluster_id, querystring)
)
- self._logger.debug('HTTP CODE: {}'.format(http_code))
- self._logger.debug('RESP: {}'.format(resp))
+ self._logger.debug("HTTP CODE: {}".format(http_code))
+ self._logger.debug("RESP: {}".format(resp))
if http_code == 202:
if wait:
"Error cannot read from repository {} '{}': {}".format(
repository["name"], repository["url"], e
),
- exc_info=True
+ exc_info=True,
)
continue
artifacts = []
directories = []
for f in listdir(origin):
- if isfile(join(origin, f)) and f.endswith('.tar.gz'):
+ if isfile(join(origin, f)) and f.endswith(".tar.gz"):
artifacts.append(f)
- elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
- directories.append(f) # TODO: Document that nested directories are not supported
+ elif (
+ isdir(join(origin, f))
+ and f != destination.split("/")[-1]
+ and not f.startswith(".")
+ ):
+ directories.append(
+ f
+ ) # TODO: Document that nested directories are not supported
else:
self._logger.debug(f"Ignoring {f}")
for artifact in artifacts:
if descriptor_dict.get("vnfd-catalog", False):
aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
elif descriptor_dict.get("vnfd:vnfd-catalog"):
- aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
+ aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
+ "vnfd", [{}]
+ )[0]
elif descriptor_dict.get("vnfd"):
aux_dict = descriptor_dict["vnfd"]
if aux_dict.get("vnfd"):
- aux_dict = aux_dict['vnfd'][0]
+ aux_dict = aux_dict["vnfd"][0]
else:
msg = f"Unexpected descriptor format {descriptor_dict}"
self._logger.error(msg)
raise ValueError(msg)
- self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
+ self._logger.debug(
+ f"Extracted descriptor info for {package_type}: {aux_dict}"
+ )
images = []
- for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
- images.append(vdu.get("image", vdu.get('name')))
+ for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
+ images.append(vdu.get("image", vdu.get("name")))
fields["images"] = images
elif package_type == "ns":
if descriptor_dict.get("nsd-catalog", False):
aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
elif descriptor_dict.get("nsd:nsd-catalog"):
- aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
+ aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
+ 0
+ ]
elif descriptor_dict.get("nsd"):
- aux_dict = descriptor_dict['nsd']
+ aux_dict = descriptor_dict["nsd"]
if aux_dict.get("nsd"):
aux_dict = descriptor_dict["nsd"]["nsd"][0]
else:
for vnf in aux_dict.get("constituent-vnfd", ()):
vnfs.append(vnf.get("vnfd-id-ref"))
else:
- vnfs = aux_dict.get('vnfd-id')
+ vnfs = aux_dict.get("vnfd-id")
self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
fields["vnfd-id-ref"] = vnfs
- elif package_type == 'nst':
+ elif package_type == "nst":
if descriptor_dict.get("nst-catalog", False):
aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
elif descriptor_dict.get("nst:nst-catalog"):
- aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
+ aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
+ 0
+ ]
elif descriptor_dict.get("nst"):
- aux_dict = descriptor_dict['nst']
+ aux_dict = descriptor_dict["nst"]
if aux_dict.get("nst"):
aux_dict = descriptor_dict["nst"]["nst"][0]
nsds = []
# endpoint = '/nsds' if pkg_type['type'] == 'nsd' else '/vnfds'
# print('Endpoint: {}'.format(endpoint))
headers = self._client._headers
- headers["Content-Type"] = "application/gzip"
+
+ if filename.endswith(".tar.gz"):
+ headers["Content-Type"] = "application/gzip"
+ else:
+ headers["Content-Type"] = "application/zip"
# headers['Content-Type'] = 'application/binary'
# Next three lines are to be removed in next version
# headers['Content-Filename'] = basename(filename)
E123,
E125,
E226,
- E241
+ E241,
+ E203
exclude =
.git,
__pycache__,