X-Git-Url: https://osm.etsi.org/gitweb/?a=blobdiff_plain;f=osmclient%2Fcommon%2Fpackage_tool.py;h=af1e4263712611af6d860bc58385860308b9d908;hb=85fe6ebf08098c66cfb3b140fc994835f70d9efc;hp=7cc10becc4a80c723fb1eb1aecaa683dd90369d6;hpb=b7463a4821eabd214b2b38d54db38a5a1aae53d6;p=osm%2Fosmclient.git

diff --git a/osmclient/common/package_tool.py b/osmclient/common/package_tool.py
index 7cc10be..af1e426 100644
--- a/osmclient/common/package_tool.py
+++ b/osmclient/common/package_tool.py
@@ -15,19 +15,28 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from osmclient.common.exceptions import ClientException
-import os
 import glob
-import time
-import tarfile
 import hashlib
-from osm_im.validation import Validation as validation_im
+import logging
+import os
+import shutil
+import subprocess
+import tarfile
+import time
+
 from jinja2 import Environment, PackageLoader
+from osm_im.validation import Validation as validation_im
+from osm_im.validation import ValidationException
+from osm_im import im_translation
+from osmclient.common.exceptions import ClientException
+import yaml
 
 
 class PackageTool(object):
     def __init__(self, client=None):
         self._client = client
+        self._logger = logging.getLogger('osmclient')
+        self._validator = validation_im()
 
     def create(self, package_type, base_directory, package_name, override, image, vdus, vcpu, memory, storage,
                interfaces, vendor, detailed, netslice_subnets, netslice_vlds):
@@ -50,7 +59,7 @@ class PackageTool(object):
 
         :return: status
         """
-
+        self._logger.debug("")
         # print("location: {}".format(osmclient.__path__))
         file_loader = PackageLoader("osmclient")
         env = Environment(loader=file_loader)
@@ -80,7 +89,7 @@ class PackageTool(object):
             self.create_files(structure["files"], output, package_type)
             return "Created"
 
-    def validate(self, base_directory):
+    def validate(self, base_directory, recursive=True, old_format=False):
         """
         **Validate OSM Descriptors given a path**
 
@@ -89,8 +98,12 @@ class PackageTool(object):
 
         :return: List of dict of validated descriptors. keys: type, path, valid, error
         """
+        self._logger.debug("")
         table = []
-        descriptors_paths = [f for f in glob.glob(base_directory + "/**/*.yaml", recursive=True)]
+        if recursive:
+            descriptors_paths = [f for f in glob.glob(base_directory + "/**/*.yaml", recursive=recursive)]
+        else:
+            descriptors_paths = [f for f in glob.glob(base_directory + "/*.yaml", recursive=recursive)]
         print("Base directory: {}".format(base_directory))
         print("{} Descriptors found to validate".format(len(descriptors_paths)))
         for desc_path in descriptors_paths:
@@ -99,13 +112,82 @@
                 desc_type = "-"
                 try:
                     desc_type, descriptor_data = validation_im.yaml_validation(self, descriptor_data)
+                    if not old_format:
+                        if ( desc_type=="vnfd" or desc_type=="nsd" ):
+                            print("OSM descriptor '{}' written in an unsupported format. Please update to ETSI SOL006 format".format(desc_path))
+                            print("Package validation skipped. It can still be done with 'osm package-validate --old'")
+                            print("Package build can still be done with 'osm package-build --skip-validation'")
+                            raise Exception("Not SOL006 format")
                     validation_im.pyangbind_validation(self, desc_type, descriptor_data)
                     table.append({"type": desc_type, "path": desc_path, "valid": "OK", "error": "-"})
                 except Exception as e:
                     table.append({"type": desc_type, "path": desc_path, "valid": "ERROR", "error": str(e)})
         return table
 
-    def build(self, package_folder, skip_validation=True):
+    def translate(self, base_directory, recursive=True, dryrun=False):
+        """
+        **Translate OSM Packages given a path**
+
+        :params:
+            - base_directory is the root path for all packages
+
+        :return: List of dict of translated packages. keys: current type, new type, path, valid, translated, error
+        """
+        self._logger.debug("")
+        table = []
+        if recursive:
+            descriptors_paths = [f for f in glob.glob(base_directory + "/**/*.yaml", recursive=recursive)]
+        else:
+            descriptors_paths = [f for f in glob.glob(base_directory + "/*.yaml", recursive=recursive)]
+        print("Base directory: {}".format(base_directory))
+        print("{} Descriptors found to validate".format(len(descriptors_paths)))
+        for desc_path in descriptors_paths:
+            with open(desc_path) as descriptor_file:
+                descriptor_data = descriptor_file.read()
+            desc_type = "-"
+            try:
+                desc_type, descriptor_data = validation_im.yaml_validation(self, descriptor_data)
+                self._logger.debug("desc_type: {}".format(desc_type))
+                self._logger.debug("descriptor_data:\n{}".format(descriptor_data))
+                self._validator.pyangbind_validation(desc_type, descriptor_data)
+                if not ( desc_type=="vnfd" or desc_type=="nsd" ):
+                    table.append({"current type": desc_type, "new type": desc_type, "path": desc_path, "valid": "OK", "translated": "N/A", "error": "-"})
+                else:
+                    new_desc_type = desc_type
+                    try:
+                        sol006_model = yaml.safe_dump(im_translation.translate_im_model_to_sol006(descriptor_data), indent=4, default_flow_style=False)
+                        new_desc_type, new_descriptor_data = self._validator.yaml_validation(sol006_model)
+                        self._validator.pyangbind_validation(new_desc_type, new_descriptor_data)
+                        if not dryrun:
+                            with open(desc_path, 'w') as descriptor_file:
+                                descriptor_file.write(sol006_model)
+                        table.append({"current type": desc_type, "new type": new_desc_type, "path": desc_path, "valid": "OK", "translated": "OK", "error": "-"})
+                    except ValidationException as ve2:
+                        table.append({"current type": desc_type, "new type": new_desc_type, "path": desc_path, "valid": "OK", "translated": "ERROR", "error": "Error in the post-validation: {}".format(str(ve2))})
+                    except Exception as e2:
+                        table.append({"current type": desc_type, "new type": new_desc_type, "path": desc_path, "valid": "OK", "translated": "ERROR", "error": "Error in the translation: {}".format(str(e2))})
+            except ValidationException as ve:
+                table.append({"current type": desc_type, "new type": "N/A", "path": desc_path, "valid": "ERROR", "translated": "N/A", "error": "Error in the pre-validation: {}".format(str(ve))})
+            except Exception as e:
+                table.append({"current type": desc_type, "new type": "N/A", "path": desc_path, "valid": "ERROR", "translated": "N/A", "error": str(e)})
+        return table
+
+    def descriptor_translate(self, descriptor_file):
+        """
+        **Translate input descriptor file from Rel EIGHT OSM to SOL006**
+
+        :params:
+            - descriptor_file: path of the descriptor file to be translated
+
+        :return: YAML descriptor in the new format
+        """
+        self._logger.debug("")
+        with open(descriptor_file, 'r') as df:
+            im_model = yaml.safe_load(df.read())
+        sol006_model = im_translation.translate_im_model_to_sol006(im_model)
+        return yaml.safe_dump(sol006_model, indent=4, default_flow_style=False)
+
+    def build(self, package_folder, skip_validation=False, skip_charm_build=False):
         """
         **Creates a .tar.gz file given a package_folder**
 
@@ -115,20 +197,23 @@
 
         :returns: message result for the build process
         """
-
+        self._logger.debug("")
+        package_folder = package_folder.rstrip('/')
         if not os.path.exists("{}".format(package_folder)):
-            return "Fail, package is not in the specified route"
+            return "Fail, package is not in the specified path"
         if not skip_validation:
-            results = self.validate(package_folder)
-            for result in results:
-                if result["valid"] != "OK":
-                    return("There was an error validating the file: {} with error: {}".format(result["path"],
-                                                                                               result["error"]))
-        self.calculate_checksum(package_folder)
-        with tarfile.open("{}.tar.gz".format(package_folder), mode='w:gz') as archive:
-            print("Adding File: {}".format(package_folder))
-            archive.add('{}'.format(package_folder), recursive=True)
-        return "Created {}.tar.gz".format(package_folder)
+            print('Validating package {}'.format(package_folder))
+            results = self.validate(package_folder, recursive=False)
+            if results:
+                for result in results:
+                    if result["valid"] != "OK":
+                        raise ClientException("There was an error validating the file {} with error: {}"
+                                              .format(result["path"], result["error"]))
+                print('Validation OK')
+            else:
+                raise ClientException("No descriptor file found in: {}".format(package_folder))
+        charm_list = self.build_all_charms(package_folder, skip_charm_build)
+        return self.build_tarfile(package_folder, charm_list)
 
     def calculate_checksum(self, package_folder):
         """
@@ -138,19 +223,19 @@
         - package_folder: is the folder where we have the files to calculate the checksum
         :returns: None
         """
-        files = [f for f in glob.glob(package_folder + "/**/*.*", recursive=True)]
-        checksum = open("{}/checksum.txt".format(package_folder), "w+")
-        for file_item in files:
-            if "checksum.txt" in file_item:
-                continue
-            # from https://www.quickprogrammingtips.com/python/how-to-calculate-md5-hash-of-a-file-in-python.html
-            md5_hash = hashlib.md5()
-            with open(file_item, "rb") as f:
-                # Read and update hash in chunks of 4K
-                for byte_block in iter(lambda: f.read(4096), b""):
-                    md5_hash.update(byte_block)
-                checksum.write("{}\t{}\n".format(md5_hash.hexdigest(), file_item))
-        checksum.close()
+        self._logger.debug("")
+        files = [f for f in glob.glob(package_folder + "/**/*.*", recursive=True) if os.path.isfile(f)]
+        with open("{}/checksums.txt".format(package_folder), "w+") as checksum:
+            for file_item in files:
+                if "checksums.txt" in file_item:
+                    continue
+                # from https://www.quickprogrammingtips.com/python/how-to-calculate-md5-hash-of-a-file-in-python.html
+                md5_hash = hashlib.md5()
+                with open(file_item, "rb") as f:
+                    # Read and update hash in chunks of 4K
+                    for byte_block in iter(lambda: f.read(4096), b""):
+                        md5_hash.update(byte_block)
+                    checksum.write("{}\t{}\n".format(md5_hash.hexdigest(), file_item))
 
     def create_folders(self, folders, package_type):
         """
@@ -161,7 +246,7 @@
         - package_type: is the type of package to be created
         :return: None
         """
-
+        self._logger.debug("")
         for folder in folders:
             try:
                 # print("Folder {} == package_type {}".format(folder[1], package_type))
@@ -180,6 +265,7 @@
         - file_body: is the content of the file
         :return: None
         """
+        self._logger.debug("")
         print("Creating file: \t{}".format(file_name))
         try:
             with open(file_name, "w+") as f:
@@ -193,6 +279,7 @@
 
         :returns: readme content
         """
+        self._logger.debug("")
         return """# Descriptor created by OSM descriptor package generated\n\n**Created on {} **""".format(
             time.strftime("%m/%d/%Y, %H:%M:%S", time.localtime()))
 
@@ -202,6 +289,7 @@
 
         :returns: cloud-init content
         """
+        self._logger.debug("")
         return "---\n#cloud-config"
 
     def create_files(self, files, file_content, package_type):
@@ -215,6 +303,7 @@
 
         :return: None
         """
+        self._logger.debug("")
         for file_item, file_package, file_type in files:
             if package_type == file_package:
                 if file_type == "descriptor":
@@ -234,6 +323,7 @@
 
         :return: Missing paths Dict
         """
+        self._logger.debug("")
         missing_paths = {}
         folders = []
         files = []
@@ -249,6 +339,42 @@
 
         return missing_paths
 
+    def build_all_charms(self, package_folder, skip_charm_build):
+        """
+        **Read the descriptor file, check that the referenced charms are in the folder and compile them**
+
+        :params:
+            - package_folder: is the location of the package
+        :return: List of charms referenced in the descriptors
+        """
+        self._logger.debug("")
+        listCharms = []
+        descriptor_file = False
+        descriptors_paths = [f for f in glob.glob(package_folder + "/*.yaml")]
+        for file in descriptors_paths:
+            if file.endswith('nfd.yaml'):
+                descriptor_file = True
+                listCharms = self.charms_search(file, 'vnf')
+            if file.endswith('nsd.yaml'):
+                descriptor_file = True
+                listCharms = self.charms_search(file, 'ns')
+        print("List of charms in the descriptor: {}".format(listCharms))
+        if not descriptor_file:
+            raise ClientException('Descriptor filename is not correct in: {}. It should end with "nfd.yaml" or "nsd.yaml"'.format(package_folder))
+        if listCharms and not skip_charm_build:
+            for charmName in listCharms:
+                if os.path.isdir('{}/charms/layers/{}'.format(package_folder, charmName)):
+                    print('Building charm {}/charms/layers/{}'.format(package_folder, charmName))
+                    self.charm_build(package_folder, charmName)
+                    print('Charm built: {}'.format(charmName))
+                else:
+                    if not os.path.isdir('{}/charms/{}'.format(package_folder, charmName)):
+                        raise ClientException('The charm: {} referenced in the descriptor file '
+                                              'is not present either in {}/charms or in {}/charms/layers'.
+                                              format(charmName, package_folder, package_folder))
+        self._logger.debug("Return list of charms: {}".format(listCharms))
+        return listCharms
+
     def discover_folder_structure(self, base_directory, name, override):
         """
         **Discover files and folders structure for OSM descriptors given a base_directory and name**
@@ -259,6 +385,7 @@
         - override: is the flag used to indicate the creation of the list even if the file exist to override it
         :return: Files and Folders not found. In case of override, it will return all file list
         """
+        self._logger.debug("")
         prefix = "{}/{}".format(base_directory, name)
         files_folders = {"folders": [("{}_ns".format(prefix), "ns"),
                                      ("{}_ns/icons".format(prefix), "ns"),
@@ -284,3 +411,171 @@
         missing_files_folders = self.check_files_folders(files_folders, override)
         # print("Missing files and folders: {}".format(missing_files_folders))
         return missing_files_folders
+
+    def charm_build(self, charms_folder, build_name):
+        """
+        Build the charms inside the package.
+        params: charms_folder is the name of the folder where the charms to compile are located.
+                build_name is the name of the layer or interface
+        """
+        self._logger.debug("")
+        os.environ['JUJU_REPOSITORY'] = "{}/charms".format(charms_folder)
+        os.environ['CHARM_LAYERS_DIR'] = "{}/layers".format(os.environ['JUJU_REPOSITORY'])
+        os.environ['CHARM_INTERFACES_DIR'] = "{}/interfaces".format(os.environ['JUJU_REPOSITORY'])
+        os.environ['CHARM_BUILD_DIR'] = "{}/charms/builds".format(charms_folder)
+        if not os.path.exists(os.environ['CHARM_BUILD_DIR']):
+            os.makedirs(os.environ['CHARM_BUILD_DIR'])
+        src_folder = '{}/{}'.format(os.environ['CHARM_LAYERS_DIR'], build_name)
+        result = subprocess.run(["charm", "build", "{}".format(src_folder)])
+        if result.returncode == 1:
+            raise ClientException("failed to build the charm: {}".format(src_folder))
+        self._logger.verbose("charm {} built".format(src_folder))
+
+    def build_tarfile(self, package_folder, charm_list=None):
+        """
+        Creates a .tar.gz file given a package_folder
+        params: package_folder is the name of the folder to be packaged
+        returns: .tar.gz name
+        """
+        self._logger.debug("")
+        cwd = None
+        try:
+            directory_name, package_name = self.create_temp_dir(package_folder, charm_list)
+            cwd = os.getcwd()
+            os.chdir(directory_name)
+            self.calculate_checksum(package_name)
+            with tarfile.open("{}.tar.gz".format(package_name), mode='w:gz') as archive:
+                print("Adding File: {}".format(package_name))
+                archive.add('{}'.format(package_name), recursive=True)
+            # return "Created {}.tar.gz".format(package_folder)
+            # self.build("{}".format(os.path.basename(package_folder)))
+            os.chdir(cwd)
+            cwd = None
+            created_package = "{}/{}.tar.gz".format(os.path.dirname(package_folder) or '.', package_name)
+            os.rename("{}/{}.tar.gz".format(directory_name, package_name),
+                      created_package)
+            os.rename("{}/{}/checksums.txt".format(directory_name, package_name),
+                      "{}/checksums.txt".format(package_folder))
+            print("Package created: {}".format(created_package))
+            return created_package
+        except Exception as exc:
+            raise ClientException('failure during build of targz file (create temp dir, calculate checksum, '
+                                  'tar.gz file): {}'.format(exc))
+        finally:
+            if cwd:
+                os.chdir(cwd)
+            shutil.rmtree(os.path.join(package_folder, "tmp"))
+
+    def create_temp_dir(self, package_folder, charm_list=None):
+        """
+        Method to create a temporary folder where we can move the files in package_folder
+        """
+        self._logger.debug("")
+        ignore_patterns = ('.gitignore')
+        ignore = shutil.ignore_patterns(ignore_patterns)
+        directory_name = os.path.abspath(package_folder)
+        package_name = os.path.basename(directory_name)
+        directory_name += "/tmp"
+        os.makedirs("{}/{}".format(directory_name, package_name), exist_ok=True)
+        self._logger.debug("Makedirs DONE: {}/{}".format(directory_name, package_name))
+        for item in os.listdir(package_folder):
+            self._logger.debug("Item: {}".format(item))
+            if item != "tmp":
+                s = os.path.join(package_folder, item)
+                d = os.path.join(os.path.join(directory_name, package_name), item)
+                if os.path.isdir(s):
+                    if item == "charms":
+                        os.makedirs(d, exist_ok=True)
+                        s_builds = os.path.join(s, "builds")
+                        for charm in charm_list:
+                            self._logger.debug("Copying charm {}".format(charm))
+                            if charm in os.listdir(s):
+                                s_charm = os.path.join(s, charm)
+                            elif charm in os.listdir(s_builds):
+                                s_charm = os.path.join(s_builds, charm)
+                            else:
+                                raise ClientException('The charm {} referenced in the descriptor file '
+                                                      'could not be found in {}/charms or in {}/charms/builds'.
+                                                      format(charm, package_folder, package_folder))
+                            d_temp = os.path.join(d, charm)
+                            self._logger.debug("Copying tree: {} -> {}".format(s_charm, d_temp))
+                            shutil.copytree(s_charm, d_temp, symlinks=True, ignore=ignore)
+                            self._logger.debug("DONE")
+                    else:
+                        self._logger.debug("Copying tree: {} -> {}".format(s, d))
+                        shutil.copytree(s, d, symlinks=True, ignore=ignore)
+                        self._logger.debug("DONE")
+                else:
+                    if item in ignore_patterns:
+                        continue
+                    self._logger.debug("Copying file: {} -> {}".format(s, d))
+                    shutil.copy2(s, d)
+                    self._logger.debug("DONE")
+        return directory_name, package_name
+
+    def charms_search(self, descriptor_file, desc_type):
+        self._logger.debug("descriptor_file: {}, desc_type: {}".format(descriptor_file,
+                                                                       desc_type))
+        with open("{}".format(descriptor_file)) as yaml_desc:
+            descriptor_dict = yaml.safe_load(yaml_desc)
+        #self._logger.debug("\n"+yaml.safe_dump(descriptor_dict, indent=4, default_flow_style=False))
+
+        if ( (desc_type=="vnf" and ("vnfd:vnfd-catalog" in descriptor_dict or "vnfd-catalog" in descriptor_dict)) or
+             (desc_type=="ns" and ( "nsd:nsd-catalog" in descriptor_dict or "nsd-catalog" in descriptor_dict)) ):
+            charms_list = self._charms_search_on_osm_im_dict(descriptor_dict, desc_type)
+        else:
+            if desc_type == "ns":
+                get_charm_list = self._charms_search_on_nsd_sol006_dict
+            elif desc_type == "vnf":
+                get_charm_list = self._charms_search_on_vnfd_sol006_dict
+            else:
+                raise Exception("Bad descriptor type")
+            charms_list = get_charm_list(descriptor_dict)
+        return charms_list
+
+    def _charms_search_on_osm_im_dict(self, osm_im_dict, desc_type):
+        self._logger.debug("")
+        charms_list = []
+        for k1, v1 in osm_im_dict.items():
+            for k2, v2 in v1.items():
+                for entry in v2:
+                    if '{}-configuration'.format(desc_type) in entry:
+                        vnf_config = entry['{}-configuration'.format(desc_type)]
+                        for k3, v3 in vnf_config.items():
+                            if 'charm' in v3:
+                                charms_list.append((v3['charm']))
+                    if 'vdu' in entry:
+                        vdus = entry['vdu']
+                        for vdu in vdus:
+                            if 'vdu-configuration' in vdu:
+                                for k4, v4 in vdu['vdu-configuration'].items():
+                                    if 'charm' in v4:
+                                        charms_list.append((v4['charm']))
+        return charms_list
+
+    def _charms_search_on_vnfd_sol006_dict(self, sol006_dict):
+        self._logger.debug("")
+        charms_list = []
+        for k1, v1 in sol006_dict.items():
+            for k2, v2 in v1.items():
+                if 'vnf-configuration' in k2:
+                    for vnf_config in v2:
+                        for k3, v3 in vnf_config.items():
+                            if 'charm' in v3:
+                                charms_list.append((v3['charm']))
+                if 'vdu-configuration' in k2:
+                    for vdu_config in v2:
+                        for k3, v3 in vdu_config.items():
+                            if 'charm' in v3:
+                                charms_list.append((v3['charm']))
+        return charms_list
+
+    def _charms_search_on_nsd_sol006_dict(self, sol006_dict):
+        self._logger.debug("")
+        charms_list = []
+        nsd_list = sol006_dict.get("nsd", {}).get("nsd", [])
+        for nsd in nsd_list:
+            charm = nsd.get("ns-configuration", {}).get("juju", {}).get("charm")
+            if charm:
+                charms_list.append(charm)
+        return charms_list
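
For reference, a minimal usage sketch of the PackageTool entry points touched by this patch (validate, translate, build). It is not part of the commit; it assumes an importable osmclient installation, and the package directory ./hackfest_basic_vnf is a hypothetical placeholder for a local package you supply.

    from osmclient.common.package_tool import PackageTool

    tool = PackageTool()

    # Validate only the top-level descriptors of the package; old_format=True keeps
    # validating pre-SOL006 descriptors instead of skipping them.
    for row in tool.validate("./hackfest_basic_vnf", recursive=False, old_format=False):
        print(row["type"], row["path"], row["valid"], row["error"])

    # Report what would be translated from the OSM IM format to SOL006 (dryrun=True);
    # with dryrun=False the descriptor files are rewritten in place.
    print(tool.translate("./hackfest_basic_vnf", recursive=False, dryrun=True))

    # Build the .tar.gz package; validation and charm builds run unless explicitly skipped.
    print(tool.build("./hackfest_basic_vnf", skip_validation=False, skip_charm_build=True))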