X-Git-Url: https://osm.etsi.org/gitweb/?a=blobdiff_plain;ds=sidebyside;f=rwlaunchpad%2Fplugins%2Frwlaunchpadtasklet%2Frift%2Ftasklets%2Frwlaunchpad%2Fexport.py;h=df1e251e0113b41a5bdd911abdeca1a02950e36b;hb=aa4d4c18f40062624a3c356eb23f40279cfd4c3b;hp=4256765b6d60e42ddd538418eb0489552a4b04e1;hpb=255ff03a528a3090ce7f46f0a63b65da3e6f9bcf;p=osm%2FSO.git

diff --git a/rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/export.py b/rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/export.py
index 4256765b..df1e251e 100644
--- a/rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/export.py
+++ b/rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/export.py
@@ -1,6 +1,6 @@
-#
-# Copyright 2016 RIFT.IO Inc
+#
+# Copyright 2016-2017 RIFT.IO Inc
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -21,6 +21,8 @@ import os.path
 import stat
 import time
 import uuid
+import collections
+import json
 
 import tornado.web
 
@@ -35,13 +37,19 @@ from . import message
 from . import tosca
 
 import gi
-gi.require_version('NsdYang', '1.0')
-gi.require_version('VnfdYang', '1.0')
+gi.require_version('RwPkgMgmtYang', '1.0')
 from gi.repository import (
-        NsdYang,
-        VnfdYang,
-        )
+    RwPkgMgmtYang,
+    RwVnfdYang,
+    RwProjectVnfdYang,
+    RwNsdYang,
+    RwProjectNsdYang
+)
+import rift.mano.dts as mano_dts
+
+
+RPC_PACKAGE_EXPORT_ENDPOINT = RwPkgMgmtYang.YangOutput_RwPkgMgmt_PackageExport
 
 
 class ExportStart(message.StatusMessage):
@@ -77,18 +85,18 @@ def __init__(self, log):
         self._log = log
 
-    def _create_archive_from_package(self, archive_hdl, package, open_fn):
+    def _create_archive_from_package(self, archive_hdl, package, open_fn, top_level_dir=None):
         orig_open = package.open
         try:
             package.open = open_fn
             archive = rift.package.archive.TarPackageArchive.from_package(
-                    self._log, package, archive_hdl
+                    self._log, package, archive_hdl, top_level_dir
                     )
             return archive
         finally:
             package.open = orig_open
 
-    def create_archive(self, archive_hdl, package, desc_json_str, serializer):
+    def create_archive(self, archive_hdl, package, desc_json_str, serializer, project=None):
         """ Create a package archive from an existing package, descriptor
         messages, and a destination serializer.
@@ -110,7 +118,7 @@ class DescriptorPackageArchiveExporter(object):
             ArchiveExportError - The exported archive failed to create
 
         """
-        new_desc_msg = serializer.from_file_hdl(io.BytesIO(desc_json_str.encode()), ".json")
+        new_desc_msg = serializer.from_file_hdl(io.BytesIO(desc_json_str.encode()), ".json", project)
         _, dest_ext = os.path.splitext(package.descriptor_file)
         new_desc_hdl = io.BytesIO(serializer.to_string(new_desc_msg, dest_ext).encode())
         descriptor_checksum = rift.package.checksums.checksum(new_desc_hdl)
@@ -134,7 +142,10 @@ class DescriptorPackageArchiveExporter(object):
                 checksum_hdl
                 )
 
-        archive_checksums[package.descriptor_file] = descriptor_checksum
+        # Get the name of the descriptor file without the prefix
+        # (which is what is stored in the checksum file)
+        desc_file_no_prefix = os.path.relpath(package.descriptor_file, package.prefix)
+        archive_checksums[desc_file_no_prefix] = descriptor_checksum
 
         checksum_hdl = io.BytesIO(archive_checksums.to_string().encode())
         return checksum_hdl
@@ -149,11 +160,11 @@ class DescriptorPackageArchiveExporter(object):
 
             return open_fn(rel_path)
 
-        archive = self._create_archive_from_package(archive_hdl, package, open_wrapper)
+        archive = self._create_archive_from_package(archive_hdl, package, open_wrapper, new_desc_msg.name)
 
         return archive
 
-    def export_package(self, package, export_dir, file_id, json_desc_str, dest_serializer):
+    def export_package(self, package, export_dir, file_id, json_desc_str, dest_serializer, project=None):
         """ Export package as an archive to the export directory
 
         Arguments:
@@ -178,7 +189,7 @@ class DescriptorPackageArchiveExporter(object):
         with open(archive_path, 'wb') as archive_hdl:
             try:
                 self.create_archive(
-                    archive_hdl, package, json_desc_str, dest_serializer
+                    archive_hdl, package, json_desc_str, dest_serializer, project
                 )
             except Exception as e:
                 os.remove(archive_path)
@@ -189,80 +200,98 @@ class DescriptorPackageArchiveExporter(object):
         return archive_path
 
 
-class ExportHandler(tornado.web.RequestHandler):
-    def options(self, *args, **kargs):
-        pass
+class ExportRpcHandler(mano_dts.AbstractRpcHandler):
+    def __init__(self, application, catalog_map):
+        """
+        Args:
+            application: UploaderApplication
+            catalog_map: Dict containing the VNFD and NSD onboarding catalogs.
+        """
+        super().__init__(application.log, application.dts, application.loop)
+
+        self.application = application
+        self.exporter = application.exporter
+        self.onboarder = application.onboarder
+        self.catalog_map = catalog_map
 
-    def set_default_headers(self):
-        self.set_header('Access-Control-Allow-Origin', '*')
-        self.set_header('Access-Control-Allow-Headers',
-                        'Content-Type, Cache-Control, Accept, X-Requested-With, Authorization')
-        self.set_header('Access-Control-Allow-Methods', 'POST, GET, PUT, DELETE')
-
-    def initialize(self, log, loop, store_map, exporter, catalog_map):
-        self.loop = loop
-        self.transaction_id = str(uuid.uuid4())
-        self.log = message.Logger(
-            log,
-            self.application.messages[self.transaction_id],
+
+    @property
+    def xpath(self):
+        return "/rw-pkg-mgmt:package-export"
+
+    @asyncio.coroutine
+    def callback(self, ks_path, msg):
+        transaction_id = str(uuid.uuid4())
+        log = message.Logger(
+            self.log,
+            self.application.messages[transaction_id],
         )
-        self.store_map = store_map
-        self.exporter = exporter
-        self.catalog_map = catalog_map
 
-    def get(self, desc_type):
-        if desc_type not in self.catalog_map:
-            raise tornado.web.HTTPError(400, "unknown descriptor type: {}".format(desc_type))
+        file_name = self.export(transaction_id, log, msg)
 
-        self.log.message(ExportStart())
+        rpc_out = RPC_PACKAGE_EXPORT_ENDPOINT.from_dict({
+            'transaction_id': transaction_id,
+            'filename': file_name})
 
-        # Parse the IDs
-        ids_query = self.get_query_argument("ids")
-        ids = [id.strip() for id in ids_query.split(',')]
-        if len(ids) != 1:
-            raise message.MessageException(ExportSingleDescriptorOnlyError)
-        desc_id = ids[0]
+        return rpc_out
 
-        catalog = self.catalog_map[desc_type]
+    def export(self, transaction_id, log, msg):
+        DESC_TYPE_PB_MAP = {
+            "vnfd": RwProjectVnfdYang.YangData_RwProject_Project_VnfdCatalog_Vnfd,
+            "nsd": RwProjectNsdYang.YangData_RwProject_Project_NsdCatalog_Nsd
+        }
+
+        log.message(ExportStart())
+        desc_type = msg.package_type.lower()
 
-        if desc_id not in catalog:
-            raise tornado.web.HTTPError(400, "unknown descriptor id: {}".format(desc_id))
+        if desc_type not in self.catalog_map:
+            raise ValueError("Invalid package type: {}".format(desc_type))
 
-        desc_msg = catalog[desc_id]
+        # Parse the IDs
+        desc_id = msg.package_id
+        catalog = self.catalog_map[desc_type](project=msg.project_name)
+
+        # TODO: Descriptor isn't available from catalog info passed in from launchpad tasklet.
+        #       If unavailable, create a filler descriptor object, which will be updated
+        #       via GET call to config.
+        if desc_id in catalog:
+            desc_msg = catalog[desc_id]
+        else:
+            log.warn("Unable to find package ID in catalog: {}".format(desc_id))
+            desc_msg = DESC_TYPE_PB_MAP[desc_type](id = desc_id)
+
+        self.store_map = self.application.build_store_map(project=msg.project_name)
+        self.project_name = msg.project_name if msg.has_field('project_name') else None
 
         # Get the schema for exporting
-        schema = self.get_argument("schema", default="rift")
+        schema = msg.export_schema.lower()
 
         # Get the grammar for exporting
-        grammar = self.get_argument("grammar", default="osm")
+        grammar = msg.export_grammar.lower()
 
         # Get the format for exporting
-        format_ = self.get_argument("format", default="yaml")
+        format_ = msg.export_format.lower()
 
-        filename = None
+        # Initial value of the exported filename
+        self.filename = "{name}_{ver}".format(
+            name=desc_msg.name,
+            ver=desc_msg.version)
 
         if grammar == 'tosca':
-            filename = "{}.zip".format(self.transaction_id)
-            self.export_tosca(schema, format_, desc_type, desc_id, desc_msg)
-            self.log.message(message.FilenameMessage(filename))
+            self.export_tosca(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
+            filename = "{}.zip".format(self.filename)
+            log.message(message.FilenameMessage(filename))
         else:
-            filename = "{}.tar.gz".format(self.transaction_id)
-            self.export_rift(schema, format_, desc_type, desc_id, desc_msg)
-            self.log.message(message.FilenameMessage(filename))
+            self.export_rift(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
+            filename = "{}.tar.gz".format(self.filename)
+            log.message(message.FilenameMessage(filename))
 
-        self.log.message(ExportSuccess())
+        log.message(ExportSuccess())
 
-        if filename is not None:
-            self.write(tornado.escape.json_encode({
-                "transaction_id": self.transaction_id,
-                "filename": filename,
-            }))
-        else:
-            self.write(tornado.escape.json_encode({
-                "transaction_id": self.transaction_id,
-            }))
+        return filename
 
-    def export_rift(self, schema, format_, desc_type, desc_id, desc_msg):
+    def export_rift(self, schema, format_, desc_type, desc_id, desc_msg, log, transaction_id):
         convert = rift.package.convert
         schema_serializer_map = {
             "rift": {
@@ -270,8 +299,8 @@ class ExportHandler(tornado.web.RequestHandler):
                 "nsd": convert.RwNsdSerializer,
             },
             "mano": {
-                "vnfd": convert.VnfdSerializer,
-                "nsd": convert.NsdSerializer,
+                "vnfd": convert.RwVnfdSerializer,
+                "nsd": convert.RwNsdSerializer,
             }
         }
 
@@ -279,7 +308,7 @@ class ExportHandler(tornado.web.RequestHandler):
            raise tornado.web.HTTPError(400, "unknown schema: {}".format(schema))
 
         if format_ != "yaml":
-            self.log.warn("Only yaml format supported for export")
+            log.warn("Only yaml format supported for export")
 
         if desc_type not in schema_serializer_map[schema]:
             raise tornado.web.HTTPError(400, "unknown descriptor type: {}".format(desc_type))
@@ -295,32 +324,62 @@ class ExportHandler(tornado.web.RequestHandler):
         # If that fails, create a temporary package using the descriptor only
         try:
             package = package_store.get_package(desc_id)
+            #Remove the image file from the package while exporting
+            for file in package.files:
+                if rift.package.image.is_image_file(file):
+                    package.remove_file(file)
+
         except rift.package.store.PackageNotFoundError:
-            self.log.debug("stored package not found. creating package from descriptor config")
+            log.debug("stored package not found. creating package from descriptor config")
             desc_yaml_str = src_serializer.to_yaml_string(desc_msg)
             with io.BytesIO(desc_yaml_str.encode()) as hdl:
                 hdl.name = "{}__{}.yaml".format(desc_msg.id, desc_type)
                 package = rift.package.package.DescriptorPackage.from_descriptor_file_hdl(
-                    self.log, hdl
+                    log, hdl
                 )
 
+        # Get the updated descriptor from the api endpoint to get any updates
+        # made to the catalog. Also desc_msg may not be populated correctly as yet.
+        #
+
+        try:
+            # merge the descriptor content: for rbac everything needs to be project rooted, with project name.
+            D = collections.defaultdict(dict)
+            sub_dict = self.onboarder.get_updated_descriptor(desc_msg, self.project_name)
+
+            if self.project_name:
+                D["project"] = dict(name = self.project_name)
+                root_key, sub_key = "project-{0}:{0}-catalog".format(desc_type), "project-{0}:{0}".format(desc_type)
+                D["project"].update({root_key: sub_dict})
+            else:
+                root_key, sub_key = "{0}:{0}-catalog".format(desc_type), "{0}:{0}".format(desc_type)
+                D[root_key] = sub_dict
+
+            json_desc_msg = json.dumps(D)
+            desc_name, desc_version = sub_dict[sub_key]['name'], sub_dict[sub_key].get('version', '')
+
+        except Exception as e:
+            msg = "Exception {} raised - {}".format(e.__class__.__name__, str(e))
+            self.log.error(msg)
+            raise ArchiveExportError(msg) from e
+
+        # exported filename based on the updated descriptor name
+        self.filename = "{}_{}".format(desc_name, desc_version)
+
+        self.log.debug("JSON string for descriptor: {}".format(json_desc_msg))
+
         self.exporter.export_package(
                 package=package,
                 export_dir=self.application.export_dir,
-                file_id=self.transaction_id,
-                json_desc_str=src_serializer.to_json_string(desc_msg),
+                file_id = self.filename,
+                json_desc_str=json_desc_msg,
                 dest_serializer=dest_serializer,
+                project=self.project_name,
                 )
 
-    def export_tosca(self, format_, schema, desc_type, desc_id, desc_msg):
+    def export_tosca(self, format_, schema, desc_type, desc_id, desc_msg, log, transaction_id):
         if format_ != "yaml":
-            self.log.warn("Only yaml format supported for TOSCA export")
-
-        if desc_type != "nsd":
-            raise tornado.web.HTTPError(
-                400,
-                "NSD need to passed to generate TOSCA: {}".format(desc_type))
+            log.warn("Only yaml format supported for TOSCA export")
 
         def get_pkg_from_store(id_, type_):
             package = None
@@ -330,33 +389,44 @@ class ExportHandler(tornado.web.RequestHandler):
                 package = package_store.get_package(id_)
 
             except rift.package.store.PackageNotFoundError:
-                self.log.debug("stored package not found for {}.".format(id_))
+                log.debug("stored package not found for {}.".format(id_))
             except rift.package.store.PackageStoreError:
-                self.log.debug("stored package error for {}.".format(id_))
+                log.debug("stored package error for {}.".format(id_))
 
             return package
 
-        pkg = tosca.ExportTosca()
-
-        # Add NSD and related descriptors for exporting
-        nsd_id = pkg.add_nsd(desc_msg, get_pkg_from_store(desc_id, "nsd"))
-
-        catalog = self.catalog_map["vnfd"]
-        for const_vnfd in desc_msg.constituent_vnfd:
-            vnfd_id = const_vnfd.vnfd_id_ref
-            if vnfd_id in catalog:
-                pkg.add_vnfd(nsd_id,
-                             catalog[vnfd_id],
-                             get_pkg_from_store(vnfd_id, "vnfd"))
-            else:
-                raise tornado.web.HTTPError(
-                    400,
-                    "Unknown VNFD descriptor {} for NSD {}".
-                    format(vnfd_id, nsd_id))
-
-        # Create the archive.
-        pkg.create_archive(self.transaction_id,
-                           dest=self.application.export_dir)
+        if desc_type == "nsd":
+            pkg = tosca.ExportTosca()
+
+            # Add NSD and related descriptors for exporting
+            nsd_id = pkg.add_nsd(desc_msg, get_pkg_from_store(desc_id, "nsd"))
+
+            catalog = self.catalog_map["vnfd"]
+            for const_vnfd in desc_msg.constituent_vnfd:
+                vnfd_id = const_vnfd.vnfd_id_ref
+                if vnfd_id in catalog:
+                    pkg.add_vnfd(nsd_id,
+                                 catalog[vnfd_id],
+                                 get_pkg_from_store(vnfd_id, "vnfd"))
+                else:
+                    raise tornado.web.HTTPError(
+                        400,
+                        "Unknown VNFD descriptor {} for NSD {}".
+                        format(vnfd_id, nsd_id))
+
+            # Create the archive.
+            pkg.create_archive(transaction_id,
+                               dest=self.application.export_dir)
+        if desc_type == "vnfd":
+            pkg = tosca.ExportTosca()
+            vnfd_id = desc_msg.id
+            pkg.add_single_vnfd(vnfd_id,
+                                desc_msg,
+                                get_pkg_from_store(vnfd_id, "vnfd"))
+
+            # Create the archive.
+            pkg.create_archive(transaction_id,
+                               dest=self.application.export_dir)
 
 
 class ExportStateHandler(state.StateHandler):