self.load_archive()
@classmethod
- def from_package(cls, log, pkg, tar_file_hdl):
+ def from_package(cls, log, pkg, tar_file_hdl, top_level_dir=None):
""" Creates a TarPackageArchive from a existing Package
Arguments:
log - logger
pkg - a DescriptorPackage instance
tar_file_hdl - a writeable file handle to write tar archive data
+ top_level_dir - (opt.) top level dir under which the archive will be extracted
Returns:
A TarPackageArchive instance
tar_info.gname = "rift"
archive = TarPackageArchive(log, tar_file_hdl, mode='w:gz')
+
for pkg_file in pkg.files:
- tar_info = tarfile.TarInfo(name=pkg_file)
+ filename = "%s/%s" % (top_level_dir, pkg_file) if top_level_dir else pkg_file
+ tar_info = tarfile.TarInfo(name=filename)
tar_info.type = tarfile.REGTYPE
tar_info.mode = pkg.get_file_mode(pkg_file)
set_common_tarinfo_fields(tar_info)
archive.tarfile.addfile(tar_info, pkg_file_hdl)
for pkg_dir in pkg.dirs:
- tar_info = tarfile.TarInfo(name=pkg_dir)
+ dirname = "%s/%s" % (top_level_dir, pkg_dir) if top_level_dir else pkg_dir
+ tar_info = tarfile.TarInfo(name=dirname)
tar_info.type = tarfile.DIRTYPE
tar_info.mode = 0o775
set_common_tarinfo_fields(tar_info)
return self.descriptor_msg.name
+ @property
+ def descriptor_version(self):
+ desc_msg = self.descriptor_msg
+ return desc_msg.version if desc_msg.has_field("version") else ''
+
+ @property
+ def descriptor_vendor(self):
+ desc_msg = self.descriptor_msg
+ return desc_msg.vendor if desc_msg.has_field("vendor") else ''
+
@classmethod
def get_descriptor_patterns(cls):
""" Returns a tuple of descriptor regex and Package Types """
# Copy the contents of the file to the correct path
# For folder creation (or nested folders), dest_file appears w/ trailing "/" like: dir1/ or dir1/dir2/
# For regular file upload, dest_file appears as dir1/abc.txt
+
dest_dir_path = os.path.dirname(dest_file)
if not os.path.isdir(dest_dir_path):
os.makedirs(dest_dir_path)
raise PackageError("Empty file name added")
if rel_path not in self._package_file_mode_map:
- raise PackageError("File %s does not in package" % rel_path)
+ raise PackageError("File %s does not exist in package" % rel_path)
del self._package_file_mode_map[rel_path]
def serializer(self):
return VnfdPackage.SERIALIZER
+class PackageConstructValidator(object):
+
+ def __init__(self, log):
+ self._log = log
+
+ def validate(self, package):
+ """ Validate presence of descriptor file (.yaml) at the top level in the
+ package folder structure.
+
+ Arguments:
+ package - The Descriptor Package being validated.
+ Returns:
+ None
+ Raises:
+ PackageValidationError - The package validation failed for some
+ generic reason.
+ """
+        desc_file = package.descriptor_file
+ prefix, desc_file = package.prefix.rstrip('/'), desc_file.rstrip('/')
+
+ if os.path.dirname(desc_file) != prefix:
+            msg = "Descriptor file {} not found in expected location {}".format(desc_file, prefix)
+ self._log.error(msg)
+ raise PackageValidationError(msg)
+
class PackageChecksumValidator(object):
""" This class uses the checksums.txt file in the package
def __init__(self, log):
self._log = log
+ self.validated_file_checksums = {}
@classmethod
def get_package_checksum_file(cls, package):
return checksum_file
+ @property
+ def checksums(self):
+ return self.validated_file_checksums
+
def validate(self, package):
""" Validate file checksums match that in the checksums.txt
PackageFileChecksumError - A file within the package did not match the
checksum within checksums.txt
"""
- validated_file_checksums = {}
try:
checksum_file = PackageChecksumValidator.get_package_checksum_file(package)
self._log.error(msg)
raise PackageFileChecksumError(pkg_file)
- validated_file_checksums[pkg_file] = file_checksum
-
- return validated_file_checksums
+ self.validated_file_checksums[pkg_file] = file_checksum
class TarPackageArchive(object):
import stat
import time
import uuid
+import collections
+import json
import tornado.web
def __init__(self, log):
self._log = log
- def _create_archive_from_package(self, archive_hdl, package, open_fn):
+ def _create_archive_from_package(self, archive_hdl, package, open_fn, top_level_dir=None):
orig_open = package.open
try:
package.open = open_fn
archive = rift.package.archive.TarPackageArchive.from_package(
- self._log, package, archive_hdl
+ self._log, package, archive_hdl, top_level_dir
)
return archive
finally:
return open_fn(rel_path)
- archive = self._create_archive_from_package(archive_hdl, package, open_wrapper)
+ archive = self._create_archive_from_package(archive_hdl, package, open_wrapper, new_desc_msg.name)
return archive
class ExportRpcHandler(mano_dts.AbstractRpcHandler):
- def __init__(self, log, dts, loop, application, store_map, exporter, catalog_map):
+ def __init__(self, log, dts, loop, application, store_map, exporter, onboarder, catalog_map):
"""
Args:
application: UploaderApplication
self.application = application
self.store_map = store_map
self.exporter = exporter
+ self.onboarder = onboarder
self.catalog_map = catalog_map
self.log = log
# Get the format for exporting
format_ = msg.export_format.lower()
- filename = None
+ # Initial value of the exported filename
+ self.filename = "{name}_{ver}".format(
+ name=desc_msg.name,
+ ver=desc_msg.version)
if grammar == 'tosca':
- filename = "{}.zip".format(transaction_id)
self.export_tosca(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
+ filename = "{}.zip".format(self.filename)
log.message(message.FilenameMessage(filename))
else:
- filename = "{}.tar.gz".format(transaction_id)
self.export_rift(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
+ filename = "{}.tar.gz".format(self.filename)
log.message(message.FilenameMessage(filename))
log.message(ExportSuccess())
"nsd": convert.RwNsdSerializer,
},
"mano": {
- "vnfd": convert.VnfdSerializer,
- "nsd": convert.NsdSerializer,
+ "vnfd": convert.RwVnfdSerializer,
+ "nsd": convert.RwNsdSerializer,
}
}
log, hdl
)
+ # Try to get the updated descriptor from the api endpoint so that we have
+ # the updated descriptor file in the exported archive and the name of the archive
+ # tar matches the name in the yaml descriptor file. Proceed with the current
+ # file if there's an error
+ #
+ json_desc_msg = src_serializer.to_json_string(desc_msg)
+ desc_name, desc_version = desc_msg.name, desc_msg.version
+ try:
+ d = collections.defaultdict(dict)
+ sub_dict = self.onboarder.get_updated_descriptor(desc_msg)
+ root_key, sub_key = "{0}:{0}-catalog".format(desc_type), "{0}:{0}".format(desc_type)
+ # root the dict under "vnfd:vnfd-catalog"
+ d[root_key] = sub_dict
+
+ json_desc_msg = json.dumps(d)
+ desc_name, desc_version = sub_dict[sub_key]['name'], sub_dict[sub_key]['version']
+
+ except Exception as e:
+ msg = "Exception {} raised - {}".format(e.__class__.__name__, str(e))
+ self.log.debug(msg)
+
+ # exported filename based on the updated descriptor name
+ self.filename = "{}_{}".format(desc_name, desc_version)
+
self.exporter.export_package(
package=package,
export_dir=self.application.export_dir,
- file_id=transaction_id,
- json_desc_str=src_serializer.to_json_string(desc_msg),
+ file_id = self.filename,
+ json_desc_str=json_desc_msg,
dest_serializer=dest_serializer,
)
self._log.error(msg)
raise OnboardError(msg) from e
+ def get_updated_descriptor(self, descriptor_msg, auth=None):
+ """ Get updated descriptor file
+
+ Arguments:
+ descriptor_msg - A descriptor proto-gi msg
+ auth - the authorization header
+
+ Raises:
+ OnboardError - The descriptor retrieval failed
+ """
+
+ if type(descriptor_msg) not in DescriptorOnboarder.DESC_SERIALIZER_MAP:
+ raise TypeError("Invalid descriptor message type")
+
+ endpoint = DescriptorOnboarder.DESC_ENDPOINT_MAP[type(descriptor_msg)]
+
+ url = "{}://{}:{}/api/config/{}/{}".format(
+ "https" if self._use_ssl else "http",
+ self._host,
+ self.port,
+ endpoint,
+ descriptor_msg.id
+ )
+
+ hdrs = self._get_headers(auth)
+ hdrs.update({'Accept': 'application/json'})
+ request_args = dict(
+ url=url,
+ headers=hdrs,
+ auth=DescriptorOnboarder.AUTH,
+ verify=False,
+ cert=(self._ssl_cert, self._ssl_key) if self._use_ssl else None,
+ timeout=self.timeout,
+ )
+
+ response = None
+ try:
+ response = requests.get(**request_args)
+ response.raise_for_status()
+ except requests.exceptions.ConnectionError as e:
+ msg = "Could not connect to restconf endpoint: %s" % str(e)
+ self._log.error(msg)
+ raise OnboardError(msg) from e
+ except requests.exceptions.HTTPError as e:
+ msg = "GET request to %s error: %s" % (request_args["url"], response.text)
+ self._log.error(msg)
+ raise OnboardError(msg) from e
+ except requests.exceptions.Timeout as e:
+            msg = "Timed out connecting to restconf endpoint: %s" % str(e)
+ self._log.error(msg)
+ raise OnboardError(msg) from e
+
+ return response.json()
+
OnboardError("Cloud-Init file reference in VNFD does not match with cloud-init file"))
def validate_package(self, package):
- checksum_validator = rift.package.package.PackageChecksumValidator(self.log)
+ validators = (
+ rift.package.package.PackageChecksumValidator(self.log),
+ rift.package.package.PackageConstructValidator(self.log),
+ )
- try:
- file_checksums = checksum_validator.validate(package)
- except rift.package.package.PackageFileChecksumError as e:
- raise MessageException(OnboardChecksumMismatch(e.filename)) from e
- except rift.package.package.PackageValidationError as e:
- raise MessageException(OnboardUnreadablePackage()) from e
+ # Run the validators for checksum and package construction for imported pkgs
+ for validator in validators:
+ try:
+ validator.validate(package)
- return file_checksums
+ except rift.package.package.PackageFileChecksumError as e:
+ raise MessageException(OnboardChecksumMismatch(e.filename)) from e
+ except rift.package.package.PackageValidationError as e:
+ raise MessageException(OnboardUnreadablePackage()) from e
+
+ return validators[0].checksums
def onboard_descriptors(self, package):
descriptor_msg = package.descriptor_msg
self,
store_map=self.package_store_map,
exporter=self.exporter,
+ onboarder=self.onboarder,
catalog_map=catalog_map
)
store = self.proxy._get_store(self.package_type)
src_path = store._get_package_dir(self.src_package_id)
self.src_package = store.get_package(self.src_package_id)
- src_desc_name = self.src_package.descriptor_name
- src_copy_path = os.path.join(src_path, src_desc_name)
- self.dest_copy_path = os.path.join(store.DEFAULT_ROOT_DIR,
- self.dest_package_id,
- self.dest_package_name)
+ self.dest_copy_path = os.path.join(
+ store.DEFAULT_ROOT_DIR,
+ self.dest_package_id)
self.log.debug("Copying contents from {src} to {dest}".
- format(src=src_copy_path, dest=self.dest_copy_path))
+ format(src=src_path, dest=self.dest_copy_path))
- shutil.copytree(src_copy_path, self.dest_copy_path)
+ shutil.copytree(src_path, self.dest_copy_path)
def _create_descriptor_file(self):
""" Update descriptor file for the newly copied descriptor catalog.
rpc_input.package_id,
rpc_input.package_path,
rpc_input.package_type,
+ rpc_input.vnfd_file_type,
+ rpc_input.nsd_file_type,
auth=auth,
proxy=proxy,
file_obj=file_obj,
package_id,
package_path,
package_type,
+ vnfd_file_type,
+ nsd_file_type,
proxy,
file_obj=None,
delete_on_fail=True,
self.package_id = package_id
self.package_type = package_type
self.package_path = package_path
+ self.package_file_type = vnfd_file_type.lower() if vnfd_file_type else nsd_file_type.lower()
self.proxy = proxy
def convert_to_yang(self):
self.meta.filepath,
self.package_type,
self.package_id,
- self.package_path)
+ self.package_path,
+ self.package_file_type)
except Exception as e:
self.log.exception(e)
return self.SCHEMA[package_type]
- def package_file_add(self, new_file, package_type, package_id, package_path):
+ def package_file_add(self, new_file, package_type, package_id, package_path, package_file_type):
# Get the schema from thr package path
# the first part will always be the vnfd/nsd name
mode = 0o664
+
+ # for files other than README, create the package path from the asset type
+ package_path = package_file_type + "/" + package_path \
+ if package_file_type != "readme" else package_path
components = package_path.split("/")
if len(components) > 2:
schema = components[1]
# Construct abs path of the destination obj
path = store._get_package_dir(package_id)
- dest_file = os.path.join(path, package_path)
+ dest_file = os.path.join(path, package.prefix, package_path)
try:
package.insert_file(new_file, dest_file, package_path, mode=mode)
return True
- def package_file_delete(self, package_type, package_id, package_path):
+ def package_file_delete(self, package_type, package_id, package_path, package_file_type):
package_type = package_type.lower()
store = self._get_store(package_type)
package = store.get_package(package_id)
+ # for files other than README, create the package path from the asset type
+ package_path = package_file_type + "/" + package_path \
+ if package_file_type != "readme" else package_path
+
# package_path has to be relative, so strip off the starting slash if
# provided incorrectly.
if package_path[0] == "/":
# Construct abs path of the destination obj
path = store._get_package_dir(package_id)
- dest_file = os.path.join(path, package_path)
+ dest_file = os.path.join(path, package.prefix, package_path)
try:
package.delete_file(dest_file, package_path)
rpc_op = RPC_PACKAGE_DELETE_ENDPOINT.from_dict({"status": str(True)})
try:
+ package_file_type = msg.vnfd_file_type.lower() \
+ if msg.vnfd_file_type else msg.nsd_file_type.lower()
self.proxy.package_file_delete(
msg.package_type,
msg.package_id,
- msg.package_path)
+ msg.package_path,
+ package_file_type)
except Exception as e:
self.log.exception(e)
rpc_op.status = str(False)
descriptor.log.debug("Skpping folder creation, {} already present".format(download_dir))
return
else:
- download_dir = os.path.join(download_dir, desc_name)
+ # Folder structure is based on top-level package-id directory
if not os.path.exists(download_dir):
os.makedirs(download_dir)
descriptor.log.debug("Created directory {}".format(download_dir))
}
}
+ typedef package-file-type {
+ type enumeration {
+ enum ICONS;
+ enum CHARMS;
+ enum SCRIPTS;
+ enum IMAGES;
+ enum CLOUD_INIT;
+ enum README;
+ }
+ }
+
+ typedef vnfd-file-type {
+ type enumeration {
+ enum ICONS;
+ enum CHARMS;
+ enum SCRIPTS;
+ enum IMAGES;
+ enum CLOUD_INIT;
+ enum README;
+ }
+ }
+
+ typedef nsd-file-type {
+ type enumeration {
+ enum VNF_CONFIG;
+ enum NS_CONFIG;
+ enum ICONS;
+ enum SCRIPTS;
+ }
+ }
+
typedef export-schema {
type enumeration {
enum RIFT;
input {
uses package-file-identifer;
uses external-url-data;
+
+ choice catalog-type {
+ mandatory true;
+ case VNFD {
+ leaf vnfd-file-type {
+ description "Type of vnfd file being added to the package";
+ type vnfd-file-type;
+ }
+ }
+ case NSD {
+ leaf nsd-file-type {
+ description "Type of nsd file being added to the package";
+ type nsd-file-type;
+ }
+ }
+ }
+
}
output {
input {
uses package-file-identifer;
+ choice catalog-type {
+ case VNFD {
+ leaf vnfd-file-type {
+ description "Type of file being removed from the vnfd package";
+ type vnfd-file-type;
+ }
+ }
+ case NSD {
+ leaf nsd-file-type {
+ description "Type of file being removed from the nsd package";
+ type nsd-file-type;
+ }
+ }
+ }
+
}
output {