"repository in url {} unreachable".format(repository.get("url"))
)
except Exception as e:
- logging.error(
+ self._logger.error(
"Error cannot read from repository {} '{}': {}".format(
repository["name"], repository["url"], e
- )
+ ),
+ exc_info=True
)
continue
- :param origin: origin directory for getting all the artifacts
- :param destination: destination folder for create and index the valid artifacts
+ :param origin: origin directory containing the artifacts to index
+ :param destination: destination folder where the index of valid artifacts is created
"""
- self._logger.debug("")
+ self._logger.debug("Starting index composition")
if destination == ".":
if origin == destination:
destination = "repository"
destination = abspath(destination)
origin = abspath(origin)
-
+ self._logger.debug(f"Destination: {destination}, origin: {origin}")
if origin[0] != "/":
origin = join(getcwd(), origin)
if destination[0] != "/":
destination = join(getcwd(), destination)
self.init_directory(destination)
- artifacts = [f for f in listdir(origin) if isfile(join(origin, f))]
- directories = [f for f in listdir(origin) if isdir(join(origin, f))]
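+ # Only .tar.gz packages and non-hidden directories (excluding the destination folder itself) are picked up for indexing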
+ artifacts = []
+ directories = []
+ for f in listdir(origin):
+ if isfile(join(origin, f)) and f.endswith(".tar.gz"):
+ artifacts.append(f)
+ elif isdir(join(origin, f)) and f != destination.split("/")[-1] and not f.startswith("."):
+ directories.append(f)  # TODO: Document that nested directories are not supported
+ else:
+ self._logger.debug(f"Ignoring {f}")
for artifact in artifacts:
self.register_artifact_in_repository(
- join(origin, artifact), destination, source="file"
+ join(origin, artifact), destination, source="artifact"
)
for artifact in directories:
self.register_artifact_in_repository(
join(origin, artifact), destination, source="directory"
)
- print("\nFinal Results: ")
- print(
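+ # Each indexed package version leaves a metadata.yaml under <destination>/<type>/<id>/<version>/, so counting those files gives the totals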
+ self._logger.info("Final Results:")
+ self._logger.info(
"VNF Packages Indexed: "
+ str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
)
- print(
+ self._logger.info(
"NS Packages Indexed: "
+ str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
)
+ self._logger.info(
+ "NST Packages Indexed: "
+ + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
+ )
+
def md5(self, fname):
"""
Checksum generator
From an artifact descriptor, obtain the fields required for indexing
:param descriptor_dict: artifact description
:param file: artifact package
- :param package_type: type of artifact (vnf or ns)
+ :param package_type: type of artifact (vnf, ns, nst)
:return: fields
"""
self._logger.debug("")
+
fields = {}
base_path = "/{}/".format(package_type)
aux_dict = {}
if package_type == "vnf":
if descriptor_dict.get("vnfd-catalog", False):
aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
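+ # Besides the classic catalog wrappers, accept SOL006-style descriptors with a plain top-level "vnfd" key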
+ elif descriptor_dict.get("vnfd:vnfd-catalog"):
+ aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
+ elif descriptor_dict.get("vnfd"):
+ aux_dict = descriptor_dict["vnfd"]
+ if aux_dict.get("vnfd"):
+ aux_dict = aux_dict["vnfd"][0]
else:
- aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
- "vnfd", [{}]
- )[0]
-
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
+ self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
images = []
- for vdu in aux_dict.get("vdu", ()):
- images.append(vdu.get("image"))
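+ # Packages based on kdu (KNF) have no vdu list and may not declare an image, so fall back to the unit name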
+ for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
+ images.append(vdu.get("image", vdu.get("name")))
fields["images"] = images
- if package_type == "ns":
+ elif package_type == "ns":
if descriptor_dict.get("nsd-catalog", False):
aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
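+ # Likewise accept NSDs that use the namespaced catalog or a plain top-level "nsd" key (SOL006 style)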
+ elif descriptor_dict.get("nsd:nsd-catalog"):
+ aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
+ elif descriptor_dict.get("nsd"):
+ aux_dict = descriptor_dict["nsd"]
+ if aux_dict.get("nsd"):
+ aux_dict = descriptor_dict["nsd"]["nsd"][0]
else:
- aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
- 0
- ]
-
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
vnfs = []
-
- for vnf in aux_dict.get("constituent-vnfd", ()):
- vnfs.append(vnf.get("vnfd-id-ref"))
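+ # Classic NSDs reference member VNFDs through constituent-vnfd; SOL006-style NSDs list them directly under vnfd-id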
+ if aux_dict.get("constituent-vnfd"):
+ for vnf in aux_dict.get("constituent-vnfd", ()):
+ vnfs.append(vnf.get("vnfd-id-ref"))
+ else:
+ vnfs = aux_dict.get("vnfd-id")
self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
fields["vnfd-id-ref"] = vnfs
+ elif package_type == "nst":
+ if descriptor_dict.get("nst-catalog", False):
+ aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
+ elif descriptor_dict.get("nst:nst-catalog"):
+ aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
+ elif descriptor_dict.get("nst"):
+ aux_dict = descriptor_dict["nst"]
+ if aux_dict.get("nst"):
+ aux_dict = descriptor_dict["nst"]["nst"][0]
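+ # An NST is indexed by the NSDs referenced from its netslice-subnet entries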
+ nsds = []
+ for nsd in aux_dict.get("netslice-subnet", ()):
+ nsds.append(nsd.get("nsd-ref"))
+ self._logger.debug("Used NSDs in the NST: " + str(nsds))
+ if not nsds:
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
+ fields["nsd-id-ref"] = nsds
+ else:
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
fields["name"] = aux_dict.get("name")
fields["id"] = aux_dict.get("id")
fields["description"] = aux_dict.get("description")
fields["vendor"] = aux_dict.get("vendor")
- fields["version"] = aux_dict.get("version", "1.0")
+ fields["version"] = str(aux_dict.get("version", "1.0"))
fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
base_path,
fields["id"],
"""
Validation of artifact.
:param path: file path
+ :param source: flag to select the correct file type (directory or artifact)
:return: status details, status, fields, package_type
"""
- self._logger.debug("")
+ self._logger.debug(f"Validating {path} {source}")
package_type = ""
folder = ""
try:
with open(descriptor_file, "r") as f:
descriptor_data = f.read()
+ self._logger.debug(f"Descriptor data: {descriptor_data}")
validation = validation_im()
desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
- validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
- if "vnf" in list(descriptor_dict.keys())[0]:
+ try:
+ validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
+ except Exception as e:
+ self._logger.error(e, exc_info=True)
+ raise e
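+ # Infer the package type from the top-level descriptor key; "nst" must be checked before "ns" because the latter is a substring of the former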
+ descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
+ if "vnf" in descriptor_type_ref:
package_type = "vnf"
- else:
- # raise ClientException("Not VNF package")
+ elif "nst" in descriptor_type_ref:
+ package_type = "nst"
+ elif "ns" in descriptor_type_ref:
package_type = "ns"
-
+ else:
+ msg = f"Unknown package type {descriptor_type_ref}"
+ self._logger.error(msg)
+ raise ValueError(msg)
self._logger.debug("Descriptor: {}".format(descriptor_dict))
fields = self.fields_building(descriptor_dict, path, package_type)
- self._logger.debug("Descriptor sucessfully validated")
+ self._logger.debug(f"Descriptor successfully validated {fields}")
return (
{
"detail": "{}D successfully validated".format(package_type.upper()),
def register_artifact_in_repository(self, path, destination, source):
"""
Registration of one artifact in a repository
- file: VNF or NS
- destination: path for index creation
+ :param path: path to the artifact (package file or directory)
+ :param destination: path for index creation
+ :param source: flag to select the correct file type (directory or artifact)
"""
self._logger.debug("")
pt = PackageTool()
- compresed = False
+ compressed = False
try:
fields = {}
_, valid, fields, package_type = self.validate_artifact(path, source)
else:
if source == "directory":
path = pt.build(path)
- compresed = True
+ self._logger.debug(f"Directory path {path}")
+ compressed = True
fields["checksum"] = self.md5(path)
self.indexation(destination, path, package_type, fields)
raise ClientException(e)
finally:
- if source == "directory" and compresed:
+ if source == "directory" and compressed:
remove(path)
def indexation(self, destination, path, package_type, fields):
- Process for index packages
+ Process for indexing packages
:param destination: index repository path
:param path: path of the package
- :param package_type: package type (vnf, ns)
+ :param package_type: package type (vnf, ns, nst)
:param fields: dict with the required values
"""
- self._logger.debug("")
+ self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
+
data_ind = {
"name": fields.get("name"),
"description": fields.get("description"),
"vendor": fields.get("vendor"),
"path": fields.get("path"),
}
-
+ self._logger.debug(data_ind)
final_path = join(
destination, package_type, fields.get("id"), fields.get("version")
)
"{} {} added in the repository".format(package_type.upper(), str(path))
)
- def current_datatime(self):
+ def current_datetime(self):
"""
Datetime Generator
:return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
if not isfile(join(destination, "index.yaml")):
mkdir(join(destination, "vnf"))
mkdir(join(destination, "ns"))
+ mkdir(join(destination, "nst"))
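+ # The index root keeps one sub-folder per package type plus a top-level index.yaml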
index_data = {
"apiVersion": "v1",
- "generated": self.current_datatime(),
+ "generated": self.current_datetime(),
"vnf_packages": {},
"ns_packages": {},
+ "nst_packages": {},
}
with open(join(destination, "index.yaml"), "w") as outfile:
yaml.safe_dump(