Fix Bug 1656: repo generation from osm-packages
Change-Id: I129596c21a6d36a1e7d0d184f77bf52fc27ea826
Signed-off-by: gomezchavez <guillermo.gomez.external@atos.net>
diff --git a/osmclient/common/package_tool.py b/osmclient/common/package_tool.py
index 3dedccd..186d486 100644
--- a/osmclient/common/package_tool.py
+++ b/osmclient/common/package_tool.py
@@ -149,8 +149,8 @@
descriptors_paths = [
f for f in glob.glob(base_directory + "/*.yaml", recursive=recursive)
]
- print("Base directory: {}".format(base_directory))
- print("{} Descriptors found to validate".format(len(descriptors_paths)))
+ self._logger.info("Base directory: {}".format(base_directory))
+ self._logger.info("{} Descriptors found to validate".format(len(descriptors_paths)))
for desc_path in descriptors_paths:
with open(desc_path) as descriptor_file:
descriptor_data = descriptor_file.read()
@@ -159,17 +159,18 @@
desc_type, descriptor_data = validation_im.yaml_validation(
self, descriptor_data
)
+ self._logger.debug(f"Validate {desc_type} {descriptor_data}")
if not old_format:
if desc_type == "vnfd" or desc_type == "nsd":
- print(
+ self._logger.error(
"OSM descriptor '{}' written in an unsupported format. Please update to ETSI SOL006 format".format(
desc_path
)
)
- print(
+ self._logger.warning(
"Package validation skipped. It can still be done with 'osm package-validate --old'"
)
- print(
+ self._logger.warning(
"Package build can still be done with 'osm package-build --skip-validation'"
)
raise Exception("Not SOL006 format")
@@ -186,6 +187,7 @@
"error": str(e),
}
)
+ self._logger.debug(table[-1])
return table
def translate(self, base_directory, recursive=True, dryrun=False):
diff --git a/osmclient/sol005/osmrepo.py b/osmclient/sol005/osmrepo.py
index 424865c..3568e8c 100644
--- a/osmclient/sol005/osmrepo.py
+++ b/osmclient/sol005/osmrepo.py
@@ -92,10 +92,11 @@
"repository in url {} unreachable".format(repository.get("url"))
)
except Exception as e:
- logging.error(
+ self._logger.error(
"Error cannot read from repository {} '{}': {}".format(
repository["name"], repository["url"], e
- )
+ ),
+ exc_info=True
)
continue
@@ -170,40 +171,52 @@
:param origin: origin directory for getting all the artifacts
:param destination: destination folder for create and index the valid artifacts
"""
- self._logger.debug("")
+ self._logger.debug("Starting index composition")
if destination == ".":
if origin == destination:
destination = "repository"
destination = abspath(destination)
origin = abspath(origin)
-
+ self._logger.debug(f"Paths {destination}, {origin}")
if origin[0] != "/":
origin = join(getcwd(), origin)
if destination[0] != "/":
destination = join(getcwd(), destination)
self.init_directory(destination)
- artifacts = [f for f in listdir(origin) if isfile(join(origin, f))]
- directories = [f for f in listdir(origin) if isdir(join(origin, f))]
+ artifacts = []
+ directories = []
+ for f in listdir(origin):
+ if isfile(join(origin, f)) and f.endswith('.tar.gz'):
+ artifacts.append(f)
+ elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
+ directories.append(f) # TODO: Document that nested directories are not supported
+ else:
+ self._logger.debug(f"Ignoring {f}")
for artifact in artifacts:
self.register_artifact_in_repository(
- join(origin, artifact), destination, source="file"
+ join(origin, artifact), destination, source="artifact"
)
for artifact in directories:
self.register_artifact_in_repository(
join(origin, artifact), destination, source="directory"
)
- print("\nFinal Results: ")
- print(
+ self._logger.info("\nFinal Results: ")
+ self._logger.info(
"VNF Packages Indexed: "
+ str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
)
- print(
+ self._logger.info(
"NS Packages Indexed: "
+ str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
)
+ self._logger.info(
+ "NST Packages Indexed: "
+ + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
+ )
+
def md5(self, fname):
"""
Checksum generator
@@ -222,45 +235,81 @@
From an artifact descriptor, obtain the fields required for indexing
:param descriptor_dict: artifact description
:param file: artifact package
- :param package_type: type of artifact (vnf or ns)
+ :param package_type: type of artifact (vnf, ns, nst)
:return: fields
"""
self._logger.debug("")
+
fields = {}
base_path = "/{}/".format(package_type)
aux_dict = {}
if package_type == "vnf":
if descriptor_dict.get("vnfd-catalog", False):
aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
+ elif descriptor_dict.get("vnfd:vnfd-catalog"):
+ aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
+ elif descriptor_dict.get("vnfd"):
+ aux_dict = descriptor_dict["vnfd"]
+ if aux_dict.get("vnfd"):
+ aux_dict = aux_dict['vnfd'][0]
else:
- aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
- "vnfd", [{}]
- )[0]
-
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
+ self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
images = []
- for vdu in aux_dict.get("vdu", ()):
- images.append(vdu.get("image"))
+ for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
+ images.append(vdu.get("image", vdu.get('name')))
fields["images"] = images
- if package_type == "ns":
+ elif package_type == "ns":
if descriptor_dict.get("nsd-catalog", False):
aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
+ elif descriptor_dict.get("nsd:nsd-catalog"):
+ aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
+ elif descriptor_dict.get("nsd"):
+ aux_dict = descriptor_dict['nsd']
+ if aux_dict.get("nsd"):
+ aux_dict = descriptor_dict["nsd"]["nsd"][0]
else:
- aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
- 0
- ]
-
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
vnfs = []
-
- for vnf in aux_dict.get("constituent-vnfd", ()):
- vnfs.append(vnf.get("vnfd-id-ref"))
+ if aux_dict.get("constituent-vnfd"):
+ for vnf in aux_dict.get("constituent-vnfd", ()):
+ vnfs.append(vnf.get("vnfd-id-ref"))
+ else:
+ vnfs = aux_dict.get('vnfd-id')
self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
fields["vnfd-id-ref"] = vnfs
+ elif package_type == 'nst':
+ if descriptor_dict.get("nst-catalog", False):
+ aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
+ elif descriptor_dict.get("nst:nst-catalog"):
+ aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
+ elif descriptor_dict.get("nst"):
+ aux_dict = descriptor_dict['nst']
+ if aux_dict.get("nst"):
+ aux_dict = descriptor_dict["nst"]["nst"][0]
+ nsds = []
+ for nsd in aux_dict.get("netslice-subnet", ()):
+ nsds.append(nsd.get("nsd-ref"))
+ self._logger.debug("Used NSDs in the NST: " + str(nsds))
+ if not nsds:
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
+ fields["nsd-id-ref"] = nsds
+ else:
+ msg = f"Unexpected descriptor format {descriptor_dict}"
+ self._logger.error(msg)
+ raise ValueError(msg)
fields["name"] = aux_dict.get("name")
fields["id"] = aux_dict.get("id")
fields["description"] = aux_dict.get("description")
fields["vendor"] = aux_dict.get("vendor")
- fields["version"] = aux_dict.get("version", "1.0")
+ fields["version"] = str(aux_dict.get("version", "1.0"))
fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
base_path,
fields["id"],
@@ -292,9 +341,10 @@
"""
Validation of artifact.
:param path: file path
+ :param source: flag to select the correct file type (directory or artifact)
:return: status details, status, fields, package_type
"""
- self._logger.debug("")
+ self._logger.debug(f"Validating {path} {source}")
package_type = ""
folder = ""
try:
@@ -307,18 +357,28 @@
with open(descriptor_file, "r") as f:
descriptor_data = f.read()
+ self._logger.debug(f"Descriptor data: {descriptor_data}")
validation = validation_im()
desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
- validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
- if "vnf" in list(descriptor_dict.keys())[0]:
+ try:
+ validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
+ except Exception as e:
+ self._logger.error(e, exc_info=True)
+ raise e
+ descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
+ if "vnf" in descriptor_type_ref:
package_type = "vnf"
- else:
- # raise ClientException("Not VNF package")
+ elif "nst" in descriptor_type_ref:
+ package_type = "nst"
+ elif "ns" in descriptor_type_ref:
package_type = "ns"
-
+ else:
+ msg = f"Unknown package type {descriptor_type_ref}"
+ self._logger.error(msg)
+ raise ValueError(msg)
self._logger.debug("Descriptor: {}".format(descriptor_dict))
fields = self.fields_building(descriptor_dict, path, package_type)
- self._logger.debug("Descriptor sucessfully validated")
+ self._logger.debug(f"Descriptor successfully validated {fields}")
return (
{
"detail": "{}D successfully validated".format(package_type.upper()),
@@ -338,12 +398,13 @@
def register_artifact_in_repository(self, path, destination, source):
"""
Registration of one artifact in a repository
- file: VNF or NS
- destination: path for index creation
+        :param path: path of the artifact (tar.gz package file or package directory)
+        :param destination: path for index creation
+        :param source: flag to select the correct file type (directory or artifact)
"""
self._logger.debug("")
pt = PackageTool()
- compresed = False
+ compressed = False
try:
fields = {}
_, valid, fields, package_type = self.validate_artifact(path, source)
@@ -354,7 +415,8 @@
else:
if source == "directory":
path = pt.build(path)
- compresed = True
+ self._logger.debug(f"Directory path {path}")
+ compressed = True
fields["checksum"] = self.md5(path)
self.indexation(destination, path, package_type, fields)
@@ -365,7 +427,7 @@
raise ClientException(e)
finally:
- if source == "directory" and compresed:
+ if source == "directory" and compressed:
remove(path)
def indexation(self, destination, path, package_type, fields):
@@ -373,17 +435,18 @@
Process for index packages
:param destination: index repository path
:param path: path of the package
- :param package_type: package type (vnf, ns)
+ :param package_type: package type (vnf, ns, nst)
:param fields: dict with the required values
"""
- self._logger.debug("")
+ self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
+
data_ind = {
"name": fields.get("name"),
"description": fields.get("description"),
"vendor": fields.get("vendor"),
"path": fields.get("path"),
}
-
+ self._logger.debug(data_ind)
final_path = join(
destination, package_type, fields.get("id"), fields.get("version")
)
@@ -473,7 +536,7 @@
"{} {} added in the repository".format(package_type.upper(), str(path))
)
- def current_datatime(self):
+ def current_datetime(self):
"""
Datetime Generator
:return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
@@ -493,11 +556,13 @@
if not isfile(join(destination, "index.yaml")):
mkdir(join(destination, "vnf"))
mkdir(join(destination, "ns"))
+ mkdir(join(destination, "nst"))
index_data = {
"apiVersion": "v1",
- "generated": self.current_datatime(),
+ "generated": self.current_datetime(),
"vnf_packages": {},
"ns_packages": {},
+ "nst_packages": {},
}
with open(join(destination, "index.yaml"), "w") as outfile:
yaml.safe_dump(
diff --git a/osmclient/sol005/tests/test_osmrepo.py b/osmclient/sol005/tests/test_osmrepo.py
new file mode 100644
index 0000000..f337ae9
--- /dev/null
+++ b/osmclient/sol005/tests/test_osmrepo.py
@@ -0,0 +1,33 @@
+# Copyright 2021 ATOS.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import shutil
+from pathlib import Path
+
+from osmclient.sol005.osmrepo import OSMRepo
+
+
+class TestOSMRepo(unittest.TestCase):
+ def setUp(self):
+ self.repo = OSMRepo()
+
+ def test_init_repo_structure(self):
+ # TODO: Mock filesystem after refactoring from os to pathlib
+ # TODO: Mock OSM IM repo if possible
+ repo_base = Path(__file__).parent / Path("test_repo")
+ expected_index_file_path = repo_base / Path("index.yaml")
+ self.repo.init_directory(str(repo_base))
+ self.assertTrue(expected_index_file_path.exists())
+ shutil.rmtree(expected_index_file_path.parent)
diff --git a/osmclient/sol005/tests/test_vca.py b/osmclient/sol005/tests/test_vca.py
index 25a2aeb..aa5eb2d 100644
--- a/osmclient/sol005/tests/test_vca.py
+++ b/osmclient/sol005/tests/test_vca.py
@@ -55,7 +55,7 @@
self.vca._http.post_cmd.assert_called()
mock_print.assert_not_called()
- def test_update_sucess(self):
+ def test_update_success(self):
self.vca.get = Mock()
self.vca.get.return_value = {"_id": "1234"}
self.vca.update("vca_name", self.vca_data)