Bug 1862: Fixed required field in SOL006 IM to generate OSM repo index
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
import glob
import logging
import tarfile
import tempfile
import time
from datetime import datetime, timezone
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree

import requests
import yaml
from osm_im.validation import Validation as validation_im
from packaging import version as versioning

from osmclient.common import utils
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
35
36
37 class OSMRepo(Repo):
38 def __init__(self, http=None, client=None):
39 self._http = http
40 self._client = client
41 self._apiName = "/admin"
42 self._apiVersion = "/v1"
43 self._apiResource = "/osmrepos"
44 self._logger = logging.getLogger("osmclient")
45 self._apiBase = "{}{}{}".format(
46 self._apiName, self._apiVersion, self._apiResource
47 )
48
49 def pkg_list(self, pkgtype, filter=None, repo=None):
50 """
51 Returns a repo based on name or id
52 """
53 self._logger.debug("")
54 self._client.get_token()
55 # Get OSM registered repository list
56 repositories = self.list()
57 if repo:
58 repositories = [r for r in repositories if r["name"] == repo]
59 if not repositories:
60 raise ClientException("Not repository found")
61
62 vnf_repos = []
63 for repository in repositories:
64 try:
65 r = requests.get("{}/index.yaml".format(repository.get("url")))
66
67 if r.status_code == 200:
68 repo_list = yaml.safe_load(r.text)
69 vnf_packages = repo_list.get("{}_packages".format(pkgtype))
70 for repo in vnf_packages:
71 versions = vnf_packages.get(repo)
72 latest = versions.get("latest")
73 del versions["latest"]
74 for version in versions:
75 latest_version = False
76 if version == latest:
77 latest_version = True
78 vnf_repos.append(
79 {
80 "vendor": versions[version].get("vendor"),
81 "name": versions[version].get("name"),
82 "version": version,
83 "description": versions[version].get("description"),
84 "location": versions[version].get("path"),
85 "repository": repository.get("name"),
86 "repourl": repository.get("url"),
87 "latest": latest_version,
88 }
89 )
90 else:
91 raise Exception(
92 "repository in url {} unreachable".format(repository.get("url"))
93 )
94 except Exception as e:
95 self._logger.error(
96 "Error cannot read from repository {} '{}': {}".format(
97 repository["name"], repository["url"], e
98 ),
99 exc_info=True,
100 )
101 continue
102
103 vnf_repos_filtered = []
104 if filter:
105 for vnf_repo in vnf_repos:
106 for k, v in vnf_repo.items():
107 if v:
108 kf, vf = filter.split("=")
109 if k == kf and vf in v:
110 vnf_repos_filtered.append(vnf_repo)
111 break
112 vnf_repos = vnf_repos_filtered
113 return vnf_repos
114
115 def get_pkg(self, pkgtype, name, repo, filter, version):
116 """
117 Returns the filename of the PKG downloaded to disk
118 """
119 self._logger.debug("")
120 self._client.get_token()
121 f = None
122 f_name = None
123 # Get OSM registered repository list
124 pkgs = self.pkg_list(pkgtype, filter, repo)
125 for pkg in pkgs:
126 if pkg.get("repository") == repo and pkg.get("name") == name:
127 if "latest" in version:
128 if not pkg.get("latest"):
129 continue
130 else:
131 version = pkg.get("version")
132 if pkg.get("version") == version:
133 r = requests.get(
134 "{}{}".format(pkg.get("repourl"), pkg.get("location")),
135 stream=True,
136 )
137 if r.status_code != 200:
138 raise ClientException("Package not found")
139
140 with tempfile.NamedTemporaryFile(delete=False) as f:
141 f.write(r.raw.read())
142 f_name = f.name
143 if not f_name:
144 raise ClientException(
145 "{} {} not found at repo {}".format(pkgtype, name, repo)
146 )
147 return f_name
148
149 def pkg_get(self, pkgtype, name, repo, version, filter):
150
151 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
152 if not pkg_name:
153 raise ClientException("Package not found")
154 folder, descriptor = self.zip_extraction(pkg_name)
155 with open(descriptor) as pkg:
156 pkg_descriptor = yaml.safe_load(pkg)
157 rmtree(folder, ignore_errors=False)
158 if (
159 pkgtype == "vnf"
160 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
161 ) or (
162 pkgtype == "ns"
163 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
164 ):
165 raise ClientException("Wrong Package type")
166 return pkg_descriptor
167
168 def repo_index(self, origin=".", destination="."):
169 """
170 Repo Index main function
171 :param origin: origin directory for getting all the artifacts
172 :param destination: destination folder for create and index the valid artifacts
173 """
174 self._logger.debug("Starting index composition")
175 if destination == ".":
176 if origin == destination:
177 destination = "repository"
178
179 destination = abspath(destination)
180 origin = abspath(origin)
181 self._logger.debug(f"Paths {destination}, {origin}")
182 if origin[0] != "/":
183 origin = join(getcwd(), origin)
184 if destination[0] != "/":
185 destination = join(getcwd(), destination)
186
187 self.init_directory(destination)
188 artifacts = []
189 directories = []
190 for f in listdir(origin):
191 self._logger.debug(f"Element: {join(origin,f)}")
192 if isfile(join(origin, f)) and f.endswith(".tar.gz"):
193 artifacts.append(f)
194 elif (
195 isdir(join(origin, f))
196 and f != destination.split("/")[-1]
197 and not f.startswith(".")
198 ):
199 directories.append(
200 f
201 ) # TODO: Document that nested directories are not supported
202 else:
203 self._logger.debug(f"Ignoring {f}")
204 self._logger.debug(f"Artifacts: {artifacts}")
205 for package in artifacts:
206 self.register_package_in_repository(
207 join(origin, package), origin, destination, kind="artifact"
208 )
209 self._logger.debug(f"Directories: {directories}")
210 for package in directories:
211 self.register_package_in_repository(
212 join(origin, package), origin, destination, kind="directory"
213 )
214 self._logger.info("\nFinal Results: ")
215 self._logger.info(
216 "VNF Packages Indexed: "
217 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
218 )
219 self._logger.info(
220 "NS Packages Indexed: "
221 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
222 )
223
224 self._logger.info(
225 "NST Packages Indexed: "
226 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
227 )
228
    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing.

        Supports three descriptor layouts per package type: the old catalog
        form ("vnfd-catalog"/"nsd-catalog"/"nst-catalog"), the namespaced
        catalog form ("vnfd:vnfd-catalog", ...) and the SOL006 flat form
        ("vnfd"/"nsd"/"nst").

        :param descriptor_dict: artifact description
        :param file: artifact package
        :param package_type: type of artifact (vnf, ns, nst)
        :return: fields dict (name, id, description, vendor, version, path,
                 plus images / vnfd-id-ref / nsd-id-ref depending on type)
        :raises ValueError: when the descriptor layout is not recognized
        """
        self._logger.debug("")

        fields = {}
        base_path = "/{}/".format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            # Branch order matters: check catalog layouts before the SOL006 key
            if descriptor_dict.get("vnfd-catalog", False):
                aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
            elif descriptor_dict.get("vnfd:vnfd-catalog"):
                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
                    "vnfd", [{}]
                )[0]
            elif descriptor_dict.get("vnfd"):
                aux_dict = descriptor_dict["vnfd"]
                # Some packages nest one more "vnfd" list level
                if aux_dict.get("vnfd"):
                    aux_dict = aux_dict["vnfd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug(
                f"Extracted descriptor info for {package_type}: {aux_dict}"
            )
            images = []
            # Record the image of every VDU (or the name of every KDU) so the
            # index exposes what the package needs to run
            for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
                images.append(vdu.get("image", vdu.get("name")))
            fields["images"] = images
        elif package_type == "ns":
            if descriptor_dict.get("nsd-catalog", False):
                aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
            elif descriptor_dict.get("nsd:nsd-catalog"):
                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
                    0
                ]
            elif descriptor_dict.get("nsd"):
                aux_dict = descriptor_dict["nsd"]
                if aux_dict.get("nsd"):
                    aux_dict = descriptor_dict["nsd"]["nsd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            vnfs = []
            # Old IM lists constituent VNFDs; SOL006 uses a plain "vnfd-id" list
            if aux_dict.get("constituent-vnfd"):
                for vnf in aux_dict.get("constituent-vnfd", ()):
                    vnfs.append(vnf.get("vnfd-id-ref"))
            else:
                vnfs = aux_dict.get("vnfd-id")
            self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
            fields["vnfd-id-ref"] = vnfs
        elif package_type == "nst":
            if descriptor_dict.get("nst-catalog", False):
                aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
            elif descriptor_dict.get("nst:nst-catalog"):
                aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
                    0
                ]
            elif descriptor_dict.get("nst"):
                aux_dict = descriptor_dict["nst"]
                if aux_dict.get("nst"):
                    aux_dict = descriptor_dict["nst"]["nst"][0]
            nsds = []
            for nsd in aux_dict.get("netslice-subnet", ()):
                nsds.append(nsd.get("nsd-ref"))
            self._logger.debug("Used NSDs in the NST: " + str(nsds))
            # NOTE(review): this also rejects an otherwise-valid NST that simply
            # has no netslice-subnet entries — confirm that is intended
            if not nsds:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            fields["nsd-id-ref"] = nsds
        else:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)
        # Repo search is based on 'name' entry in index.yaml. It is mandatory then.
        # SOL006 descriptors may carry 'product-name' instead of 'name' (Bug 1862)
        fields["name"] = aux_dict.get("name", aux_dict["product-name"])
        fields["id"] = aux_dict.get("id")
        fields["description"] = aux_dict.get("description")
        fields["vendor"] = aux_dict.get("vendor")
        fields["version"] = str(aux_dict.get("version", "1.0"))
        # Location of the tarball inside the index tree: /<type>/<id>/<version>/<id>-<version>.tar.gz
        fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
            base_path,
            fields["id"],
            fields["version"],
            fields.get("id"),
            fields.get("version"),
        )
        return fields
325
    def zip_extraction(self, file_name):
        """
        Decompress a tar.gz package and locate its descriptor file.

        :param file_name: path of the package file
        :return: (folder, descriptor_file); `folder` is relative to the current
                 working directory, where the tarball content is extracted
        """
        self._logger.debug("Decompressing package file")
        # Work on a copy under /tmp so the original package is left untouched
        temp_file = "/tmp/{}".format(file_name.split("/")[-1])
        if file_name != temp_file:
            copyfile(file_name, temp_file)
        with tarfile.open(temp_file, "r:gz") as tar:
            # The first member's top-level directory names the package folder
            folder = tar.getnames()[0].split("/")[0]
            # NOTE(review): extractall() without member sanitization is exposed
            # to path traversal on untrusted archives; consider tarfile's
            # filter="data" (Python 3.12+) or validating member paths.
            tar.extractall()

        remove(temp_file)
        # The descriptor is expected to be the only YAML file at the folder root
        descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
        return folder, descriptor_file
343
    def validate_artifact(self, path, origin, kind):
        """
        Validate a package (directory or tar.gz) against the OSM information model.

        :param path: file path
        :param origin: folder where the package is located
        :param kind: flag to select the correct file type ("directory" or "artifact")
        :return: (status details dict, status bool, fields dict, package_type)
                 — on failure, details contains the error and fields is empty
        """
        self._logger.debug(f"Validating {path} {kind}")
        package_type = ""
        folder = ""
        try:
            if kind == "directory":
                # Directory packages keep the descriptor at their root
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                # Artifacts are extracted first; `folder` is only set here, so
                # the finally-block cleanup only runs for the artifact case
                folder, descriptor_file = self.zip_extraction(path)
                folder = join(origin, folder)
                self._logger.debug(f"Kind is an artifact (tar.gz). Folder: {folder}. Descriptor_file: {descriptor_file}")

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
                self._logger.debug(f"Descriptor data: {descriptor_data}")
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            try:
                # Called unbound with this OSMRepo instance as `self` — the IM
                # validator only needs an object with a logger-like interface
                validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            except Exception as e:
                # Log with traceback before re-raising into the outer handler
                self._logger.error(e, exc_info=True)
                raise e
            descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
            # Order matters: "nst" must be tested before "ns" (substring match)
            if "vnf" in descriptor_type_ref:
                package_type = "vnf"
            elif "nst" in descriptor_type_ref:
                package_type = "nst"
            elif "ns" in descriptor_type_ref:
                package_type = "ns"
            else:
                msg = f"Unknown package type {descriptor_type_ref}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug(f"Descriptor successfully validated {fields}")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Validation errors are reported to the caller, not raised
            return {"detail": str(e)}, False, {}, package_type
        finally:
            # Delete the extraction folder created for artifact packages
            if folder:
                rmtree(folder, ignore_errors=True)
404
405 def register_package_in_repository(self, path, origin, destination, kind):
406 """
407 Registration of one artifact in a repository
408 :param path: absolute path of the VNF/NS package
409 :param origin: folder where the package is located
410 :param destination: path for index creation
411 :param kind: artifact (tar.gz) or directory
412 """
413 self._logger.debug("")
414 pt = PackageTool()
415 compressed = False
416 try:
417 fields = {}
418 _, valid, fields, package_type = self.validate_artifact(path, origin, kind)
419 if not valid:
420 raise Exception(
421 "{} {} Not well configured.".format(package_type.upper(), str(path))
422 )
423 else:
424 if kind == "directory":
425 path = pt.build(path)
426 self._logger.debug(f"Directory path {path}")
427 compressed = True
428 fields["checksum"] = utils.md5(path)
429 self.indexation(destination, path, package_type, fields)
430
431 except Exception as e:
432 self._logger.exception(
433 "Error registering package in Repository: {}".format(e)
434 )
435 raise ClientException(e)
436
437 finally:
438 if kind == "directory" and compressed:
439 remove(path)
440
441 def indexation(self, destination, path, package_type, fields):
442 """
443 Process for index packages
444 :param destination: index repository path
445 :param path: path of the package
446 :param package_type: package type (vnf, ns, nst)
447 :param fields: dict with the required values
448 """
449 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
450
451 data_ind = {
452 "name": fields.get("name"),
453 "description": fields.get("description"),
454 "vendor": fields.get("vendor"),
455 "path": fields.get("path"),
456 }
457 self._logger.debug(data_ind)
458 final_path = join(
459 destination, package_type, fields.get("id"), fields.get("version")
460 )
461 if isdir(join(destination, package_type, fields.get("id"))):
462 if isdir(final_path):
463 self._logger.warning(
464 "{} {} already exists".format(package_type.upper(), str(path))
465 )
466 else:
467 mkdir(final_path)
468 copyfile(
469 path,
470 final_path
471 + "/"
472 + fields.get("id")
473 + "-"
474 + fields.get("version")
475 + ".tar.gz",
476 )
477 yaml.safe_dump(
478 fields,
479 open(final_path + "/" + "metadata.yaml", "w"),
480 default_flow_style=False,
481 width=80,
482 indent=4,
483 )
484 index = yaml.safe_load(open(destination + "/index.yaml"))
485
486 index["{}_packages".format(package_type)][fields.get("id")][
487 fields.get("version")
488 ] = data_ind
489 if versioning.parse(
490 index["{}_packages".format(package_type)][fields.get("id")][
491 "latest"
492 ]
493 ) < versioning.parse(fields.get("version")):
494 index["{}_packages".format(package_type)][fields.get("id")][
495 "latest"
496 ] = fields.get("version")
497 yaml.safe_dump(
498 index,
499 open(destination + "/index.yaml", "w"),
500 default_flow_style=False,
501 width=80,
502 indent=4,
503 )
504 self._logger.info(
505 "{} {} added in the repository".format(
506 package_type.upper(), str(path)
507 )
508 )
509 else:
510 mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
511 mkdir(final_path)
512 copyfile(
513 path,
514 final_path
515 + "/"
516 + fields.get("id")
517 + "-"
518 + fields.get("version")
519 + ".tar.gz",
520 )
521 yaml.safe_dump(
522 fields,
523 open(join(final_path, "metadata.yaml"), "w"),
524 default_flow_style=False,
525 width=80,
526 indent=4,
527 )
528 index = yaml.safe_load(open(destination + "/index.yaml"))
529
530 index["{}_packages".format(package_type)][fields.get("id")] = {
531 fields.get("version"): data_ind
532 }
533 index["{}_packages".format(package_type)][fields.get("id")][
534 "latest"
535 ] = fields.get("version")
536 yaml.safe_dump(
537 index,
538 open(join(destination, "index.yaml"), "w"),
539 default_flow_style=False,
540 width=80,
541 indent=4,
542 )
543 self._logger.info(
544 "{} {} added in the repository".format(package_type.upper(), str(path))
545 )
546
547 def current_datetime(self):
548 """
549 Datetime Generator
550 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
551 """
552 self._logger.debug("")
553 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
554
555 def init_directory(self, destination):
556 """
557 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
558 :param destination:
559 :return:
560 """
561 self._logger.debug("")
562 if not isdir(destination):
563 mkdir(destination)
564 if not isfile(join(destination, "index.yaml")):
565 mkdir(join(destination, "vnf"))
566 mkdir(join(destination, "ns"))
567 mkdir(join(destination, "nst"))
568 index_data = {
569 "apiVersion": "v1",
570 "generated": self.current_datetime(),
571 "vnf_packages": {},
572 "ns_packages": {},
573 "nst_packages": {},
574 }
575 with open(join(destination, "index.yaml"), "w") as outfile:
576 yaml.safe_dump(
577 index_data, outfile, default_flow_style=False, width=80, indent=4
578 )