Fix bug 2073: properly delete unzipped packages during osm repo-index
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
from datetime import datetime, timezone
import glob
import hashlib
import logging
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile
import time

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from packaging import version as versioning
import requests
import yaml
35
36
37 class OSMRepo(Repo):
38 def __init__(self, http=None, client=None):
39 self._http = http
40 self._client = client
41 self._apiName = "/admin"
42 self._apiVersion = "/v1"
43 self._apiResource = "/osmrepos"
44 self._logger = logging.getLogger("osmclient")
45 self._apiBase = "{}{}{}".format(
46 self._apiName, self._apiVersion, self._apiResource
47 )
48
49 def pkg_list(self, pkgtype, filter=None, repo=None):
50 """
51 Returns a repo based on name or id
52 """
53 self._logger.debug("")
54 self._client.get_token()
55 # Get OSM registered repository list
56 repositories = self.list()
57 if repo:
58 repositories = [r for r in repositories if r["name"] == repo]
59 if not repositories:
60 raise ClientException("Not repository found")
61
62 vnf_repos = []
63 for repository in repositories:
64 try:
65 r = requests.get("{}/index.yaml".format(repository.get("url")))
66
67 if r.status_code == 200:
68 repo_list = yaml.safe_load(r.text)
69 vnf_packages = repo_list.get("{}_packages".format(pkgtype))
70 for repo in vnf_packages:
71 versions = vnf_packages.get(repo)
72 latest = versions.get("latest")
73 del versions["latest"]
74 for version in versions:
75 latest_version = False
76 if version == latest:
77 latest_version = True
78 vnf_repos.append(
79 {
80 "vendor": versions[version].get("vendor"),
81 "name": versions[version].get("name"),
82 "version": version,
83 "description": versions[version].get("description"),
84 "location": versions[version].get("path"),
85 "repository": repository.get("name"),
86 "repourl": repository.get("url"),
87 "latest": latest_version,
88 }
89 )
90 else:
91 raise Exception(
92 "repository in url {} unreachable".format(repository.get("url"))
93 )
94 except Exception as e:
95 self._logger.error(
96 "Error cannot read from repository {} '{}': {}".format(
97 repository["name"], repository["url"], e
98 ),
99 exc_info=True,
100 )
101 continue
102
103 vnf_repos_filtered = []
104 if filter:
105 for vnf_repo in vnf_repos:
106 for k, v in vnf_repo.items():
107 if v:
108 kf, vf = filter.split("=")
109 if k == kf and vf in v:
110 vnf_repos_filtered.append(vnf_repo)
111 break
112 vnf_repos = vnf_repos_filtered
113 return vnf_repos
114
115 def get_pkg(self, pkgtype, name, repo, filter, version):
116 """
117 Returns the filename of the PKG downloaded to disk
118 """
119 self._logger.debug("")
120 self._client.get_token()
121 f = None
122 f_name = None
123 # Get OSM registered repository list
124 pkgs = self.pkg_list(pkgtype, filter, repo)
125 for pkg in pkgs:
126 if pkg.get("repository") == repo and pkg.get("name") == name:
127 if "latest" in version:
128 if not pkg.get("latest"):
129 continue
130 else:
131 version = pkg.get("version")
132 if pkg.get("version") == version:
133 r = requests.get(
134 "{}{}".format(pkg.get("repourl"), pkg.get("location")),
135 stream=True,
136 )
137 if r.status_code != 200:
138 raise ClientException("Package not found")
139
140 with tempfile.NamedTemporaryFile(delete=False) as f:
141 f.write(r.raw.read())
142 f_name = f.name
143 if not f_name:
144 raise ClientException(
145 "{} {} not found at repo {}".format(pkgtype, name, repo)
146 )
147 return f_name
148
149 def pkg_get(self, pkgtype, name, repo, version, filter):
150
151 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
152 if not pkg_name:
153 raise ClientException("Package not found")
154 folder, descriptor = self.zip_extraction(pkg_name)
155 with open(descriptor) as pkg:
156 pkg_descriptor = yaml.safe_load(pkg)
157 rmtree(folder, ignore_errors=False)
158 if (
159 pkgtype == "vnf"
160 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
161 ) or (
162 pkgtype == "ns"
163 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
164 ):
165 raise ClientException("Wrong Package type")
166 return pkg_descriptor
167
168 def repo_index(self, origin=".", destination="."):
169 """
170 Repo Index main function
171 :param origin: origin directory for getting all the artifacts
172 :param destination: destination folder for create and index the valid artifacts
173 """
174 self._logger.debug("Starting index composition")
175 if destination == ".":
176 if origin == destination:
177 destination = "repository"
178
179 destination = abspath(destination)
180 origin = abspath(origin)
181 self._logger.debug(f"Paths {destination}, {origin}")
182 if origin[0] != "/":
183 origin = join(getcwd(), origin)
184 if destination[0] != "/":
185 destination = join(getcwd(), destination)
186
187 self.init_directory(destination)
188 artifacts = []
189 directories = []
190 for f in listdir(origin):
191 self._logger.debug(f"Element: {join(origin,f)}")
192 if isfile(join(origin, f)) and f.endswith(".tar.gz"):
193 artifacts.append(f)
194 elif (
195 isdir(join(origin, f))
196 and f != destination.split("/")[-1]
197 and not f.startswith(".")
198 ):
199 directories.append(
200 f
201 ) # TODO: Document that nested directories are not supported
202 else:
203 self._logger.debug(f"Ignoring {f}")
204 self._logger.debug(f"Artifacts: {artifacts}")
205 for package in artifacts:
206 self.register_package_in_repository(
207 join(origin, package), origin, destination, kind="artifact"
208 )
209 self._logger.debug(f"Directories: {directories}")
210 for package in directories:
211 self.register_package_in_repository(
212 join(origin, package), origin, destination, kind="directory"
213 )
214 self._logger.info("\nFinal Results: ")
215 self._logger.info(
216 "VNF Packages Indexed: "
217 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
218 )
219 self._logger.info(
220 "NS Packages Indexed: "
221 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
222 )
223
224 self._logger.info(
225 "NST Packages Indexed: "
226 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
227 )
228
229 def md5(self, fname):
230 """
231 Checksum generator
232 :param fname: file path
233 :return: checksum string
234 """
235 self._logger.debug("")
236 hash_md5 = hashlib.md5()
237 with open(fname, "rb") as f:
238 for chunk in iter(lambda: f.read(4096), b""):
239 hash_md5.update(chunk)
240 return hash_md5.hexdigest()
241
242 def fields_building(self, descriptor_dict, file, package_type):
243 """
244 From an artifact descriptor, obtain the fields required for indexing
245 :param descriptor_dict: artifact description
246 :param file: artifact package
247 :param package_type: type of artifact (vnf, ns, nst)
248 :return: fields
249 """
250 self._logger.debug("")
251
252 fields = {}
253 base_path = "/{}/".format(package_type)
254 aux_dict = {}
255 if package_type == "vnf":
256 if descriptor_dict.get("vnfd-catalog", False):
257 aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
258 elif descriptor_dict.get("vnfd:vnfd-catalog"):
259 aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
260 "vnfd", [{}]
261 )[0]
262 elif descriptor_dict.get("vnfd"):
263 aux_dict = descriptor_dict["vnfd"]
264 if aux_dict.get("vnfd"):
265 aux_dict = aux_dict["vnfd"][0]
266 else:
267 msg = f"Unexpected descriptor format {descriptor_dict}"
268 self._logger.error(msg)
269 raise ValueError(msg)
270 self._logger.debug(
271 f"Extracted descriptor info for {package_type}: {aux_dict}"
272 )
273 images = []
274 for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
275 images.append(vdu.get("image", vdu.get("name")))
276 fields["images"] = images
277 elif package_type == "ns":
278 if descriptor_dict.get("nsd-catalog", False):
279 aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
280 elif descriptor_dict.get("nsd:nsd-catalog"):
281 aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
282 0
283 ]
284 elif descriptor_dict.get("nsd"):
285 aux_dict = descriptor_dict["nsd"]
286 if aux_dict.get("nsd"):
287 aux_dict = descriptor_dict["nsd"]["nsd"][0]
288 else:
289 msg = f"Unexpected descriptor format {descriptor_dict}"
290 self._logger.error(msg)
291 raise ValueError(msg)
292 vnfs = []
293 if aux_dict.get("constituent-vnfd"):
294 for vnf in aux_dict.get("constituent-vnfd", ()):
295 vnfs.append(vnf.get("vnfd-id-ref"))
296 else:
297 vnfs = aux_dict.get("vnfd-id")
298 self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
299 fields["vnfd-id-ref"] = vnfs
300 elif package_type == "nst":
301 if descriptor_dict.get("nst-catalog", False):
302 aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
303 elif descriptor_dict.get("nst:nst-catalog"):
304 aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
305 0
306 ]
307 elif descriptor_dict.get("nst"):
308 aux_dict = descriptor_dict["nst"]
309 if aux_dict.get("nst"):
310 aux_dict = descriptor_dict["nst"]["nst"][0]
311 nsds = []
312 for nsd in aux_dict.get("netslice-subnet", ()):
313 nsds.append(nsd.get("nsd-ref"))
314 self._logger.debug("Used NSDs in the NST: " + str(nsds))
315 if not nsds:
316 msg = f"Unexpected descriptor format {descriptor_dict}"
317 self._logger.error(msg)
318 raise ValueError(msg)
319 fields["nsd-id-ref"] = nsds
320 else:
321 msg = f"Unexpected descriptor format {descriptor_dict}"
322 self._logger.error(msg)
323 raise ValueError(msg)
324
325 fields["name"] = aux_dict.get("name")
326 fields["id"] = aux_dict.get("id")
327 fields["description"] = aux_dict.get("description")
328 fields["vendor"] = aux_dict.get("vendor")
329 fields["version"] = str(aux_dict.get("version", "1.0"))
330 fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
331 base_path,
332 fields["id"],
333 fields["version"],
334 fields.get("id"),
335 fields.get("version"),
336 )
337 return fields
338
    def zip_extraction(self, file_name):
        """
        Extract a .tar.gz package into the current working directory.

        :param file_name: path of the .tar.gz package to unpack
        :return: (folder, descriptor_file) — the top-level folder name the
            tarball was extracted to (relative to the cwd) and the path of the
            first YAML file found inside it, taken as the descriptor
        """
        self._logger.debug("Decompressing package file")
        # Work on a copy under /tmp so the original package is left untouched.
        # NOTE(review): fixed, predictable path — concurrent runs with the same
        # file name would collide; consider tempfile.mkdtemp() instead.
        temp_file = "/tmp/{}".format(file_name.split("/")[-1])
        if file_name != temp_file:
            copyfile(file_name, temp_file)
        with tarfile.open(temp_file, "r:gz") as tar:
            # Top-level directory of the tarball (assumes all members share it)
            folder = tar.getnames()[0].split("/")[0]
            # NOTE(review): extractall() without member sanitization trusts the
            # archive; a crafted tarball could write outside the cwd.
            tar.extractall()

        remove(temp_file)
        # First *.yaml / *.yml at the top level is taken as the descriptor;
        # raises IndexError if the package contains none
        descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
        return folder, descriptor_file
356
357 def validate_artifact(self, path, origin, kind):
358 """
359 Validation of artifact.
360 :param path: file path
361 :param origin: folder where the package is located
362 :param kind: flag to select the correct file type (directory or artifact)
363 :return: status details, status, fields, package_type
364 """
365 self._logger.debug(f"Validating {path} {kind}")
366 package_type = ""
367 folder = ""
368 try:
369 if kind == "directory":
370 descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
371 else:
372 folder, descriptor_file = self.zip_extraction(path)
373 folder = join(origin, folder)
374 self._logger.debug(f"Kind is an artifact (tar.gz). Folder: {folder}. Descriptor_file: {descriptor_file}")
375
376 self._logger.debug("Opening descriptor file: {}".format(descriptor_file))
377
378 with open(descriptor_file, "r") as f:
379 descriptor_data = f.read()
380 self._logger.debug(f"Descriptor data: {descriptor_data}")
381 validation = validation_im()
382 desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
383 try:
384 validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
385 except Exception as e:
386 self._logger.error(e, exc_info=True)
387 raise e
388 descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
389 if "vnf" in descriptor_type_ref:
390 package_type = "vnf"
391 elif "nst" in descriptor_type_ref:
392 package_type = "nst"
393 elif "ns" in descriptor_type_ref:
394 package_type = "ns"
395 else:
396 msg = f"Unknown package type {descriptor_type_ref}"
397 self._logger.error(msg)
398 raise ValueError(msg)
399 self._logger.debug("Descriptor: {}".format(descriptor_dict))
400 fields = self.fields_building(descriptor_dict, path, package_type)
401 self._logger.debug(f"Descriptor successfully validated {fields}")
402 return (
403 {
404 "detail": "{}D successfully validated".format(package_type.upper()),
405 "code": "OK",
406 },
407 True,
408 fields,
409 package_type,
410 )
411 except Exception as e:
412 # Delete the folder we just created
413 return {"detail": str(e)}, False, {}, package_type
414 finally:
415 if folder:
416 rmtree(folder, ignore_errors=True)
417
418 def register_package_in_repository(self, path, origin, destination, kind):
419 """
420 Registration of one artifact in a repository
421 :param path: absolute path of the VNF/NS package
422 :param origin: folder where the package is located
423 :param destination: path for index creation
424 :param kind: artifact (tar.gz) or directory
425 """
426 self._logger.debug("")
427 pt = PackageTool()
428 compressed = False
429 try:
430 fields = {}
431 _, valid, fields, package_type = self.validate_artifact(path, origin, kind)
432 if not valid:
433 raise Exception(
434 "{} {} Not well configured.".format(package_type.upper(), str(path))
435 )
436 else:
437 if kind == "directory":
438 path = pt.build(path)
439 self._logger.debug(f"Directory path {path}")
440 compressed = True
441 fields["checksum"] = self.md5(path)
442 self.indexation(destination, path, package_type, fields)
443
444 except Exception as e:
445 self._logger.exception(
446 "Error registering package in Repository: {}".format(e)
447 )
448 raise ClientException(e)
449
450 finally:
451 if kind == "directory" and compressed:
452 remove(path)
453
454 def indexation(self, destination, path, package_type, fields):
455 """
456 Process for index packages
457 :param destination: index repository path
458 :param path: path of the package
459 :param package_type: package type (vnf, ns, nst)
460 :param fields: dict with the required values
461 """
462 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
463
464 data_ind = {
465 "name": fields.get("name"),
466 "description": fields.get("description"),
467 "vendor": fields.get("vendor"),
468 "path": fields.get("path"),
469 }
470 self._logger.debug(data_ind)
471 final_path = join(
472 destination, package_type, fields.get("id"), fields.get("version")
473 )
474 if isdir(join(destination, package_type, fields.get("id"))):
475 if isdir(final_path):
476 self._logger.warning(
477 "{} {} already exists".format(package_type.upper(), str(path))
478 )
479 else:
480 mkdir(final_path)
481 copyfile(
482 path,
483 final_path
484 + "/"
485 + fields.get("id")
486 + "-"
487 + fields.get("version")
488 + ".tar.gz",
489 )
490 yaml.safe_dump(
491 fields,
492 open(final_path + "/" + "metadata.yaml", "w"),
493 default_flow_style=False,
494 width=80,
495 indent=4,
496 )
497 index = yaml.safe_load(open(destination + "/index.yaml"))
498
499 index["{}_packages".format(package_type)][fields.get("id")][
500 fields.get("version")
501 ] = data_ind
502 if versioning.parse(
503 index["{}_packages".format(package_type)][fields.get("id")][
504 "latest"
505 ]
506 ) < versioning.parse(fields.get("version")):
507 index["{}_packages".format(package_type)][fields.get("id")][
508 "latest"
509 ] = fields.get("version")
510 yaml.safe_dump(
511 index,
512 open(destination + "/index.yaml", "w"),
513 default_flow_style=False,
514 width=80,
515 indent=4,
516 )
517 self._logger.info(
518 "{} {} added in the repository".format(
519 package_type.upper(), str(path)
520 )
521 )
522 else:
523 mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
524 mkdir(final_path)
525 copyfile(
526 path,
527 final_path
528 + "/"
529 + fields.get("id")
530 + "-"
531 + fields.get("version")
532 + ".tar.gz",
533 )
534 yaml.safe_dump(
535 fields,
536 open(join(final_path, "metadata.yaml"), "w"),
537 default_flow_style=False,
538 width=80,
539 indent=4,
540 )
541 index = yaml.safe_load(open(destination + "/index.yaml"))
542
543 index["{}_packages".format(package_type)][fields.get("id")] = {
544 fields.get("version"): data_ind
545 }
546 index["{}_packages".format(package_type)][fields.get("id")][
547 "latest"
548 ] = fields.get("version")
549 yaml.safe_dump(
550 index,
551 open(join(destination, "index.yaml"), "w"),
552 default_flow_style=False,
553 width=80,
554 indent=4,
555 )
556 self._logger.info(
557 "{} {} added in the repository".format(package_type.upper(), str(path))
558 )
559
560 def current_datetime(self):
561 """
562 Datetime Generator
563 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
564 """
565 self._logger.debug("")
566 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
567
568 def init_directory(self, destination):
569 """
570 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
571 :param destination:
572 :return:
573 """
574 self._logger.debug("")
575 if not isdir(destination):
576 mkdir(destination)
577 if not isfile(join(destination, "index.yaml")):
578 mkdir(join(destination, "vnf"))
579 mkdir(join(destination, "ns"))
580 mkdir(join(destination, "nst"))
581 index_data = {
582 "apiVersion": "v1",
583 "generated": self.current_datetime(),
584 "vnf_packages": {},
585 "ns_packages": {},
586 "nst_packages": {},
587 }
588 with open(join(destination, "index.yaml"), "w") as outfile:
589 yaml.safe_dump(
590 index_data, outfile, default_flow_style=False, width=80, indent=4
591 )