Fix bug 2074: properly delete unzipped packages during 'osm repo-index'
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
import glob
import hashlib
import logging
from datetime import datetime, timezone
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile
import time

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from packaging import version as versioning
import requests
import yaml
35
36
37 class OSMRepo(Repo):
38 def __init__(self, http=None, client=None):
39 self._http = http
40 self._client = client
41 self._apiName = "/admin"
42 self._apiVersion = "/v1"
43 self._apiResource = "/osmrepos"
44 self._logger = logging.getLogger("osmclient")
45 self._apiBase = "{}{}{}".format(
46 self._apiName, self._apiVersion, self._apiResource
47 )
48
49 def pkg_list(self, pkgtype, filter=None, repo=None):
50 """
51 Returns a repo based on name or id
52 """
53 self._logger.debug("")
54 self._client.get_token()
55 # Get OSM registered repository list
56 repositories = self.list()
57 if repo:
58 repositories = [r for r in repositories if r["name"] == repo]
59 if not repositories:
60 raise ClientException("Not repository found")
61
62 vnf_repos = []
63 for repository in repositories:
64 try:
65 r = requests.get("{}/index.yaml".format(repository.get("url")))
66
67 if r.status_code == 200:
68 repo_list = yaml.safe_load(r.text)
69 vnf_packages = repo_list.get("{}_packages".format(pkgtype))
70 for repo in vnf_packages:
71 versions = vnf_packages.get(repo)
72 latest = versions.get("latest")
73 del versions["latest"]
74 for version in versions:
75 latest_version = False
76 if version == latest:
77 latest_version = True
78 vnf_repos.append(
79 {
80 "vendor": versions[version].get("vendor"),
81 "name": versions[version].get("name"),
82 "version": version,
83 "description": versions[version].get("description"),
84 "location": versions[version].get("path"),
85 "repository": repository.get("name"),
86 "repourl": repository.get("url"),
87 "latest": latest_version,
88 }
89 )
90 else:
91 raise Exception(
92 "repository in url {} unreachable".format(repository.get("url"))
93 )
94 except Exception as e:
95 self._logger.error(
96 "Error cannot read from repository {} '{}': {}".format(
97 repository["name"], repository["url"], e
98 ),
99 exc_info=True
100 )
101 continue
102
103 vnf_repos_filtered = []
104 if filter:
105 for vnf_repo in vnf_repos:
106 for k, v in vnf_repo.items():
107 if v:
108 kf, vf = filter.split("=")
109 if k == kf and vf in v:
110 vnf_repos_filtered.append(vnf_repo)
111 break
112 vnf_repos = vnf_repos_filtered
113 return vnf_repos
114
115 def get_pkg(self, pkgtype, name, repo, filter, version):
116 """
117 Returns the filename of the PKG downloaded to disk
118 """
119 self._logger.debug("")
120 self._client.get_token()
121 f = None
122 f_name = None
123 # Get OSM registered repository list
124 pkgs = self.pkg_list(pkgtype, filter, repo)
125 for pkg in pkgs:
126 if pkg.get("repository") == repo and pkg.get("name") == name:
127 if "latest" in version:
128 if not pkg.get("latest"):
129 continue
130 else:
131 version = pkg.get("version")
132 if pkg.get("version") == version:
133 r = requests.get(
134 "{}{}".format(pkg.get("repourl"), pkg.get("location")),
135 stream=True,
136 )
137 if r.status_code != 200:
138 raise ClientException("Package not found")
139
140 with tempfile.NamedTemporaryFile(delete=False) as f:
141 f.write(r.raw.read())
142 f_name = f.name
143 if not f_name:
144 raise ClientException(
145 "{} {} not found at repo {}".format(pkgtype, name, repo)
146 )
147 return f_name
148
149 def pkg_get(self, pkgtype, name, repo, version, filter):
150
151 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
152 if not pkg_name:
153 raise ClientException("Package not found")
154 folder, descriptor = self.zip_extraction(pkg_name)
155 with open(descriptor) as pkg:
156 pkg_descriptor = yaml.safe_load(pkg)
157 rmtree(folder, ignore_errors=False)
158 if (
159 pkgtype == "vnf"
160 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
161 ) or (
162 pkgtype == "ns"
163 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
164 ):
165 raise ClientException("Wrong Package type")
166 return pkg_descriptor
167
168 def repo_index(self, origin=".", destination="."):
169 """
170 Repo Index main function
171 :param origin: origin directory for getting all the artifacts
172 :param destination: destination folder for create and index the valid artifacts
173 """
174 self._logger.debug("Starting index composition")
175 if destination == ".":
176 if origin == destination:
177 destination = "repository"
178
179 destination = abspath(destination)
180 origin = abspath(origin)
181 self._logger.debug(f"Paths {destination}, {origin}")
182 if origin[0] != "/":
183 origin = join(getcwd(), origin)
184 if destination[0] != "/":
185 destination = join(getcwd(), destination)
186
187 self.init_directory(destination)
188 artifacts = []
189 directories = []
190 for f in listdir(origin):
191 self._logger.debug(f"Element: {join(origin,f)}")
192 if isfile(join(origin, f)) and f.endswith(".tar.gz"):
193 artifacts.append(f)
194 elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
195 directories.append(f) # TODO: Document that nested directories are not supported
196 else:
197 self._logger.debug(f"Ignoring {f}")
198 self._logger.debug(f"Artifacts: {artifacts}")
199 for package in artifacts:
200 self.register_package_in_repository(
201 join(origin, package), origin, destination, kind="artifact"
202 )
203 self._logger.debug(f"Directories: {directories}")
204 for package in directories:
205 self.register_package_in_repository(
206 join(origin, package), origin, destination, kind="directory"
207 )
208 self._logger.info("\nFinal Results: ")
209 self._logger.info(
210 "VNF Packages Indexed: "
211 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
212 )
213 self._logger.info(
214 "NS Packages Indexed: "
215 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
216 )
217
218 self._logger.info(
219 "NST Packages Indexed: "
220 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
221 )
222
223 def md5(self, fname):
224 """
225 Checksum generator
226 :param fname: file path
227 :return: checksum string
228 """
229 self._logger.debug("")
230 hash_md5 = hashlib.md5()
231 with open(fname, "rb") as f:
232 for chunk in iter(lambda: f.read(4096), b""):
233 hash_md5.update(chunk)
234 return hash_md5.hexdigest()
235
236 def fields_building(self, descriptor_dict, file, package_type):
237 """
238 From an artifact descriptor, obtain the fields required for indexing
239 :param descriptor_dict: artifact description
240 :param file: artifact package
241 :param package_type: type of artifact (vnf, ns, nst)
242 :return: fields
243 """
244 self._logger.debug("")
245
246 fields = {}
247 base_path = "/{}/".format(package_type)
248 aux_dict = {}
249 if package_type == "vnf":
250 if descriptor_dict.get("vnfd-catalog", False):
251 aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
252 elif descriptor_dict.get("vnfd:vnfd-catalog"):
253 aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
254 elif descriptor_dict.get("vnfd"):
255 aux_dict = descriptor_dict["vnfd"]
256 if aux_dict.get("vnfd"):
257 aux_dict = aux_dict['vnfd'][0]
258 else:
259 msg = f"Unexpected descriptor format {descriptor_dict}"
260 self._logger.error(msg)
261 raise ValueError(msg)
262 self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
263 images = []
264 for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
265 images.append(vdu.get("image", vdu.get('name')))
266 fields["images"] = images
267 elif package_type == "ns":
268 if descriptor_dict.get("nsd-catalog", False):
269 aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
270 elif descriptor_dict.get("nsd:nsd-catalog"):
271 aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
272 elif descriptor_dict.get("nsd"):
273 aux_dict = descriptor_dict['nsd']
274 if aux_dict.get("nsd"):
275 aux_dict = descriptor_dict["nsd"]["nsd"][0]
276 else:
277 msg = f"Unexpected descriptor format {descriptor_dict}"
278 self._logger.error(msg)
279 raise ValueError(msg)
280 vnfs = []
281 if aux_dict.get("constituent-vnfd"):
282 for vnf in aux_dict.get("constituent-vnfd", ()):
283 vnfs.append(vnf.get("vnfd-id-ref"))
284 else:
285 vnfs = aux_dict.get('vnfd-id')
286 self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
287 fields["vnfd-id-ref"] = vnfs
288 elif package_type == 'nst':
289 if descriptor_dict.get("nst-catalog", False):
290 aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
291 elif descriptor_dict.get("nst:nst-catalog"):
292 aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
293 elif descriptor_dict.get("nst"):
294 aux_dict = descriptor_dict['nst']
295 if aux_dict.get("nst"):
296 aux_dict = descriptor_dict["nst"]["nst"][0]
297 nsds = []
298 for nsd in aux_dict.get("netslice-subnet", ()):
299 nsds.append(nsd.get("nsd-ref"))
300 self._logger.debug("Used NSDs in the NST: " + str(nsds))
301 if not nsds:
302 msg = f"Unexpected descriptor format {descriptor_dict}"
303 self._logger.error(msg)
304 raise ValueError(msg)
305 fields["nsd-id-ref"] = nsds
306 else:
307 msg = f"Unexpected descriptor format {descriptor_dict}"
308 self._logger.error(msg)
309 raise ValueError(msg)
310
311 fields["name"] = aux_dict.get("name")
312 fields["id"] = aux_dict.get("id")
313 fields["description"] = aux_dict.get("description")
314 fields["vendor"] = aux_dict.get("vendor")
315 fields["version"] = str(aux_dict.get("version", "1.0"))
316 fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
317 base_path,
318 fields["id"],
319 fields["version"],
320 fields.get("id"),
321 fields.get("version"),
322 )
323 return fields
324
325 def zip_extraction(self, file_name):
326 """
327 Validation of artifact.
328 :param file: file path
329 :return: status details, status, fields, package_type
330 """
331 self._logger.debug("Decompressing package file")
332 temp_file = "/tmp/{}".format(file_name.split("/")[-1])
333 if file_name != temp_file:
334 copyfile(file_name, temp_file)
335 with tarfile.open(temp_file, "r:gz") as tar:
336 folder = tar.getnames()[0].split("/")[0]
337 tar.extractall()
338
339 remove(temp_file)
340 descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
341 return folder, descriptor_file
342
343 def validate_artifact(self, path, origin, kind):
344 """
345 Validation of artifact.
346 :param path: file path
347 :param origin: folder where the package is located
348 :param kind: flag to select the correct file type (directory or artifact)
349 :return: status details, status, fields, package_type
350 """
351 self._logger.debug(f"Validating {path} {kind}")
352 package_type = ""
353 folder = ""
354 try:
355 if kind == "directory":
356 descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
357 else:
358 folder, descriptor_file = self.zip_extraction(path)
359 folder = join(origin, folder)
360 self._logger.debug(f"Kind is an artifact (tar.gz). Folder: {folder}. Descriptor_file: {descriptor_file}")
361
362 self._logger.debug("Opening descriptor file: {}".format(descriptor_file))
363
364 with open(descriptor_file, "r") as f:
365 descriptor_data = f.read()
366 self._logger.debug(f"Descriptor data: {descriptor_data}")
367 validation = validation_im()
368 desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
369 try:
370 validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
371 except Exception as e:
372 self._logger.error(e, exc_info=True)
373 raise e
374 descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
375 if "vnf" in descriptor_type_ref:
376 package_type = "vnf"
377 elif "nst" in descriptor_type_ref:
378 package_type = "nst"
379 elif "ns" in descriptor_type_ref:
380 package_type = "ns"
381 else:
382 msg = f"Unknown package type {descriptor_type_ref}"
383 self._logger.error(msg)
384 raise ValueError(msg)
385 self._logger.debug("Descriptor: {}".format(descriptor_dict))
386 fields = self.fields_building(descriptor_dict, path, package_type)
387 self._logger.debug(f"Descriptor successfully validated {fields}")
388 return (
389 {
390 "detail": "{}D successfully validated".format(package_type.upper()),
391 "code": "OK",
392 },
393 True,
394 fields,
395 package_type,
396 )
397 except Exception as e:
398 # Delete the folder we just created
399 return {"detail": str(e)}, False, {}, package_type
400 finally:
401 if folder:
402 rmtree(folder, ignore_errors=True)
403
404 def register_package_in_repository(self, path, origin, destination, kind):
405 """
406 Registration of one artifact in a repository
407 :param path: absolute path of the VNF/NS package
408 :param origin: folder where the package is located
409 :param destination: path for index creation
410 :param kind: artifact (tar.gz) or directory
411 """
412 self._logger.debug("")
413 pt = PackageTool()
414 compressed = False
415 try:
416 fields = {}
417 _, valid, fields, package_type = self.validate_artifact(path, origin, kind)
418 if not valid:
419 raise Exception(
420 "{} {} Not well configured.".format(package_type.upper(), str(path))
421 )
422 else:
423 if kind == "directory":
424 path = pt.build(path)
425 self._logger.debug(f"Directory path {path}")
426 compressed = True
427 fields["checksum"] = self.md5(path)
428 self.indexation(destination, path, package_type, fields)
429
430 except Exception as e:
431 self._logger.exception(
432 "Error registering package in Repository: {}".format(e)
433 )
434 raise ClientException(e)
435
436 finally:
437 if kind == "directory" and compressed:
438 remove(path)
439
440 def indexation(self, destination, path, package_type, fields):
441 """
442 Process for index packages
443 :param destination: index repository path
444 :param path: path of the package
445 :param package_type: package type (vnf, ns, nst)
446 :param fields: dict with the required values
447 """
448 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
449
450 data_ind = {
451 "name": fields.get("name"),
452 "description": fields.get("description"),
453 "vendor": fields.get("vendor"),
454 "path": fields.get("path"),
455 }
456 self._logger.debug(data_ind)
457 final_path = join(
458 destination, package_type, fields.get("id"), fields.get("version")
459 )
460 if isdir(join(destination, package_type, fields.get("id"))):
461 if isdir(final_path):
462 self._logger.warning(
463 "{} {} already exists".format(package_type.upper(), str(path))
464 )
465 else:
466 mkdir(final_path)
467 copyfile(
468 path,
469 final_path
470 + "/"
471 + fields.get("id")
472 + "-"
473 + fields.get("version")
474 + ".tar.gz",
475 )
476 yaml.safe_dump(
477 fields,
478 open(final_path + "/" + "metadata.yaml", "w"),
479 default_flow_style=False,
480 width=80,
481 indent=4,
482 )
483 index = yaml.safe_load(open(destination + "/index.yaml"))
484
485 index["{}_packages".format(package_type)][fields.get("id")][
486 fields.get("version")
487 ] = data_ind
488 if versioning.parse(
489 index["{}_packages".format(package_type)][fields.get("id")][
490 "latest"
491 ]
492 ) < versioning.parse(fields.get("version")):
493 index["{}_packages".format(package_type)][fields.get("id")][
494 "latest"
495 ] = fields.get("version")
496 yaml.safe_dump(
497 index,
498 open(destination + "/index.yaml", "w"),
499 default_flow_style=False,
500 width=80,
501 indent=4,
502 )
503 self._logger.info(
504 "{} {} added in the repository".format(
505 package_type.upper(), str(path)
506 )
507 )
508 else:
509 mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
510 mkdir(final_path)
511 copyfile(
512 path,
513 final_path
514 + "/"
515 + fields.get("id")
516 + "-"
517 + fields.get("version")
518 + ".tar.gz",
519 )
520 yaml.safe_dump(
521 fields,
522 open(join(final_path, "metadata.yaml"), "w"),
523 default_flow_style=False,
524 width=80,
525 indent=4,
526 )
527 index = yaml.safe_load(open(destination + "/index.yaml"))
528
529 index["{}_packages".format(package_type)][fields.get("id")] = {
530 fields.get("version"): data_ind
531 }
532 index["{}_packages".format(package_type)][fields.get("id")][
533 "latest"
534 ] = fields.get("version")
535 yaml.safe_dump(
536 index,
537 open(join(destination, "index.yaml"), "w"),
538 default_flow_style=False,
539 width=80,
540 indent=4,
541 )
542 self._logger.info(
543 "{} {} added in the repository".format(package_type.upper(), str(path))
544 )
545
546 def current_datetime(self):
547 """
548 Datetime Generator
549 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
550 """
551 self._logger.debug("")
552 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
553
554 def init_directory(self, destination):
555 """
556 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
557 :param destination:
558 :return:
559 """
560 self._logger.debug("")
561 if not isdir(destination):
562 mkdir(destination)
563 if not isfile(join(destination, "index.yaml")):
564 mkdir(join(destination, "vnf"))
565 mkdir(join(destination, "ns"))
566 mkdir(join(destination, "nst"))
567 index_data = {
568 "apiVersion": "v1",
569 "generated": self.current_datetime(),
570 "vnf_packages": {},
571 "ns_packages": {},
572 "nst_packages": {},
573 }
574 with open(join(destination, "index.yaml"), "w") as outfile:
575 yaml.safe_dump(
576 index_data, outfile, default_flow_style=False, width=80, indent=4
577 )