# feature: sol004 and sol007
# [osm/osmclient.git] osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
import glob
import hashlib
import logging
import tarfile
import tempfile
import time
from datetime import datetime, timezone
from os import getcwd, listdir, makedirs, mkdir, remove
from os.path import abspath, isdir, isfile, join
from shutil import copyfile, rmtree

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from packaging import version as versioning
import requests
import yaml
35
36
class OSMRepo(Repo):
    """Client-side handler for OSM repository operations (admin API)."""

    def __init__(self, http=None, client=None):
        """
        Keep references to the HTTP helper and client and build the API base path.

        :param http: HTTP transport helper used for API calls
        :param client: OSM client instance (token management, listings)
        """
        self._http = http
        self._client = client
        self._apiName = "/admin"
        self._apiVersion = "/v1"
        self._apiResource = "/osmrepos"
        self._logger = logging.getLogger("osmclient")
        self._apiBase = f"{self._apiName}{self._apiVersion}{self._apiResource}"
48
49 def pkg_list(self, pkgtype, filter=None, repo=None):
50 """
51 Returns a repo based on name or id
52 """
53 self._logger.debug("")
54 self._client.get_token()
55 # Get OSM registered repository list
56 repositories = self.list()
57 if repo:
58 repositories = [r for r in repositories if r["name"] == repo]
59 if not repositories:
60 raise ClientException("Not repository found")
61
62 vnf_repos = []
63 for repository in repositories:
64 try:
65 r = requests.get("{}/index.yaml".format(repository.get("url")))
66
67 if r.status_code == 200:
68 repo_list = yaml.safe_load(r.text)
69 vnf_packages = repo_list.get("{}_packages".format(pkgtype))
70 for repo in vnf_packages:
71 versions = vnf_packages.get(repo)
72 latest = versions.get("latest")
73 del versions["latest"]
74 for version in versions:
75 latest_version = False
76 if version == latest:
77 latest_version = True
78 vnf_repos.append(
79 {
80 "vendor": versions[version].get("vendor"),
81 "name": versions[version].get("name"),
82 "version": version,
83 "description": versions[version].get("description"),
84 "location": versions[version].get("path"),
85 "repository": repository.get("name"),
86 "repourl": repository.get("url"),
87 "latest": latest_version,
88 }
89 )
90 else:
91 raise Exception(
92 "repository in url {} unreachable".format(repository.get("url"))
93 )
94 except Exception as e:
95 self._logger.error(
96 "Error cannot read from repository {} '{}': {}".format(
97 repository["name"], repository["url"], e
98 ),
99 exc_info=True,
100 )
101 continue
102
103 vnf_repos_filtered = []
104 if filter:
105 for vnf_repo in vnf_repos:
106 for k, v in vnf_repo.items():
107 if v:
108 kf, vf = filter.split("=")
109 if k == kf and vf in v:
110 vnf_repos_filtered.append(vnf_repo)
111 break
112 vnf_repos = vnf_repos_filtered
113 return vnf_repos
114
115 def get_pkg(self, pkgtype, name, repo, filter, version):
116 """
117 Returns the filename of the PKG downloaded to disk
118 """
119 self._logger.debug("")
120 self._client.get_token()
121 f = None
122 f_name = None
123 # Get OSM registered repository list
124 pkgs = self.pkg_list(pkgtype, filter, repo)
125 for pkg in pkgs:
126 if pkg.get("repository") == repo and pkg.get("name") == name:
127 if "latest" in version:
128 if not pkg.get("latest"):
129 continue
130 else:
131 version = pkg.get("version")
132 if pkg.get("version") == version:
133 r = requests.get(
134 "{}{}".format(pkg.get("repourl"), pkg.get("location")),
135 stream=True,
136 )
137 if r.status_code != 200:
138 raise ClientException("Package not found")
139
140 with tempfile.NamedTemporaryFile(delete=False) as f:
141 f.write(r.raw.read())
142 f_name = f.name
143 if not f_name:
144 raise ClientException(
145 "{} {} not found at repo {}".format(pkgtype, name, repo)
146 )
147 return f_name
148
149 def pkg_get(self, pkgtype, name, repo, version, filter):
150
151 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
152 if not pkg_name:
153 raise ClientException("Package not found")
154 folder, descriptor = self.zip_extraction(pkg_name)
155 with open(descriptor) as pkg:
156 pkg_descriptor = yaml.safe_load(pkg)
157 rmtree(folder, ignore_errors=False)
158 if (
159 pkgtype == "vnf"
160 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
161 ) or (
162 pkgtype == "ns"
163 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
164 ):
165 raise ClientException("Wrong Package type")
166 return pkg_descriptor
167
168 def repo_index(self, origin=".", destination="."):
169 """
170 Repo Index main function
171 :param origin: origin directory for getting all the artifacts
172 :param destination: destination folder for create and index the valid artifacts
173 """
174 self._logger.debug("Starting index composition")
175 if destination == ".":
176 if origin == destination:
177 destination = "repository"
178
179 destination = abspath(destination)
180 origin = abspath(origin)
181 self._logger.debug(f"Paths {destination}, {origin}")
182 if origin[0] != "/":
183 origin = join(getcwd(), origin)
184 if destination[0] != "/":
185 destination = join(getcwd(), destination)
186
187 self.init_directory(destination)
188 artifacts = []
189 directories = []
190 for f in listdir(origin):
191 if isfile(join(origin, f)) and f.endswith(".tar.gz"):
192 artifacts.append(f)
193 elif (
194 isdir(join(origin, f))
195 and f != destination.split("/")[-1]
196 and not f.startswith(".")
197 ):
198 directories.append(
199 f
200 ) # TODO: Document that nested directories are not supported
201 else:
202 self._logger.debug(f"Ignoring {f}")
203 for artifact in artifacts:
204 self.register_artifact_in_repository(
205 join(origin, artifact), destination, source="artifact"
206 )
207 for artifact in directories:
208 self.register_artifact_in_repository(
209 join(origin, artifact), destination, source="directory"
210 )
211 self._logger.info("\nFinal Results: ")
212 self._logger.info(
213 "VNF Packages Indexed: "
214 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
215 )
216 self._logger.info(
217 "NS Packages Indexed: "
218 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
219 )
220
221 self._logger.info(
222 "NST Packages Indexed: "
223 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
224 )
225
226 def md5(self, fname):
227 """
228 Checksum generator
229 :param fname: file path
230 :return: checksum string
231 """
232 self._logger.debug("")
233 hash_md5 = hashlib.md5()
234 with open(fname, "rb") as f:
235 for chunk in iter(lambda: f.read(4096), b""):
236 hash_md5.update(chunk)
237 return hash_md5.hexdigest()
238
239 def fields_building(self, descriptor_dict, file, package_type):
240 """
241 From an artifact descriptor, obtain the fields required for indexing
242 :param descriptor_dict: artifact description
243 :param file: artifact package
244 :param package_type: type of artifact (vnf, ns, nst)
245 :return: fields
246 """
247 self._logger.debug("")
248
249 fields = {}
250 base_path = "/{}/".format(package_type)
251 aux_dict = {}
252 if package_type == "vnf":
253 if descriptor_dict.get("vnfd-catalog", False):
254 aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
255 elif descriptor_dict.get("vnfd:vnfd-catalog"):
256 aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
257 "vnfd", [{}]
258 )[0]
259 elif descriptor_dict.get("vnfd"):
260 aux_dict = descriptor_dict["vnfd"]
261 if aux_dict.get("vnfd"):
262 aux_dict = aux_dict["vnfd"][0]
263 else:
264 msg = f"Unexpected descriptor format {descriptor_dict}"
265 self._logger.error(msg)
266 raise ValueError(msg)
267 self._logger.debug(
268 f"Extracted descriptor info for {package_type}: {aux_dict}"
269 )
270 images = []
271 for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
272 images.append(vdu.get("image", vdu.get("name")))
273 fields["images"] = images
274 elif package_type == "ns":
275 if descriptor_dict.get("nsd-catalog", False):
276 aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
277 elif descriptor_dict.get("nsd:nsd-catalog"):
278 aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
279 0
280 ]
281 elif descriptor_dict.get("nsd"):
282 aux_dict = descriptor_dict["nsd"]
283 if aux_dict.get("nsd"):
284 aux_dict = descriptor_dict["nsd"]["nsd"][0]
285 else:
286 msg = f"Unexpected descriptor format {descriptor_dict}"
287 self._logger.error(msg)
288 raise ValueError(msg)
289 vnfs = []
290 if aux_dict.get("constituent-vnfd"):
291 for vnf in aux_dict.get("constituent-vnfd", ()):
292 vnfs.append(vnf.get("vnfd-id-ref"))
293 else:
294 vnfs = aux_dict.get("vnfd-id")
295 self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
296 fields["vnfd-id-ref"] = vnfs
297 elif package_type == "nst":
298 if descriptor_dict.get("nst-catalog", False):
299 aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
300 elif descriptor_dict.get("nst:nst-catalog"):
301 aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
302 0
303 ]
304 elif descriptor_dict.get("nst"):
305 aux_dict = descriptor_dict["nst"]
306 if aux_dict.get("nst"):
307 aux_dict = descriptor_dict["nst"]["nst"][0]
308 nsds = []
309 for nsd in aux_dict.get("netslice-subnet", ()):
310 nsds.append(nsd.get("nsd-ref"))
311 self._logger.debug("Used NSDs in the NST: " + str(nsds))
312 if not nsds:
313 msg = f"Unexpected descriptor format {descriptor_dict}"
314 self._logger.error(msg)
315 raise ValueError(msg)
316 fields["nsd-id-ref"] = nsds
317 else:
318 msg = f"Unexpected descriptor format {descriptor_dict}"
319 self._logger.error(msg)
320 raise ValueError(msg)
321
322 fields["name"] = aux_dict.get("name")
323 fields["id"] = aux_dict.get("id")
324 fields["description"] = aux_dict.get("description")
325 fields["vendor"] = aux_dict.get("vendor")
326 fields["version"] = str(aux_dict.get("version", "1.0"))
327 fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
328 base_path,
329 fields["id"],
330 fields["version"],
331 fields.get("id"),
332 fields.get("version"),
333 )
334 return fields
335
336 def zip_extraction(self, file_name):
337 """
338 Validation of artifact.
339 :param file: file path
340 :return: status details, status, fields, package_type
341 """
342 self._logger.debug("Decompressing package file")
343 temp_file = "/tmp/{}".format(file_name.split("/")[-1])
344 if file_name != temp_file:
345 copyfile(file_name, temp_file)
346 with tarfile.open(temp_file, "r:gz") as tar:
347 folder = tar.getnames()[0].split("/")[0]
348 tar.extractall()
349
350 remove(temp_file)
351 descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
352 return folder, descriptor_file
353
    def validate_artifact(self, path, source):
        """
        Validation of artifact.
        :param path: file path (directory or .tar.gz package, per `source`)
        :param source: flag to select the correct file type (directory or artifact)
        :return: status details, status, fields, package_type
        """
        self._logger.debug(f"Validating {path} {source}")
        package_type = ""
        folder = ""
        try:
            if source == "directory":
                # First *.yaml/*.yml inside the directory is taken as descriptor
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                # Packages are extracted first; `folder` is cleaned up in finally
                folder, descriptor_file = self.zip_extraction(path)

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
            self._logger.debug(f"Descriptor data: {descriptor_data}")
            # Syntactic YAML validation, then IM (pyangbind) model validation
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            try:
                validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            except Exception as e:
                self._logger.error(e, exc_info=True)
                raise e
            # Package type is inferred from the descriptor's root key.
            # Order matters: "nst" must be tested before "ns", since "nst"
            # also contains the substring "ns".
            descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
            if "vnf" in descriptor_type_ref:
                package_type = "vnf"
            elif "nst" in descriptor_type_ref:
                package_type = "nst"
            elif "ns" in descriptor_type_ref:
                package_type = "ns"
            else:
                msg = f"Unknown package type {descriptor_type_ref}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug(f"Descriptor successfully validated {fields}")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Any validation failure is reported back as data, not re-raised
            return {"detail": str(e)}, False, {}, package_type
        finally:
            # Remove the extraction folder created by zip_extraction (if any)
            if folder:
                rmtree(folder, ignore_errors=True)
411
412 def register_artifact_in_repository(self, path, destination, source):
413 """
414 Registration of one artifact in a repository
415 param path:
416 param destination: path for index creation
417 param source:
418 """
419 self._logger.debug("")
420 pt = PackageTool()
421 compressed = False
422 try:
423 fields = {}
424 _, valid, fields, package_type = self.validate_artifact(path, source)
425 if not valid:
426 raise Exception(
427 "{} {} Not well configured.".format(package_type.upper(), str(path))
428 )
429 else:
430 if source == "directory":
431 path = pt.build(path)
432 self._logger.debug(f"Directory path {path}")
433 compressed = True
434 fields["checksum"] = self.md5(path)
435 self.indexation(destination, path, package_type, fields)
436
437 except Exception as e:
438 self._logger.exception(
439 "Error registering artifact in Repository: {}".format(e)
440 )
441 raise ClientException(e)
442
443 finally:
444 if source == "directory" and compressed:
445 remove(path)
446
447 def indexation(self, destination, path, package_type, fields):
448 """
449 Process for index packages
450 :param destination: index repository path
451 :param path: path of the package
452 :param package_type: package type (vnf, ns, nst)
453 :param fields: dict with the required values
454 """
455 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
456
457 data_ind = {
458 "name": fields.get("name"),
459 "description": fields.get("description"),
460 "vendor": fields.get("vendor"),
461 "path": fields.get("path"),
462 }
463 self._logger.debug(data_ind)
464 final_path = join(
465 destination, package_type, fields.get("id"), fields.get("version")
466 )
467 if isdir(join(destination, package_type, fields.get("id"))):
468 if isdir(final_path):
469 self._logger.warning(
470 "{} {} already exists".format(package_type.upper(), str(path))
471 )
472 else:
473 mkdir(final_path)
474 copyfile(
475 path,
476 final_path
477 + "/"
478 + fields.get("id")
479 + "-"
480 + fields.get("version")
481 + ".tar.gz",
482 )
483 yaml.safe_dump(
484 fields,
485 open(final_path + "/" + "metadata.yaml", "w"),
486 default_flow_style=False,
487 width=80,
488 indent=4,
489 )
490 index = yaml.safe_load(open(destination + "/index.yaml"))
491
492 index["{}_packages".format(package_type)][fields.get("id")][
493 fields.get("version")
494 ] = data_ind
495 if versioning.parse(
496 index["{}_packages".format(package_type)][fields.get("id")][
497 "latest"
498 ]
499 ) < versioning.parse(fields.get("version")):
500 index["{}_packages".format(package_type)][fields.get("id")][
501 "latest"
502 ] = fields.get("version")
503 yaml.safe_dump(
504 index,
505 open(destination + "/index.yaml", "w"),
506 default_flow_style=False,
507 width=80,
508 indent=4,
509 )
510 self._logger.info(
511 "{} {} added in the repository".format(
512 package_type.upper(), str(path)
513 )
514 )
515 else:
516 mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
517 mkdir(final_path)
518 copyfile(
519 path,
520 final_path
521 + "/"
522 + fields.get("id")
523 + "-"
524 + fields.get("version")
525 + ".tar.gz",
526 )
527 yaml.safe_dump(
528 fields,
529 open(join(final_path, "metadata.yaml"), "w"),
530 default_flow_style=False,
531 width=80,
532 indent=4,
533 )
534 index = yaml.safe_load(open(destination + "/index.yaml"))
535
536 index["{}_packages".format(package_type)][fields.get("id")] = {
537 fields.get("version"): data_ind
538 }
539 index["{}_packages".format(package_type)][fields.get("id")][
540 "latest"
541 ] = fields.get("version")
542 yaml.safe_dump(
543 index,
544 open(join(destination, "index.yaml"), "w"),
545 default_flow_style=False,
546 width=80,
547 indent=4,
548 )
549 self._logger.info(
550 "{} {} added in the repository".format(package_type.upper(), str(path))
551 )
552
553 def current_datetime(self):
554 """
555 Datetime Generator
556 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
557 """
558 self._logger.debug("")
559 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
560
561 def init_directory(self, destination):
562 """
563 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
564 :param destination:
565 :return:
566 """
567 self._logger.debug("")
568 if not isdir(destination):
569 mkdir(destination)
570 if not isfile(join(destination, "index.yaml")):
571 mkdir(join(destination, "vnf"))
572 mkdir(join(destination, "ns"))
573 mkdir(join(destination, "nst"))
574 index_data = {
575 "apiVersion": "v1",
576 "generated": self.current_datetime(),
577 "vnf_packages": {},
578 "ns_packages": {},
579 "nst_packages": {},
580 }
581 with open(join(destination, "index.yaml"), "w") as outfile:
582 yaml.safe_dump(
583 index_data, outfile, default_flow_style=False, width=80, indent=4
584 )