Enable black and pylint in tox, and update code accordingly
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
from datetime import datetime, timezone
import glob
import logging
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile
import time

from osm_im.validation import Validation as validation_im
from packaging import version as versioning
import requests
import yaml

from osmclient.common import utils
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
35
36
class OSMRepo(Repo):
    def __init__(self, http=None, client=None):
        """Client for the OSM repository admin API (/admin/v1/osmrepos)."""
        self._http = http
        self._client = client
        self._apiName = "/admin"
        self._apiVersion = "/v1"
        self._apiResource = "/osmrepos"
        self._logger = logging.getLogger("osmclient")
        # Base endpoint assembled from the three components above
        self._apiBase = self._apiName + self._apiVersion + self._apiResource
48
49 def pkg_list(self, pkgtype, filter=None, repo=None):
50 """
51 Returns a repo based on name or id
52 """
53 self._logger.debug("")
54 self._client.get_token()
55 # Get OSM registered repository list
56 repositories = self.list()
57 if repo:
58 repositories = [r for r in repositories if r["name"] == repo]
59 if not repositories:
60 raise ClientException("Not repository found")
61
62 vnf_repos = []
63 for repository in repositories:
64 try:
65 r = requests.get("{}/index.yaml".format(repository.get("url")))
66
67 if r.status_code == 200:
68 repo_list = yaml.safe_load(r.text)
69 vnf_packages = repo_list.get("{}_packages".format(pkgtype))
70 for repo in vnf_packages:
71 versions = vnf_packages.get(repo)
72 latest = versions.get("latest")
73 del versions["latest"]
74 for version in versions:
75 latest_version = False
76 if version == latest:
77 latest_version = True
78 vnf_repos.append(
79 {
80 "vendor": versions[version].get("vendor"),
81 "name": versions[version].get("name"),
82 "version": version,
83 "description": versions[version].get("description"),
84 "location": versions[version].get("path"),
85 "repository": repository.get("name"),
86 "repourl": repository.get("url"),
87 "latest": latest_version,
88 }
89 )
90 else:
91 raise Exception(
92 "repository in url {} unreachable".format(repository.get("url"))
93 )
94 except Exception as e:
95 self._logger.error(
96 "Error cannot read from repository {} '{}': {}".format(
97 repository["name"], repository["url"], e
98 ),
99 exc_info=True,
100 )
101 continue
102
103 vnf_repos_filtered = []
104 if filter:
105 for vnf_repo in vnf_repos:
106 for k, v in vnf_repo.items():
107 if v:
108 kf, vf = filter.split("=")
109 if k == kf and vf in v:
110 vnf_repos_filtered.append(vnf_repo)
111 break
112 vnf_repos = vnf_repos_filtered
113 return vnf_repos
114
115 def get_pkg(self, pkgtype, name, repo, filter, version):
116 """
117 Returns the filename of the PKG downloaded to disk
118 """
119 self._logger.debug("")
120 self._client.get_token()
121 f = None
122 f_name = None
123 # Get OSM registered repository list
124 pkgs = self.pkg_list(pkgtype, filter, repo)
125 for pkg in pkgs:
126 if pkg.get("repository") == repo and pkg.get("name") == name:
127 if "latest" in version:
128 if not pkg.get("latest"):
129 continue
130 else:
131 version = pkg.get("version")
132 if pkg.get("version") == version:
133 r = requests.get(
134 "{}{}".format(pkg.get("repourl"), pkg.get("location")),
135 stream=True,
136 )
137 if r.status_code != 200:
138 raise ClientException("Package not found")
139
140 with tempfile.NamedTemporaryFile(delete=False) as f:
141 f.write(r.raw.read())
142 f_name = f.name
143 if not f_name:
144 raise ClientException(
145 "{} {} not found at repo {}".format(pkgtype, name, repo)
146 )
147 return f_name
148
149 def pkg_get(self, pkgtype, name, repo, version, filter):
150
151 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
152 if not pkg_name:
153 raise ClientException("Package not found")
154 folder, descriptor = self.zip_extraction(pkg_name)
155 with open(descriptor) as pkg:
156 pkg_descriptor = yaml.safe_load(pkg)
157 rmtree(folder, ignore_errors=False)
158 if (
159 pkgtype == "vnf"
160 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
161 ) or (
162 pkgtype == "ns"
163 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
164 ):
165 raise ClientException("Wrong Package type")
166 return pkg_descriptor
167
168 def repo_index(self, origin=".", destination="."):
169 """
170 Repo Index main function
171 :param origin: origin directory for getting all the artifacts
172 :param destination: destination folder for create and index the valid artifacts
173 """
174 self._logger.debug("Starting index composition")
175 if destination == ".":
176 if origin == destination:
177 destination = "repository"
178
179 destination = abspath(destination)
180 origin = abspath(origin)
181 self._logger.debug(f"Paths {destination}, {origin}")
182 if origin[0] != "/":
183 origin = join(getcwd(), origin)
184 if destination[0] != "/":
185 destination = join(getcwd(), destination)
186
187 self.init_directory(destination)
188 artifacts = []
189 directories = []
190 for f in listdir(origin):
191 self._logger.debug(f"Element: {join(origin,f)}")
192 if isfile(join(origin, f)) and f.endswith(".tar.gz"):
193 artifacts.append(f)
194 elif (
195 isdir(join(origin, f))
196 and f != destination.split("/")[-1]
197 and not f.startswith(".")
198 ):
199 directories.append(
200 f
201 ) # TODO: Document that nested directories are not supported
202 else:
203 self._logger.debug(f"Ignoring {f}")
204 self._logger.debug(f"Artifacts: {artifacts}")
205 for package in artifacts:
206 self.register_package_in_repository(
207 join(origin, package), origin, destination, kind="artifact"
208 )
209 self._logger.debug(f"Directories: {directories}")
210 for package in directories:
211 self.register_package_in_repository(
212 join(origin, package), origin, destination, kind="directory"
213 )
214 self._logger.info("\nFinal Results: ")
215 self._logger.info(
216 "VNF Packages Indexed: "
217 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
218 )
219 self._logger.info(
220 "NS Packages Indexed: "
221 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
222 )
223
224 self._logger.info(
225 "NST Packages Indexed: "
226 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
227 )
228
    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing
        :param descriptor_dict: artifact description (parsed YAML descriptor)
        :param file: artifact package path (not referenced in this method;
            kept for interface stability)
        :param package_type: type of artifact (vnf, ns, nst)
        :return: fields dict with name, id, description, vendor, version,
            path, plus images / vnfd-id-ref / nsd-id-ref depending on type
        :raises ValueError: if the descriptor layout is not recognized
        """
        self._logger.debug("")

        fields = {}
        base_path = "/{}/".format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            # Accept the legacy "vnfd-catalog" / "vnfd:vnfd-catalog" layouts
            # as well as a top-level "vnfd" (possibly nested one level more).
            if descriptor_dict.get("vnfd-catalog", False):
                aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
            elif descriptor_dict.get("vnfd:vnfd-catalog"):
                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
                    "vnfd", [{}]
                )[0]
            elif descriptor_dict.get("vnfd"):
                aux_dict = descriptor_dict["vnfd"]
                if aux_dict.get("vnfd"):
                    aux_dict = aux_dict["vnfd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug(
                f"Extracted descriptor info for {package_type}: {aux_dict}"
            )
            # Collect the images referenced by each vdu (or kdu as fallback);
            # falls back to the unit's name when no "image" field exists.
            images = []
            for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
                images.append(vdu.get("image", vdu.get("name")))
            fields["images"] = images
        elif package_type == "ns":
            # Same layout tolerance as the vnf branch, for NS descriptors.
            if descriptor_dict.get("nsd-catalog", False):
                aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
            elif descriptor_dict.get("nsd:nsd-catalog"):
                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
                    0
                ]
            elif descriptor_dict.get("nsd"):
                aux_dict = descriptor_dict["nsd"]
                if aux_dict.get("nsd"):
                    # Equivalent to aux_dict["nsd"][0]: aux_dict is
                    # descriptor_dict["nsd"] at this point.
                    aux_dict = descriptor_dict["nsd"]["nsd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            # Record the VNFDs this NS uses — old field name
            # ("constituent-vnfd") first, newer "vnfd-id" as fallback.
            vnfs = []
            if aux_dict.get("constituent-vnfd"):
                for vnf in aux_dict.get("constituent-vnfd", ()):
                    vnfs.append(vnf.get("vnfd-id-ref"))
            else:
                vnfs = aux_dict.get("vnfd-id")
            self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
            fields["vnfd-id-ref"] = vnfs
        elif package_type == "nst":
            if descriptor_dict.get("nst-catalog", False):
                aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
            elif descriptor_dict.get("nst:nst-catalog"):
                aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
                    0
                ]
            elif descriptor_dict.get("nst"):
                aux_dict = descriptor_dict["nst"]
                if aux_dict.get("nst"):
                    aux_dict = descriptor_dict["nst"]["nst"][0]
            # Record the NSDs referenced by each netslice subnet; an empty
            # result means no recognizable NST layout was found above.
            nsds = []
            for nsd in aux_dict.get("netslice-subnet", ()):
                nsds.append(nsd.get("nsd-ref"))
            self._logger.debug("Used NSDs in the NST: " + str(nsds))
            if not nsds:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            fields["nsd-id-ref"] = nsds
        else:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)
        # Repo search is based on 'name' entry in index.yaml. It is mandatory then
        # (KeyError on "product-name" if neither name field is present).
        fields["name"] = aux_dict.get("name", aux_dict["product-name"])
        fields["id"] = aux_dict.get("id")
        fields["description"] = aux_dict.get("description")
        fields["vendor"] = aux_dict.get("vendor")
        fields["version"] = str(aux_dict.get("version", "1.0"))
        # Path inside the repo: /<type>/<id>/<version>/<id>-<version>.tar.gz
        fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
            base_path,
            fields["id"],
            fields["version"],
            fields.get("id"),
            fields.get("version"),
        )
        return fields
325
326 def zip_extraction(self, file_name):
327 """
328 Validation of artifact.
329 :param file: file path
330 :return: status details, status, fields, package_type
331 """
332 self._logger.debug("Decompressing package file")
333 temp_file = "/tmp/{}".format(file_name.split("/")[-1])
334 if file_name != temp_file:
335 copyfile(file_name, temp_file)
336 with tarfile.open(temp_file, "r:gz") as tar:
337 folder = tar.getnames()[0].split("/")[0]
338 tar.extractall()
339
340 remove(temp_file)
341 descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
342 return folder, descriptor_file
343
    def validate_artifact(self, path, origin, kind):
        """
        Validation of artifact.
        :param path: file path
        :param origin: folder where the package is located
        :param kind: flag to select the correct file type (directory or artifact)
        :return: status details, status, fields, package_type — on failure
            the status details carry the error string, status is False and
            fields is an empty dict
        """
        self._logger.debug(f"Validating {path} {kind}")
        package_type = ""
        folder = ""
        try:
            if kind == "directory":
                # A directory package keeps its descriptor directly inside
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                # A tar.gz artifact must be extracted first
                folder, descriptor_file = self.zip_extraction(path)
                folder = join(origin, folder)
                self._logger.debug(
                    f"Kind is an artifact (tar.gz). Folder: {folder}. Descriptor_file: {descriptor_file}"
                )

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
            self._logger.debug(f"Descriptor data: {descriptor_data}")
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            try:
                # NOTE(review): pyangbind_validation is called on the class
                # with this OSMRepo instance as its first argument — confirm
                # against the osm_im Validation API.
                validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            except Exception as e:
                self._logger.error(e, exc_info=True)
                raise e
            # The first top-level key identifies the descriptor type.
            # "nst" must be tested before "ns": "ns" is a substring of "nst".
            descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
            if "vnf" in descriptor_type_ref:
                package_type = "vnf"
            elif "nst" in descriptor_type_ref:
                package_type = "nst"
            elif "ns" in descriptor_type_ref:
                package_type = "ns"
            else:
                msg = f"Unknown package type {descriptor_type_ref}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug(f"Descriptor successfully validated {fields}")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Any failure is reported in the return tuple instead of
            # propagating, so the caller can log and skip this package.
            return {"detail": str(e)}, False, {}, package_type
        finally:
            # Delete the folder we just created (extraction leftovers)
            if folder:
                rmtree(folder, ignore_errors=True)
406
407 def register_package_in_repository(self, path, origin, destination, kind):
408 """
409 Registration of one artifact in a repository
410 :param path: absolute path of the VNF/NS package
411 :param origin: folder where the package is located
412 :param destination: path for index creation
413 :param kind: artifact (tar.gz) or directory
414 """
415 self._logger.debug("")
416 pt = PackageTool()
417 compressed = False
418 try:
419 fields = {}
420 _, valid, fields, package_type = self.validate_artifact(path, origin, kind)
421 if not valid:
422 raise Exception(
423 "{} {} Not well configured.".format(package_type.upper(), str(path))
424 )
425 else:
426 if kind == "directory":
427 path = pt.build(path)
428 self._logger.debug(f"Directory path {path}")
429 compressed = True
430 fields["checksum"] = utils.md5(path)
431 self.indexation(destination, path, package_type, fields)
432
433 except Exception as e:
434 self._logger.exception(
435 "Error registering package in Repository: {}".format(e)
436 )
437 raise ClientException(e)
438
439 finally:
440 if kind == "directory" and compressed:
441 remove(path)
442
443 def indexation(self, destination, path, package_type, fields):
444 """
445 Process for index packages
446 :param destination: index repository path
447 :param path: path of the package
448 :param package_type: package type (vnf, ns, nst)
449 :param fields: dict with the required values
450 """
451 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
452
453 data_ind = {
454 "name": fields.get("name"),
455 "description": fields.get("description"),
456 "vendor": fields.get("vendor"),
457 "path": fields.get("path"),
458 }
459 self._logger.debug(data_ind)
460 final_path = join(
461 destination, package_type, fields.get("id"), fields.get("version")
462 )
463 if isdir(join(destination, package_type, fields.get("id"))):
464 if isdir(final_path):
465 self._logger.warning(
466 "{} {} already exists".format(package_type.upper(), str(path))
467 )
468 else:
469 mkdir(final_path)
470 copyfile(
471 path,
472 final_path
473 + "/"
474 + fields.get("id")
475 + "-"
476 + fields.get("version")
477 + ".tar.gz",
478 )
479 yaml.safe_dump(
480 fields,
481 open(final_path + "/" + "metadata.yaml", "w"),
482 default_flow_style=False,
483 width=80,
484 indent=4,
485 )
486 index = yaml.safe_load(open(destination + "/index.yaml"))
487
488 index["{}_packages".format(package_type)][fields.get("id")][
489 fields.get("version")
490 ] = data_ind
491 if versioning.parse(
492 index["{}_packages".format(package_type)][fields.get("id")][
493 "latest"
494 ]
495 ) < versioning.parse(fields.get("version")):
496 index["{}_packages".format(package_type)][fields.get("id")][
497 "latest"
498 ] = fields.get("version")
499 yaml.safe_dump(
500 index,
501 open(destination + "/index.yaml", "w"),
502 default_flow_style=False,
503 width=80,
504 indent=4,
505 )
506 self._logger.info(
507 "{} {} added in the repository".format(
508 package_type.upper(), str(path)
509 )
510 )
511 else:
512 mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
513 mkdir(final_path)
514 copyfile(
515 path,
516 final_path
517 + "/"
518 + fields.get("id")
519 + "-"
520 + fields.get("version")
521 + ".tar.gz",
522 )
523 yaml.safe_dump(
524 fields,
525 open(join(final_path, "metadata.yaml"), "w"),
526 default_flow_style=False,
527 width=80,
528 indent=4,
529 )
530 index = yaml.safe_load(open(destination + "/index.yaml"))
531
532 index["{}_packages".format(package_type)][fields.get("id")] = {
533 fields.get("version"): data_ind
534 }
535 index["{}_packages".format(package_type)][fields.get("id")][
536 "latest"
537 ] = fields.get("version")
538 yaml.safe_dump(
539 index,
540 open(join(destination, "index.yaml"), "w"),
541 default_flow_style=False,
542 width=80,
543 indent=4,
544 )
545 self._logger.info(
546 "{} {} added in the repository".format(package_type.upper(), str(path))
547 )
548
549 def current_datetime(self):
550 """
551 Datetime Generator
552 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
553 """
554 self._logger.debug("")
555 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
556
557 def init_directory(self, destination):
558 """
559 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
560 :param destination:
561 :return:
562 """
563 self._logger.debug("")
564 if not isdir(destination):
565 mkdir(destination)
566 if not isfile(join(destination, "index.yaml")):
567 mkdir(join(destination, "vnf"))
568 mkdir(join(destination, "ns"))
569 mkdir(join(destination, "nst"))
570 index_data = {
571 "apiVersion": "v1",
572 "generated": self.current_datetime(),
573 "vnf_packages": {},
574 "ns_packages": {},
575 "nst_packages": {},
576 }
577 with open(join(destination, "index.yaml"), "w") as outfile:
578 yaml.safe_dump(
579 index_data, outfile, default_flow_style=False, width=80, indent=4
580 )