Fix Bug 1493 repo generation from osm-packages
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
18 import glob
19 import hashlib
20 import logging
21 from os import listdir, mkdir, getcwd, remove
22 from os.path import isfile, isdir, join, abspath
23 from shutil import copyfile, rmtree
24 import tarfile
25 import tempfile
26 import time
27
28 from osm_im.validation import Validation as validation_im
29 from osmclient.common.exceptions import ClientException
30 from osmclient.common.package_tool import PackageTool
31 from osmclient.sol005.repo import Repo
32 from packaging import version as versioning
33 import requests
34 import yaml
35
36
class OSMRepo(Repo):
    """Handler for OSM repository operations on the SOL005 admin API."""

    def __init__(self, http=None, client=None):
        self._http = http
        self._client = client
        self._logger = logging.getLogger("osmclient")
        # Components of the REST endpoint for repository management
        self._apiName = "/admin"
        self._apiVersion = "/v1"
        self._apiResource = "/osmrepos"
        self._apiBase = f"{self._apiName}{self._apiVersion}{self._apiResource}"
48
49 def pkg_list(self, pkgtype, filter=None, repo=None):
50 """
51 Returns a repo based on name or id
52 """
53 self._logger.debug("")
54 self._client.get_token()
55 # Get OSM registered repository list
56 repositories = self.list()
57 if repo:
58 repositories = [r for r in repositories if r["name"] == repo]
59 if not repositories:
60 raise ClientException("Not repository found")
61
62 vnf_repos = []
63 for repository in repositories:
64 try:
65 r = requests.get("{}/index.yaml".format(repository.get("url")))
66
67 if r.status_code == 200:
68 repo_list = yaml.safe_load(r.text)
69 vnf_packages = repo_list.get("{}_packages".format(pkgtype))
70 for repo in vnf_packages:
71 versions = vnf_packages.get(repo)
72 latest = versions.get("latest")
73 del versions["latest"]
74 for version in versions:
75 latest_version = False
76 if version == latest:
77 latest_version = True
78 vnf_repos.append(
79 {
80 "vendor": versions[version].get("vendor"),
81 "name": versions[version].get("name"),
82 "version": version,
83 "description": versions[version].get("description"),
84 "location": versions[version].get("path"),
85 "repository": repository.get("name"),
86 "repourl": repository.get("url"),
87 "latest": latest_version,
88 }
89 )
90 else:
91 raise Exception(
92 "repository in url {} unreachable".format(repository.get("url"))
93 )
94 except Exception as e:
95 self._logger.error(
96 "Error cannot read from repository {} '{}': {}".format(
97 repository["name"], repository["url"], e
98 ),
99 exc_info=True
100 )
101 continue
102
103 vnf_repos_filtered = []
104 if filter:
105 for vnf_repo in vnf_repos:
106 for k, v in vnf_repo.items():
107 if v:
108 kf, vf = filter.split("=")
109 if k == kf and vf in v:
110 vnf_repos_filtered.append(vnf_repo)
111 break
112 vnf_repos = vnf_repos_filtered
113 return vnf_repos
114
115 def get_pkg(self, pkgtype, name, repo, filter, version):
116 """
117 Returns the filename of the PKG downloaded to disk
118 """
119 self._logger.debug("")
120 self._client.get_token()
121 f = None
122 f_name = None
123 # Get OSM registered repository list
124 pkgs = self.pkg_list(pkgtype, filter, repo)
125 for pkg in pkgs:
126 if pkg.get("repository") == repo and pkg.get("name") == name:
127 if "latest" in version:
128 if not pkg.get("latest"):
129 continue
130 else:
131 version = pkg.get("version")
132 if pkg.get("version") == version:
133 r = requests.get(
134 "{}{}".format(pkg.get("repourl"), pkg.get("location")),
135 stream=True,
136 )
137 if r.status_code != 200:
138 raise ClientException("Package not found")
139
140 with tempfile.NamedTemporaryFile(delete=False) as f:
141 f.write(r.raw.read())
142 f_name = f.name
143 if not f_name:
144 raise ClientException(
145 "{} {} not found at repo {}".format(pkgtype, name, repo)
146 )
147 return f_name
148
149 def pkg_get(self, pkgtype, name, repo, version, filter):
150
151 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
152 if not pkg_name:
153 raise ClientException("Package not found")
154 folder, descriptor = self.zip_extraction(pkg_name)
155 with open(descriptor) as pkg:
156 pkg_descriptor = yaml.safe_load(pkg)
157 rmtree(folder, ignore_errors=False)
158 if (
159 pkgtype == "vnf"
160 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
161 ) or (
162 pkgtype == "ns"
163 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
164 ):
165 raise ClientException("Wrong Package type")
166 return pkg_descriptor
167
168 def repo_index(self, origin=".", destination="."):
169 """
170 Repo Index main function
171 :param origin: origin directory for getting all the artifacts
172 :param destination: destination folder for create and index the valid artifacts
173 """
174 self._logger.debug("Starting index composition")
175 if destination == ".":
176 if origin == destination:
177 destination = "repository"
178
179 destination = abspath(destination)
180 origin = abspath(origin)
181 self._logger.debug(f"Paths {destination}, {origin}")
182 if origin[0] != "/":
183 origin = join(getcwd(), origin)
184 if destination[0] != "/":
185 destination = join(getcwd(), destination)
186
187 self.init_directory(destination)
188 artifacts = []
189 directories = []
190 for f in listdir(origin):
191 if isfile(join(origin, f)) and f.endswith('.tar.gz'):
192 artifacts.append(f)
193 elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
194 directories.append(f) # TODO: Document that nested directories are not supported
195 else:
196 self._logger.debug(f"Ignoring {f}")
197 for artifact in artifacts:
198 self.register_artifact_in_repository(
199 join(origin, artifact), destination, source="artifact"
200 )
201 for artifact in directories:
202 self.register_artifact_in_repository(
203 join(origin, artifact), destination, source="directory"
204 )
205 self._logger.info("\nFinal Results: ")
206 self._logger.info(
207 "VNF Packages Indexed: "
208 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
209 )
210 self._logger.info(
211 "NS Packages Indexed: "
212 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
213 )
214
215 self._logger.info(
216 "NST Packages Indexed: "
217 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
218 )
219
220 def md5(self, fname):
221 """
222 Checksum generator
223 :param fname: file path
224 :return: checksum string
225 """
226 self._logger.debug("")
227 hash_md5 = hashlib.md5()
228 with open(fname, "rb") as f:
229 for chunk in iter(lambda: f.read(4096), b""):
230 hash_md5.update(chunk)
231 return hash_md5.hexdigest()
232
    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing.

        Several descriptor layouts are accepted per package type: the
        "<type>-catalog" / "<type>:<type>-catalog" wrappers and the plain
        "<type>" key (which may itself wrap a one-element list).

        :param descriptor_dict: artifact descriptor content, already parsed
        :param file: artifact package path (not used by this method)
        :param package_type: type of artifact (vnf, ns, nst)
        :return: dict with name, id, description, vendor, version, path and
            the per-type extras (images / vnfd-id-ref / nsd-id-ref)
        :raises ValueError: when the descriptor layout is not recognized
        """
        self._logger.debug("")

        fields = {}
        # Packages are indexed under /<type>/<id>/<version>/ in the repo tree.
        base_path = "/{}/".format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            if descriptor_dict.get("vnfd-catalog", False):
                aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
            elif descriptor_dict.get("vnfd:vnfd-catalog"):
                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
            elif descriptor_dict.get("vnfd"):
                aux_dict = descriptor_dict["vnfd"]
                # The bare "vnfd" key may wrap a list of descriptors; take the first.
                if aux_dict.get("vnfd"):
                    aux_dict = aux_dict['vnfd'][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
            # Collect the image of each vdu (or, failing that, each kdu name).
            images = []
            for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
                images.append(vdu.get("image", vdu.get('name')))
            fields["images"] = images
        elif package_type == "ns":
            if descriptor_dict.get("nsd-catalog", False):
                aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
            elif descriptor_dict.get("nsd:nsd-catalog"):
                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
            elif descriptor_dict.get("nsd"):
                aux_dict = descriptor_dict['nsd']
                # The bare "nsd" key may wrap a list of descriptors; take the first.
                if aux_dict.get("nsd"):
                    aux_dict = descriptor_dict["nsd"]["nsd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            # Member VNFs come either from "constituent-vnfd" entries (take
            # their "vnfd-id-ref") or, failing that, the "vnfd-id" list as-is.
            vnfs = []
            if aux_dict.get("constituent-vnfd"):
                for vnf in aux_dict.get("constituent-vnfd", ()):
                    vnfs.append(vnf.get("vnfd-id-ref"))
            else:
                vnfs = aux_dict.get('vnfd-id')
            self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
            fields["vnfd-id-ref"] = vnfs
        elif package_type == 'nst':
            if descriptor_dict.get("nst-catalog", False):
                aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
            elif descriptor_dict.get("nst:nst-catalog"):
                aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
            elif descriptor_dict.get("nst"):
                aux_dict = descriptor_dict['nst']
                # The bare "nst" key may wrap a list of descriptors; take the first.
                if aux_dict.get("nst"):
                    aux_dict = descriptor_dict["nst"]["nst"][0]
            # An NST with no netslice-subnet entries is treated as malformed below.
            nsds = []
            for nsd in aux_dict.get("netslice-subnet", ()):
                nsds.append(nsd.get("nsd-ref"))
            self._logger.debug("Used NSDs in the NST: " + str(nsds))
            if not nsds:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            fields["nsd-id-ref"] = nsds
        else:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)

        # Common indexing metadata shared by all package types.
        fields["name"] = aux_dict.get("name")
        fields["id"] = aux_dict.get("id")
        fields["description"] = aux_dict.get("description")
        fields["vendor"] = aux_dict.get("vendor")
        # Version is forced to a string so it can be embedded in paths.
        fields["version"] = str(aux_dict.get("version", "1.0"))
        # Relative location of the tarball inside the repository tree.
        fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
            base_path,
            fields["id"],
            fields["version"],
            fields.get("id"),
            fields.get("version"),
        )
        return fields
321
322 def zip_extraction(self, file_name):
323 """
324 Validation of artifact.
325 :param file: file path
326 :return: status details, status, fields, package_type
327 """
328 self._logger.debug("Decompressing package file")
329 temp_file = "/tmp/{}".format(file_name.split("/")[-1])
330 if file_name != temp_file:
331 copyfile(file_name, temp_file)
332 with tarfile.open(temp_file, "r:gz") as tar:
333 folder = tar.getnames()[0].split("/")[0]
334 tar.extractall()
335
336 remove(temp_file)
337 descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
338 return folder, descriptor_file
339
340 def validate_artifact(self, path, source):
341 """
342 Validation of artifact.
343 :param path: file path
344 :param source: flag to select the correct file type (directory or artifact)
345 :return: status details, status, fields, package_type
346 """
347 self._logger.debug(f"Validating {path} {source}")
348 package_type = ""
349 folder = ""
350 try:
351 if source == "directory":
352 descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
353 else:
354 folder, descriptor_file = self.zip_extraction(path)
355
356 self._logger.debug("Opening descriptor file: {}".format(descriptor_file))
357
358 with open(descriptor_file, "r") as f:
359 descriptor_data = f.read()
360 self._logger.debug(f"Descriptor data: {descriptor_data}")
361 validation = validation_im()
362 desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
363 try:
364 validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
365 except Exception as e:
366 self._logger.error(e, exc_info=True)
367 raise e
368 descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
369 if "vnf" in descriptor_type_ref:
370 package_type = "vnf"
371 elif "nst" in descriptor_type_ref:
372 package_type = "nst"
373 elif "ns" in descriptor_type_ref:
374 package_type = "ns"
375 else:
376 msg = f"Unknown package type {descriptor_type_ref}"
377 self._logger.error(msg)
378 raise ValueError(msg)
379 self._logger.debug("Descriptor: {}".format(descriptor_dict))
380 fields = self.fields_building(descriptor_dict, path, package_type)
381 self._logger.debug(f"Descriptor successfully validated {fields}")
382 return (
383 {
384 "detail": "{}D successfully validated".format(package_type.upper()),
385 "code": "OK",
386 },
387 True,
388 fields,
389 package_type,
390 )
391 except Exception as e:
392 # Delete the folder we just created
393 return {"detail": str(e)}, False, {}, package_type
394 finally:
395 if folder:
396 rmtree(folder, ignore_errors=True)
397
398 def register_artifact_in_repository(self, path, destination, source):
399 """
400 Registration of one artifact in a repository
401 param path:
402 param destination: path for index creation
403 param source:
404 """
405 self._logger.debug("")
406 pt = PackageTool()
407 compressed = False
408 try:
409 fields = {}
410 _, valid, fields, package_type = self.validate_artifact(path, source)
411 if not valid:
412 raise Exception(
413 "{} {} Not well configured.".format(package_type.upper(), str(path))
414 )
415 else:
416 if source == "directory":
417 path = pt.build(path)
418 self._logger.debug(f"Directory path {path}")
419 compressed = True
420 fields["checksum"] = self.md5(path)
421 self.indexation(destination, path, package_type, fields)
422
423 except Exception as e:
424 self._logger.exception(
425 "Error registering artifact in Repository: {}".format(e)
426 )
427 raise ClientException(e)
428
429 finally:
430 if source == "directory" and compressed:
431 remove(path)
432
433 def indexation(self, destination, path, package_type, fields):
434 """
435 Process for index packages
436 :param destination: index repository path
437 :param path: path of the package
438 :param package_type: package type (vnf, ns, nst)
439 :param fields: dict with the required values
440 """
441 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}")
442
443 data_ind = {
444 "name": fields.get("name"),
445 "description": fields.get("description"),
446 "vendor": fields.get("vendor"),
447 "path": fields.get("path"),
448 }
449 self._logger.debug(data_ind)
450 final_path = join(
451 destination, package_type, fields.get("id"), fields.get("version")
452 )
453 if isdir(join(destination, package_type, fields.get("id"))):
454 if isdir(final_path):
455 self._logger.warning(
456 "{} {} already exists".format(package_type.upper(), str(path))
457 )
458 else:
459 mkdir(final_path)
460 copyfile(
461 path,
462 final_path
463 + "/"
464 + fields.get("id")
465 + "-"
466 + fields.get("version")
467 + ".tar.gz",
468 )
469 yaml.safe_dump(
470 fields,
471 open(final_path + "/" + "metadata.yaml", "w"),
472 default_flow_style=False,
473 width=80,
474 indent=4,
475 )
476 index = yaml.safe_load(open(destination + "/index.yaml"))
477
478 index["{}_packages".format(package_type)][fields.get("id")][
479 fields.get("version")
480 ] = data_ind
481 if versioning.parse(
482 index["{}_packages".format(package_type)][fields.get("id")][
483 "latest"
484 ]
485 ) < versioning.parse(fields.get("version")):
486 index["{}_packages".format(package_type)][fields.get("id")][
487 "latest"
488 ] = fields.get("version")
489 yaml.safe_dump(
490 index,
491 open(destination + "/index.yaml", "w"),
492 default_flow_style=False,
493 width=80,
494 indent=4,
495 )
496 self._logger.info(
497 "{} {} added in the repository".format(
498 package_type.upper(), str(path)
499 )
500 )
501 else:
502 mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
503 mkdir(final_path)
504 copyfile(
505 path,
506 final_path
507 + "/"
508 + fields.get("id")
509 + "-"
510 + fields.get("version")
511 + ".tar.gz",
512 )
513 yaml.safe_dump(
514 fields,
515 open(join(final_path, "metadata.yaml"), "w"),
516 default_flow_style=False,
517 width=80,
518 indent=4,
519 )
520 index = yaml.safe_load(open(destination + "/index.yaml"))
521
522 index["{}_packages".format(package_type)][fields.get("id")] = {
523 fields.get("version"): data_ind
524 }
525 index["{}_packages".format(package_type)][fields.get("id")][
526 "latest"
527 ] = fields.get("version")
528 yaml.safe_dump(
529 index,
530 open(join(destination, "index.yaml"), "w"),
531 default_flow_style=False,
532 width=80,
533 indent=4,
534 )
535 self._logger.info(
536 "{} {} added in the repository".format(package_type.upper(), str(path))
537 )
538
539 def current_datetime(self):
540 """
541 Datetime Generator
542 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
543 """
544 self._logger.debug("")
545 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
546
547 def init_directory(self, destination):
548 """
549 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
550 :param destination:
551 :return:
552 """
553 self._logger.debug("")
554 if not isdir(destination):
555 mkdir(destination)
556 if not isfile(join(destination, "index.yaml")):
557 mkdir(join(destination, "vnf"))
558 mkdir(join(destination, "ns"))
559 mkdir(join(destination, "nst"))
560 index_data = {
561 "apiVersion": "v1",
562 "generated": self.current_datetime(),
563 "vnf_packages": {},
564 "ns_packages": {},
565 "nst_packages": {},
566 }
567 with open(join(destination, "index.yaml"), "w") as outfile:
568 yaml.safe_dump(
569 index_data, outfile, default_flow_style=False, width=80, indent=4
570 )