Fix bug 1494: raise exception if repo-index fails
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
OSM Repo API handling
"""
from datetime import datetime
import glob
import hashlib
import logging
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from packaging import version as versioning
import requests
import yaml


class OSMRepo(Repo):
    def __init__(self, http=None, client=None):
        self._http = http
        self._client = client
        self._apiName = "/admin"
        self._apiVersion = "/v1"
        self._apiResource = "/osmrepos"
        self._logger = logging.getLogger("osmclient")
        self._apiBase = "{}{}{}".format(
            self._apiName, self._apiVersion, self._apiResource
        )

    def pkg_list(self, pkgtype, filter=None, repo=None):
        """
        Returns the list of packages of the given type (vnf or ns) available in the
        registered repositories, optionally restricted to a single repository and/or
        filtered by "key=value"
        """
        self._logger.debug("")
        self._client.get_token()
        # Get OSM registered repository list
        repositories = self.list()
        if repo:
            repositories = [r for r in repositories if r["name"] == repo]
        if not repositories:
            raise ClientException("Repository not found")

        vnf_repos = []
        for repository in repositories:
            try:
                r = requests.get("{}/index.yaml".format(repository.get("url")))

                if r.status_code == 200:
                    repo_list = yaml.safe_load(r.text)
                    vnf_packages = repo_list.get("{}_packages".format(pkgtype))
                    for package in vnf_packages:
                        versions = vnf_packages.get(package)
                        latest = versions.get("latest")
                        del versions["latest"]
                        for version in versions:
                            latest_version = False
                            if version == latest:
                                latest_version = True
                            vnf_repos.append(
                                {
                                    "vendor": versions[version].get("vendor"),
                                    "name": versions[version].get("name"),
                                    "version": version,
                                    "description": versions[version].get("description"),
                                    "location": versions[version].get("path"),
                                    "repository": repository.get("name"),
                                    "repourl": repository.get("url"),
                                    "latest": latest_version,
                                }
                            )
                else:
                    raise Exception(
                        "repository in url {} unreachable".format(repository.get("url"))
                    )
            except Exception as e:
                self._logger.error(
                    "Error: cannot read from repository {} '{}': {}".format(
                        repository["name"], repository["url"], e
                    )
                )
                continue

        vnf_repos_filtered = []
        if filter:
            for vnf_repo in vnf_repos:
                for k, v in vnf_repo.items():
                    if v:
                        kf, vf = filter.split("=")
                        if k == kf and vf in v:
                            vnf_repos_filtered.append(vnf_repo)
                            break
            vnf_repos = vnf_repos_filtered
        return vnf_repos

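    # Illustrative sketch of how pkg_list() output is typically consumed. The
    # client wiring and the names "osm-packages" and "cirros_vnf" are
    # assumptions, not part of this module:
    #
    #     repo_api = OSMRepo(http=http_client, client=osm_client)
    #     for pkg in repo_api.pkg_list("vnf", filter="name=cirros_vnf", repo="osm-packages"):
    #         print(pkg["name"], pkg["version"], pkg["latest"])
    #
    # Each entry carries the keys built above: vendor, name, version,
    # description, location, repository, repourl and latest.
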
    def get_pkg(self, pkgtype, name, repo, filter, version):
        """
        Returns the filename of the PKG downloaded to disk
        """
        self._logger.debug("")
        self._client.get_token()
        f_name = None
        # Get OSM registered repository list
        pkgs = self.pkg_list(pkgtype, filter, repo)
        for pkg in pkgs:
            if pkg.get("repository") == repo and pkg.get("name") == name:
                # "latest" is resolved to the concrete version flagged as latest in the repo index
                if "latest" in version:
                    if not pkg.get("latest"):
                        continue
                    else:
                        version = pkg.get("version")
                if pkg.get("version") == version:
                    r = requests.get(
                        "{}{}".format(pkg.get("repourl"), pkg.get("location")),
                        stream=True,
                    )
                    if r.status_code != 200:
                        raise ClientException("Package not found")

                    with tempfile.NamedTemporaryFile(delete=False) as f:
                        f.write(r.raw.read())
                        f_name = f.name
        if not f_name:
            raise ClientException(
                "{} {} not found at repo {}".format(pkgtype, name, repo)
            )
        return f_name

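    # Minimal usage sketch (assumes an authenticated client; "cirros_vnf" and
    # "osm-packages" are hypothetical names):
    #
    #     tar_path = repo_api.get_pkg("vnf", "cirros_vnf", "osm-packages", None, "latest")
    #
    # The returned path points to a temporary .tar.gz file; callers such as
    # pkg_get() below extract and parse it.
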
    def pkg_get(self, pkgtype, name, repo, version, filter):
        """
        Downloads a package from a repository and returns its descriptor as a dict
        """
        pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
        if not pkg_name:
            raise ClientException("Package not found")
        folder, descriptor = self.zip_extraction(pkg_name)
        with open(descriptor) as pkg:
            pkg_descriptor = yaml.safe_load(pkg)
        rmtree(folder, ignore_errors=False)
        # The descriptor must match the requested package type
        if (
            pkgtype == "vnf"
            and not (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
        ) or (
            pkgtype == "ns"
            and not (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
        ):
            raise ClientException("Wrong Package type")
        return pkg_descriptor

    def repo_index(self, origin=".", destination="."):
        """
        Repo Index main function
        :param origin: origin directory where the artifacts are located
        :param destination: destination folder where the valid artifacts are copied and indexed
        """
        self._logger.debug("")
        if destination == ".":
            if origin == destination:
                destination = "repository"

        destination = abspath(destination)
        origin = abspath(origin)

        if origin[0] != "/":
            origin = join(getcwd(), origin)
        if destination[0] != "/":
            destination = join(getcwd(), destination)

        self.init_directory(destination)
        artifacts = [f for f in listdir(origin) if isfile(join(origin, f))]
        directories = [f for f in listdir(origin) if isdir(join(origin, f))]
        for artifact in artifacts:
            self.register_artifact_in_repository(
                join(origin, artifact), destination, source="file"
            )
        for artifact in directories:
            self.register_artifact_in_repository(
                join(origin, artifact), destination, source="directory"
            )
        print("\nFinal Results: ")
        print(
            "VNF Packages Indexed: "
            + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
        )
        print(
            "NS Packages Indexed: "
            + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
        )

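    # Illustrative sketch of the layout produced by repo_index() in the
    # destination folder (package identifiers are hypothetical):
    #
    #     repository/
    #         index.yaml
    #         vnf/
    #             cirros_vnfd/
    #                 1.0/
    #                     cirros_vnfd-1.0.tar.gz
    #                     metadata.yaml
    #         ns/
    #             ...
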
    def md5(self, fname):
        """
        Checksum generator
        :param fname: file path
        :return: checksum string
        """
        self._logger.debug("")
        hash_md5 = hashlib.md5()
        with open(fname, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

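    # Quick way to cross-check the "checksum" value stored in metadata.yaml
    # against a local package (the path below is hypothetical):
    #
    #     repo_api.md5("repository/vnf/cirros_vnfd/1.0/cirros_vnfd-1.0.tar.gz")
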
    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing
        :param descriptor_dict: artifact description
        :param file: artifact package
        :param package_type: type of artifact (vnf or ns)
        :return: fields
        """
        self._logger.debug("")
        fields = {}
        base_path = "/{}/".format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            if descriptor_dict.get("vnfd-catalog", False):
                aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
            else:
                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
                    "vnfd", [{}]
                )[0]

            images = []
            for vdu in aux_dict.get("vdu", ()):
                images.append(vdu.get("image"))
            fields["images"] = images
        if package_type == "ns":
            if descriptor_dict.get("nsd-catalog", False):
                aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
            else:
                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]

            vnfs = []
            for vnf in aux_dict.get("constituent-vnfd", ()):
                vnfs.append(vnf.get("vnfd-id-ref"))
            self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
            fields["vnfd-id-ref"] = vnfs

        fields["name"] = aux_dict.get("name")
        fields["id"] = aux_dict.get("id")
        fields["description"] = aux_dict.get("description")
        fields["vendor"] = aux_dict.get("vendor")
        fields["version"] = aux_dict.get("version", "1.0")
        fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
            base_path,
            fields["id"],
            fields["version"],
            fields.get("id"),
            fields.get("version"),
        )
        return fields

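    # Example of the fields dict produced for a hypothetical VNF descriptor
    # (all identifiers are illustrative):
    #
    #     {
    #         "images": ["cirros034"],
    #         "name": "cirros_vnf",
    #         "id": "cirros_vnfd",
    #         "description": "Simple VNF example",
    #         "vendor": "OSM",
    #         "version": "1.0",
    #         "path": "/vnf/cirros_vnfd/1.0/cirros_vnfd-1.0.tar.gz",
    #     }
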
    def zip_extraction(self, file_name):
        """
        Extracts a package tarball into the current directory
        :param file_name: path of the package file
        :return: extracted folder name, descriptor file path
        """
        self._logger.debug("Decompressing package file")
        temp_file = "/tmp/{}".format(file_name.split("/")[-1])
        if file_name != temp_file:
            copyfile(file_name, temp_file)
        with tarfile.open(temp_file, "r:gz") as tar:
            folder = tar.getnames()[0].split("/")[0]
            tar.extractall()

        remove(temp_file)
        descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
        return folder, descriptor_file

    def validate_artifact(self, path, source):
        """
        Validation of artifact.
        :param path: file path
        :param source: source type of the artifact ("directory" or "file")
        :return: status details, status, fields, package_type
        """
        self._logger.debug("")
        package_type = ""
        folder = ""
        try:
            if source == "directory":
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                folder, descriptor_file = self.zip_extraction(path)

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            if "vnf" in list(descriptor_dict.keys())[0]:
                package_type = "vnf"
            else:
                # Any descriptor that is not a VNF is indexed as an NS package
                package_type = "ns"

            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug("Descriptor successfully validated")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Return the error details; the extracted folder is cleaned up below
            return {"detail": str(e)}, False, {}, package_type
        finally:
            if folder:
                rmtree(folder, ignore_errors=True)

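    # The tuple returned by validate_artifact() is unpacked by the caller as
    # (details, valid, fields, package_type). For instance, a hypothetical
    # successful VNF validation would look like:
    #
    #     ({"detail": "VNFD successfully validated", "code": "OK"}, True, {...}, "vnf")
    #
    # while a failure returns ({"detail": "<error>"}, False, {}, package_type),
    # where package_type may still be "" if the failure happened before the
    # descriptor type was detected.
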
    def register_artifact_in_repository(self, path, destination, source):
        """
        Registration of one artifact in a repository
        :param path: path of the VNF or NS package
        :param destination: path for index creation
        :param source: source type of the artifact ("directory" or "file")
        """
        self._logger.debug("")
        pt = PackageTool()
        compressed = False
        try:
            _, valid, fields, package_type = self.validate_artifact(path, source)
            if not valid:
                raise Exception(
                    "{} {} is not well configured.".format(package_type.upper(), str(path))
                )
            else:
                if source == "directory":
                    path = pt.build(path)
                    compressed = True
                fields["checksum"] = self.md5(path)
                self.indexation(destination, path, package_type, fields)

        except Exception as e:
            self._logger.exception(
                "Error registering artifact in Repository: {}".format(e)
            )
            raise ClientException(e)

        finally:
            if source == "directory" and compressed:
                remove(path)

    def indexation(self, destination, path, package_type, fields):
        """
        Indexes a package in the repository
        :param destination: index repository path
        :param path: path of the package
        :param package_type: package type (vnf, ns)
        :param fields: dict with the required values
        """
        self._logger.debug("")
        data_ind = {
            "name": fields.get("name"),
            "description": fields.get("description"),
            "vendor": fields.get("vendor"),
            "path": fields.get("path"),
        }

        pkg_dir = join(destination, package_type, fields.get("id"))
        final_path = join(pkg_dir, fields.get("version"))
        if isdir(final_path):
            self._logger.warning(
                "{} {} already exists".format(package_type.upper(), str(path))
            )
            return

        # Create the <type>/<id>/<version> tree and copy the package and its metadata
        if not isdir(pkg_dir):
            mkdir(pkg_dir)
        mkdir(final_path)
        copyfile(
            path,
            join(
                final_path,
                "{}-{}.tar.gz".format(fields.get("id"), fields.get("version")),
            ),
        )
        with open(join(final_path, "metadata.yaml"), "w") as metadata_file:
            yaml.safe_dump(
                fields, metadata_file, default_flow_style=False, width=80, indent=4
            )

        # Update index.yaml: register the new version and refresh "latest" if needed
        with open(join(destination, "index.yaml")) as index_file:
            index = yaml.safe_load(index_file)
        package_index = index["{}_packages".format(package_type)].setdefault(
            fields.get("id"), {}
        )
        package_index[fields.get("version")] = data_ind
        latest = package_index.get("latest")
        if latest is None or versioning.parse(latest) < versioning.parse(
            fields.get("version")
        ):
            package_index["latest"] = fields.get("version")
        with open(join(destination, "index.yaml"), "w") as index_file:
            yaml.safe_dump(
                index, index_file, default_flow_style=False, width=80, indent=4
            )
        self._logger.info(
            "{} {} added in the repository".format(package_type.upper(), str(path))
        )

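    # Sketch of an index.yaml after one hypothetical VNF package has been
    # indexed (identifiers and timestamp are illustrative):
    #
    #     apiVersion: v1
    #     generated: '2020-04-29T08:41:07.681653Z'
    #     ns_packages: {}
    #     vnf_packages:
    #         cirros_vnfd:
    #             '1.0':
    #                 name: cirros_vnf
    #                 description: Simple VNF example
    #                 vendor: OSM
    #                 path: /vnf/cirros_vnfd/1.0/cirros_vnfd-1.0.tar.gz
    #             latest: '1.0'
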
    def current_datatime(self):
        """
        Datetime generator
        :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
        """
        self._logger.debug("")
        return datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    def init_directory(self, destination):
        """
        Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
        :param destination: path of the index folder
        """
        self._logger.debug("")
        if not isdir(destination):
            mkdir(destination)
        if not isfile(join(destination, "index.yaml")):
            mkdir(join(destination, "vnf"))
            mkdir(join(destination, "ns"))
            index_data = {
                "apiVersion": "v1",
                "generated": self.current_datatime(),
                "vnf_packages": {},
                "ns_packages": {},
            }
            with open(join(destination, "index.yaml"), "w") as outfile:
                yaml.safe_dump(
                    index_data, outfile, default_flow_style=False, width=80, indent=4
                )
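
    # For reference, a freshly initialized destination directory contains empty
    # vnf/ and ns/ folders plus an index.yaml seeded as (timestamp illustrative):
    #
    #     apiVersion: v1
    #     generated: '2020-04-29T08:41:07.681653Z'
    #     vnf_packages: {}
    #     ns_packages: {}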