#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

15 """
16 OSM Repo API handling
17 """
from datetime import datetime
import glob
import hashlib
import logging
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from packaging import version as versioning
import requests
import yaml


class OSMRepo(Repo):
    def __init__(self, http=None, client=None):
        self._http = http
        self._client = client
        self._apiName = "/admin"
        self._apiVersion = "/v1"
        self._apiResource = "/osmrepos"
        self._logger = logging.getLogger("osmclient")
        self._apiBase = "{}{}{}".format(
            self._apiName, self._apiVersion, self._apiResource
        )

    def pkg_list(self, pkgtype, filter=None, repo=None):
        """
        Returns the list of packages of the given type (vnf or ns) available in the
        OSM-registered repositories, optionally restricted to a single repository
        """
        self._logger.debug("")
        self._client.get_token()
        # Get OSM registered repository list
        repositories = self.list()
        if repo:
            repositories = [r for r in repositories if r["name"] == repo]
        if not repositories:
            raise ClientException("No repository found")

        vnf_repos = []
        for repository in repositories:
            try:
                r = requests.get("{}/index.yaml".format(repository.get("url")))

                if r.status_code == 200:
                    repo_list = yaml.safe_load(r.text)
                    vnf_packages = repo_list.get("{}_packages".format(pkgtype))
                    for pkg_id in vnf_packages:
                        versions = vnf_packages.get(pkg_id)
                        latest = versions.get("latest")
                        del versions["latest"]
                        for version in versions:
                            latest_version = False
                            if version == latest:
                                latest_version = True
                            vnf_repos.append(
                                {
                                    "vendor": versions[version].get("vendor"),
                                    "name": versions[version].get("name"),
                                    "version": version,
                                    "description": versions[version].get("description"),
                                    "location": versions[version].get("path"),
                                    "repository": repository.get("name"),
                                    "repourl": repository.get("url"),
                                    "latest": latest_version,
                                }
                            )
                else:
                    raise Exception(
                        "repository in url {} unreachable".format(repository.get("url"))
                    )
            except Exception as e:
                self._logger.error(
                    "Error: cannot read from repository {} '{}': {}".format(
                        repository["name"], repository["url"], e
                    )
                )
                continue

        vnf_repos_filtered = []
        if filter:
            for vnf_repo in vnf_repos:
                for k, v in vnf_repo.items():
                    if v:
                        kf, vf = filter.split("=")
                        if k == kf and vf in v:
                            vnf_repos_filtered.append(vnf_repo)
                            break
            vnf_repos = vnf_repos_filtered
        return vnf_repos

    def get_pkg(self, pkgtype, name, repo, filter, version):
        """
        Returns the filename of the PKG downloaded to disk
        """
        self._logger.debug("")
        self._client.get_token()
        f = None
        f_name = None
        # Get OSM registered repository list
        pkgs = self.pkg_list(pkgtype, filter, repo)
        for pkg in pkgs:
            if pkg.get("repository") == repo and pkg.get("name") == name:
                if "latest" in version:
                    if not pkg.get("latest"):
                        continue
                    else:
                        version = pkg.get("version")
                if pkg.get("version") == version:
                    r = requests.get(
                        "{}{}".format(pkg.get("repourl"), pkg.get("location")),
                        stream=True,
                    )
                    if r.status_code != 200:
                        raise ClientException("Package not found")

                    with tempfile.NamedTemporaryFile(delete=False) as f:
                        f.write(r.raw.read())
                        f_name = f.name
        if not f_name:
            raise ClientException(
                "{} {} not found at repo {}".format(pkgtype, name, repo)
            )
        return f_name

    def pkg_get(self, pkgtype, name, repo, version, filter):
        """
        Downloads a package from a repository and returns its descriptor as a dict
        """
        pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
        if not pkg_name:
            raise ClientException("Package not found")
        folder, descriptor = self.zip_extraction(pkg_name)
        with open(descriptor) as pkg:
            pkg_descriptor = yaml.safe_load(pkg)
        rmtree(folder, ignore_errors=False)
        # Reject the package if its descriptor does not match the requested type
        if not (
            pkgtype == "vnf"
            and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog"))
        ) and not (
            pkgtype == "ns"
            and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog"))
        ):
            raise ClientException("Wrong Package type")
        return pkg_descriptor

    def repo_index(self, origin=".", destination="."):
        """
        Repo Index main function
        :param origin: origin directory containing the artifacts to index
        :param destination: destination folder where the index and the valid artifacts are created
        """
        self._logger.debug("")
        if destination == ".":
            if origin == destination:
                destination = "repository"

        destination = abspath(destination)
        origin = abspath(origin)

        if origin[0] != "/":
            origin = join(getcwd(), origin)
        if destination[0] != "/":
            destination = join(getcwd(), destination)

        self.init_directory(destination)
        artifacts = [f for f in listdir(origin) if isfile(join(origin, f))]
        directories = [f for f in listdir(origin) if isdir(join(origin, f))]
        for artifact in artifacts:
            self.register_artifact_in_repository(
                join(origin, artifact), destination, source="file"
            )
        for artifact in directories:
            self.register_artifact_in_repository(
                join(origin, artifact), destination, source="directory"
            )
        print("\nFinal Results: ")
        print(
            "VNF Packages Indexed: "
            + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
        )
        print(
            "NS Packages Indexed: "
            + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
        )

    def md5(self, fname):
        """
        Checksum generator
        :param fname: file path
        :return: checksum string
        """
        self._logger.debug("")
        hash_md5 = hashlib.md5()
        with open(fname, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing
        :param descriptor_dict: artifact description
        :param file: artifact package
        :param package_type: type of artifact (vnf or ns)
        :return: fields
        """
        self._logger.debug("")
        fields = {}
        base_path = "/{}/".format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            if descriptor_dict.get("vnfd-catalog", False):
                aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
            else:
                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
                    "vnfd", [{}]
                )[0]

            images = []
            for vdu in aux_dict.get("vdu", ()):
                images.append(vdu.get("image"))
            fields["images"] = images
        if package_type == "ns":
            if descriptor_dict.get("nsd-catalog", False):
                aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
            else:
                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
                    0
                ]

            vnfs = []

            for vnf in aux_dict.get("constituent-vnfd", ()):
                vnfs.append(vnf.get("vnfd-id-ref"))
            self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
            fields["vnfd-id-ref"] = vnfs

        fields["name"] = aux_dict.get("name")
        fields["id"] = aux_dict.get("id")
        fields["description"] = aux_dict.get("description")
        fields["vendor"] = aux_dict.get("vendor")
        fields["version"] = aux_dict.get("version", "1.0")
        fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
            base_path,
            fields["id"],
            fields["version"],
            fields.get("id"),
            fields.get("version"),
        )
        return fields

    def zip_extraction(self, file_name):
        """
        Extraction of the package's tar.gz archive
        :param file_name: file path of the package
        :return: extracted folder name, descriptor file path
        """
        self._logger.debug("Decompressing package file")
        temp_file = "/tmp/{}".format(file_name.split("/")[-1])
        if file_name != temp_file:
            copyfile(file_name, temp_file)
        with tarfile.open(temp_file, "r:gz") as tar:
            folder = tar.getnames()[0].split("/")[0]
            tar.extractall()

        remove(temp_file)
        descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0]
        return folder, descriptor_file

    def validate_artifact(self, path, source):
        """
        Validation of artifact.
        :param path: file path
        :param source: "directory" when path is a package folder, "file" when it is a compressed package
        :return: status details, status, fields, package_type
        """
        self._logger.debug("")
        package_type = ""
        folder = ""
        try:
            if source == "directory":
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                folder, descriptor_file = self.zip_extraction(path)

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            if "vnf" in list(descriptor_dict.keys())[0]:
                package_type = "vnf"
            else:
                # raise ClientException("Not VNF package")
                package_type = "ns"

            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug("Descriptor successfully validated")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Delete the folder we just created
            return {"detail": str(e)}, False, {}, package_type
        finally:
            if folder:
                rmtree(folder, ignore_errors=True)

    def register_artifact_in_repository(self, path, destination, source):
        """
        Registration of one artifact in a repository
        :param path: path to the VNF or NS package (file or directory)
        :param destination: path for index creation
        :param source: "file" or "directory"
        """
        self._logger.debug("")
        pt = PackageTool()
        compressed = False
        try:
            fields = {}
            _, valid, fields, package_type = self.validate_artifact(path, source)
            if not valid:
                raise Exception(
                    "{} {} is not well configured.".format(package_type.upper(), str(path))
                )
            else:
                if source == "directory":
                    path = pt.build(path)
                    compressed = True
                fields["checksum"] = self.md5(path)
                self.indexation(destination, path, package_type, fields)

        except Exception as e:
            self._logger.exception(
                "Error registering artifact in Repository: {}".format(e)
            )

        finally:
            if source == "directory" and compressed:
                remove(path)

    def indexation(self, destination, path, package_type, fields):
        """
        Process to index a package in the repository
        :param destination: index repository path
        :param path: path of the package
        :param package_type: package type (vnf, ns)
        :param fields: dict with the required values
        """
        self._logger.debug("")
        data_ind = {
            "name": fields.get("name"),
            "description": fields.get("description"),
            "vendor": fields.get("vendor"),
            "path": fields.get("path"),
        }

        final_path = join(
            destination, package_type, fields.get("id"), fields.get("version")
        )
        if isdir(join(destination, package_type, fields.get("id"))):
            if isdir(final_path):
                self._logger.warning(
                    "{} {} already exists".format(package_type.upper(), str(path))
                )
            else:
                mkdir(final_path)
                copyfile(
                    path,
                    final_path
                    + "/"
                    + fields.get("id")
                    + "-"
                    + fields.get("version")
                    + ".tar.gz",
                )
                yaml.safe_dump(
                    fields,
                    open(final_path + "/" + "metadata.yaml", "w"),
                    default_flow_style=False,
                    width=80,
                    indent=4,
                )
                index = yaml.safe_load(open(destination + "/index.yaml"))

                index["{}_packages".format(package_type)][fields.get("id")][
                    fields.get("version")
                ] = data_ind
                if versioning.parse(
                    index["{}_packages".format(package_type)][fields.get("id")][
                        "latest"
                    ]
                ) < versioning.parse(fields.get("version")):
                    index["{}_packages".format(package_type)][fields.get("id")][
                        "latest"
                    ] = fields.get("version")
                yaml.safe_dump(
                    index,
                    open(destination + "/index.yaml", "w"),
                    default_flow_style=False,
                    width=80,
                    indent=4,
                )
                self._logger.info(
                    "{} {} added in the repository".format(
                        package_type.upper(), str(path)
                    )
                )
        else:
            mkdir(destination + "/{}/".format(package_type) + fields.get("id"))
            mkdir(final_path)
            copyfile(
                path,
                final_path
                + "/"
                + fields.get("id")
                + "-"
                + fields.get("version")
                + ".tar.gz",
            )
            yaml.safe_dump(
                fields,
                open(join(final_path, "metadata.yaml"), "w"),
                default_flow_style=False,
                width=80,
                indent=4,
            )
            index = yaml.safe_load(open(destination + "/index.yaml"))

            index["{}_packages".format(package_type)][fields.get("id")] = {
                fields.get("version"): data_ind
            }
            index["{}_packages".format(package_type)][fields.get("id")][
                "latest"
            ] = fields.get("version")
            yaml.safe_dump(
                index,
                open(join(destination, "index.yaml"), "w"),
                default_flow_style=False,
                width=80,
                indent=4,
            )
            self._logger.info(
                "{} {} added in the repository".format(package_type.upper(), str(path))
            )

    def current_datatime(self):
        """
        Datetime Generator
        :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
        """
        self._logger.debug("")
        return datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    def init_directory(self, destination):
        """
        Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
        :param destination: index repository path
        :return:
        """
        self._logger.debug("")
        if not isdir(destination):
            mkdir(destination)
        if not isfile(join(destination, "index.yaml")):
            mkdir(join(destination, "vnf"))
            mkdir(join(destination, "ns"))
            index_data = {
                "apiVersion": "v1",
                "generated": self.current_datatime(),
                "vnf_packages": {},
                "ns_packages": {},
            }
            with open(join(destination, "index.yaml"), "w") as outfile:
                yaml.safe_dump(
                    index_data, outfile, default_flow_style=False, width=80, indent=4
                )