Fixes bug 1657: repo generation from osm-packages
[osm/osmclient.git] / osmclient / sol005 / osmrepo.py
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
12 # under the License.
13 #
14
15 """
16 OSM Repo API handling
17 """
18 import glob
19 import hashlib
20 import logging
21 from os import listdir, mkdir, getcwd, remove
22 from os.path import isfile, isdir, join, abspath
23 from shutil import copyfile, rmtree
24 import tarfile
25 import tempfile
26 import time
27
28 from osm_im.validation import Validation as validation_im
29 from osmclient.common.exceptions import ClientException
30 from osmclient.common.package_tool import PackageTool
31 from osmclient.sol005.repo import Repo
32 from packaging import version as versioning
33 import requests
34 import yaml
35
36
37 class OSMRepo(Repo):
38 def __init__(self, http=None, client=None):
39 self._http = http
40 self._client = client
41 self._apiName = '/admin'
42 self._apiVersion = '/v1'
43 self._apiResource = '/osmrepos'
44 self._logger = logging.getLogger('osmclient')
45 self._apiBase = '{}{}{}'.format(self._apiName,
46 self._apiVersion, self._apiResource)
47
48 def pkg_list(self, pkgtype, filter=None, repo=None):
49 """
50 Returns a repo based on name or id
51 """
52 self._logger.debug("")
53 self._client.get_token()
54 # Get OSM registered repository list
55 repositories = self.list()
56 if repo:
57 repositories = [r for r in repositories if r["name"] == repo]
58 if not repositories:
59 raise ClientException('Not repository found')
60
61 vnf_repos = []
62 for repository in repositories:
63 try:
64 r = requests.get('{}/index.yaml'.format(repository.get('url')))
65
66 if r.status_code == 200:
67 repo_list = yaml.safe_load(r.text)
68 vnf_packages = repo_list.get('{}_packages'.format(pkgtype))
69 for repo in vnf_packages:
70 versions = vnf_packages.get(repo)
71 latest = versions.get('latest')
72 del versions['latest']
73 for version in versions:
74 latest_version = False
75 if version == latest:
76 latest_version = True
77 vnf_repos.append({'vendor': versions[version].get("vendor"),
78 'name': versions[version].get("name"),
79 'version': version,
80 'description': versions[version].get("description"),
81 'location': versions[version].get("path"),
82 'repository': repository.get('name'),
83 'repourl': repository.get('url'),
84 'latest': latest_version
85 })
86 else:
87 raise Exception('repository in url {} unreachable'.format(repository.get('url')))
88 except Exception as e:
89 self._logger.error(
90 "Error cannot read from repository {} '{}': {}".format(
91 repository["name"], repository["url"], e
92 ),
93 exc_info=True
94 )
95 continue
96
97 vnf_repos_filtered = []
98 if filter:
99 for vnf_repo in vnf_repos:
100 for k, v in vnf_repo.items():
101 if v:
102 kf, vf = filter.split('=')
103 if k == kf and vf in v:
104 vnf_repos_filtered.append(vnf_repo)
105 break
106 vnf_repos = vnf_repos_filtered
107 return vnf_repos
108
109 def get_pkg(self, pkgtype, name, repo, filter, version):
110 """
111 Returns the filename of the PKG downloaded to disk
112 """
113 self._logger.debug("")
114 self._client.get_token()
115 f = None
116 f_name = None
117 # Get OSM registered repository list
118 pkgs = self.pkg_list(pkgtype, filter, repo)
119 for pkg in pkgs:
120 if pkg.get('repository') == repo and pkg.get('name') == name:
121 if 'latest' in version:
122 if not pkg.get('latest'):
123 continue
124 else:
125 version = pkg.get('version')
126 if pkg.get('version') == version:
127 r = requests.get('{}{}'.format(pkg.get('repourl'), pkg.get('location')), stream=True)
128 if r.status_code != 200:
129 raise ClientException("Package not found")
130
131 with tempfile.NamedTemporaryFile(delete=False) as f:
132 f.write(r.raw.read())
133 f_name = f.name
134 if not f_name:
135 raise ClientException("{} {} not found at repo {}".format(pkgtype, name, repo))
136 return f_name
137
138 def pkg_get(self, pkgtype, name, repo, version, filter):
139
140 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
141 if not pkg_name:
142 raise ClientException('Package not found')
143 folder, descriptor = self.zip_extraction(pkg_name)
144 with open(descriptor) as pkg:
145 pkg_descriptor = yaml.safe_load(pkg)
146 rmtree(folder, ignore_errors=False)
147 if ((pkgtype == 'vnf' and (pkg_descriptor.get('vnfd') or pkg_descriptor.get('vnfd:vnfd_catalog'))) or
148 (pkgtype == 'ns' and (pkg_descriptor.get('nsd') or pkg_descriptor.get('nsd:nsd_catalog')))):
149 raise ClientException('Wrong Package type')
150 return pkg_descriptor
151
152 def repo_index(self, origin=".", destination='.'):
153 """
154 Repo Index main function
155 :param origin: origin directory for getting all the artifacts
156 :param destination: destination folder for create and index the valid artifacts
157 """
158 self._logger.debug("Starting index composition")
159 if destination == ".":
160 if origin == destination:
161 destination = 'repository'
162
163 destination = abspath(destination)
164 origin = abspath(origin)
165 self._logger.debug(f"Paths {destination}, {origin}")
166 if origin[0] != "/":
167 origin = join(getcwd(), origin)
168 if destination[0] != '/':
169 destination = join(getcwd(), destination)
170
171 self.init_directory(destination)
172 artifacts = []
173 directories = []
174 for f in listdir(origin):
175 if isfile(join(origin, f)) and f.endswith('.tar.gz'):
176 artifacts.append(f)
177 elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
178 directories.append(f) # TODO: Document that nested directories are not supported
179 else:
180 self._logger.debug(f"Ignoring {f}")
181 for artifact in artifacts:
182 self.register_artifact_in_repository(
183 join(origin, artifact), destination, source="artifact"
184 )
185 for artifact in directories:
186 self.register_artifact_in_repository(
187 join(origin, artifact), destination, source="directory"
188 )
189 self._logger.info("\nFinal Results: ")
190 self._logger.info(
191 "VNF Packages Indexed: "
192 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
193 )
194 self._logger.info(
195 "NS Packages Indexed: "
196 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
197 )
198
199 self._logger.info(
200 "NST Packages Indexed: "
201 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
202 )
203
204 def md5(self, fname):
205 """
206 Checksum generator
207 :param fname: file path
208 :return: checksum string
209 """
210 self._logger.debug("")
211 hash_md5 = hashlib.md5()
212 with open(fname, "rb") as f:
213 for chunk in iter(lambda: f.read(4096), b""):
214 hash_md5.update(chunk)
215 return hash_md5.hexdigest()
216
217 def fields_building(self, descriptor_dict, file, package_type):
218 """
219 From an artifact descriptor, obtain the fields required for indexing
220 :param descriptor_dict: artifact description
221 :param file: artifact package
222 :param package_type: type of artifact (vnf, ns, nst)
223 :return: fields
224 """
225 self._logger.debug("")
226
227 fields = {}
228 base_path = '/{}/'.format(package_type)
229 aux_dict = {}
230 if package_type == "vnf":
231 if descriptor_dict.get("vnfd-catalog", False):
232 aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
233 elif descriptor_dict.get("vnfd:vnfd-catalog"):
234 aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
235 elif descriptor_dict.get("vnfd"):
236 aux_dict = descriptor_dict["vnfd"]
237 if aux_dict.get("vnfd"):
238 aux_dict = aux_dict['vnfd'][0]
239 else:
240 msg = f"Unexpected descriptor format {descriptor_dict}"
241 self._logger.error(msg)
242 raise ValueError(msg)
243 self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
244 images = []
245 for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
246 images.append(vdu.get("image", vdu.get('name')))
247 fields["images"] = images
248 elif package_type == "ns":
249 if descriptor_dict.get("nsd-catalog", False):
250 aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
251 elif descriptor_dict.get("nsd:nsd-catalog"):
252 aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
253 elif descriptor_dict.get("nsd"):
254 aux_dict = descriptor_dict['nsd']
255 if aux_dict.get("nsd"):
256 aux_dict = descriptor_dict["nsd"]["nsd"][0]
257 else:
258 msg = f"Unexpected descriptor format {descriptor_dict}"
259 self._logger.error(msg)
260 raise ValueError(msg)
261 vnfs = []
262 if aux_dict.get("constituent-vnfd"):
263 for vnf in aux_dict.get("constituent-vnfd", ()):
264 vnfs.append(vnf.get("vnfd-id-ref"))
265 else:
266 vnfs = aux_dict.get('vnfd-id')
267 self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
268 fields["vnfd-id-ref"] = vnfs
269 elif package_type == 'nst':
270 if descriptor_dict.get("nst-catalog", False):
271 aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
272 elif descriptor_dict.get("nst:nst-catalog"):
273 aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
274 elif descriptor_dict.get("nst"):
275 aux_dict = descriptor_dict['nst']
276 if aux_dict.get("nst"):
277 aux_dict = descriptor_dict["nst"]["nst"][0]
278 nsds = []
279 for nsd in aux_dict.get("netslice-subnet", ()):
280 nsds.append(nsd.get("nsd-ref"))
281 self._logger.debug("Used NSDs in the NST: " + str(nsds))
282 if not nsds:
283 msg = f"Unexpected descriptor format {descriptor_dict}"
284 self._logger.error(msg)
285 raise ValueError(msg)
286 fields["nsd-id-ref"] = nsds
287 else:
288 msg = f"Unexpected descriptor format {descriptor_dict}"
289 self._logger.error(msg)
290 raise ValueError(msg)
291
292 fields["name"] = aux_dict.get("name")
293 fields["id"] = aux_dict.get("id")
294 fields["description"] = aux_dict.get("description")
295 fields["vendor"] = aux_dict.get("vendor")
296 fields["version"] = str(aux_dict.get("version", "1.0"))
297 fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
298 base_path,
299 fields["id"],
300 fields["version"],
301 fields.get("id"),
302 fields.get("version"),
303 )
304 return fields
305
306 def zip_extraction(self, file_name):
307 """
308 Validation of artifact.
309 :param file: file path
310 :return: status details, status, fields, package_type
311 """
312 self._logger.debug("Decompressing package file")
313 temp_file = '/tmp/{}'.format(file_name.split('/')[-1])
314 if file_name != temp_file:
315 copyfile(file_name, temp_file)
316 with tarfile.open(temp_file, "r:gz") as tar:
317 folder = tar.getnames()[0].split('/')[0]
318 tar.extractall()
319
320 remove(temp_file)
321 descriptor_file = glob.glob('{}/*.y*ml'.format(folder))[0]
322 return folder, descriptor_file
323
324 def validate_artifact(self, path, source):
325 """
326 Validation of artifact.
327 :param path: file path
328 :param source: flag to select the correct file type (directory or artifact)
329 :return: status details, status, fields, package_type
330 """
331 self._logger.debug("")
332 package_type = ''
333 folder = ''
334 try:
335 if source == 'directory':
336 descriptor_file = glob.glob('{}/*.y*ml'.format(path))[0]
337 else:
338 folder, descriptor_file = self.zip_extraction(path)
339
340 self._logger.debug("Opening descriptor file: {}".format(descriptor_file))
341
342 with open(descriptor_file, 'r') as f:
343 descriptor_data = f.read()
344 self._logger.debug(f"Descriptor data: {descriptor_data}")
345 validation = validation_im()
346 desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
347 try:
348 validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
349 except Exception as e:
350 self._logger.error(e, exc_info=True)
351 raise e
352 descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
353 if "vnf" in descriptor_type_ref:
354 package_type = "vnf"
355 elif "nst" in descriptor_type_ref:
356 package_type = "nst"
357 elif "ns" in descriptor_type_ref:
358 package_type = "ns"
359 else:
360 msg = f"Unknown package type {descriptor_type_ref}"
361 self._logger.error(msg)
362 raise ValueError(msg)
363 self._logger.debug("Descriptor: {}".format(descriptor_dict))
364 fields = self.fields_building(descriptor_dict, path, package_type)
365 self._logger.debug(f"Descriptor successfully validated {fields}")
366 return (
367 {
368 "detail": "{}D successfully validated".format(package_type.upper()),
369 "code": "OK",
370 },
371 True,
372 fields,
373 package_type,
374 )
375 except Exception as e:
376 # Delete the folder we just created
377 return {"detail": str(e)}, False, {}, package_type
378 finally:
379 if folder:
380 rmtree(folder, ignore_errors=True)
381
382 def register_artifact_in_repository(self, path, destination, source):
383 """
384 Registration of one artifact in a repository
385 file: VNF or NS
386 destination: path for index creation
387 """
388 self._logger.debug("")
389 pt = PackageTool()
390 compressed = False
391 try:
392 fields = {}
393 _, valid, fields, package_type = self.validate_artifact(path, source)
394 if not valid:
395 raise Exception(
396 "{} {} Not well configured.".format(package_type.upper(), str(path))
397 )
398 else:
399 if source == "directory":
400 path = pt.build(path)
401 self._logger.debug(f"Directory path {path}")
402 compressed = True
403 fields["checksum"] = self.md5(path)
404 self.indexation(destination, path, package_type, fields)
405
406 except Exception as e:
407 self._logger.exception("Error registering artifact in Repository: {}".format(e))
408
409 finally:
410 if source == "directory" and compressed:
411 remove(path)
412
413 def indexation(self, destination, path, package_type, fields):
414 """
415 Process for index packages
416 :param destination: index repository path
417 :param path: path of the package
418 :param package_type: package type (vnf, ns)
419 :param fields: dict with the required values
420 """
421 self._logger.debug("")
422 data_ind = {'name': fields.get('name'), 'description': fields.get('description'),
423 'vendor': fields.get('vendor'), 'path': fields.get('path')}
424
425 final_path = join(destination, package_type, fields.get('id'), fields.get('version'))
426 if isdir(join(destination, package_type, fields.get('id'))):
427 if isdir(final_path):
428 self._logger.warning('{} {} already exists'.format(package_type.upper(), str(path)))
429 else:
430 mkdir(final_path)
431 copyfile(path,
432 final_path + '/' + fields.get('id') + "-" + fields.get('version') + '.tar.gz')
433 yaml.safe_dump(fields, open(final_path + '/' + 'metadata.yaml', 'w'),
434 default_flow_style=False, width=80, indent=4)
435 index = yaml.safe_load(open(destination + '/index.yaml'))
436
437 index['{}_packages'.format(package_type)][fields.get('id')][fields.get('version')] = data_ind
438 if versioning.parse(index['{}_packages'.format(package_type)][fields.get('id')][
439 'latest']) < versioning.parse(fields.get('version')):
440 index['{}_packages'.format(package_type)][fields.get('id')]['latest'] = fields.get(
441 'version')
442 yaml.safe_dump(index, open(destination + '/index.yaml', 'w'),
443 default_flow_style=False, width=80, indent=4)
444 self._logger.info('{} {} added in the repository'.format(package_type.upper(), str(path)))
445 else:
446 mkdir(destination + '/{}/'.format(package_type) + fields.get('id'))
447 mkdir(final_path)
448 copyfile(path,
449 final_path + '/' + fields.get('id') + "-" + fields.get('version') + '.tar.gz')
450 yaml.safe_dump(fields, open(join(final_path, 'metadata.yaml'), 'w'),
451 default_flow_style=False, width=80, indent=4)
452 index = yaml.safe_load(open(destination + '/index.yaml'))
453
454 index['{}_packages'.format(package_type)][fields.get('id')] = {fields.get('version'): data_ind}
455 index['{}_packages'.format(package_type)][fields.get('id')]['latest'] = fields.get('version')
456 yaml.safe_dump(index, open(join(destination, 'index.yaml'), 'w'),
457 default_flow_style=False, width=80, indent=4)
458 self._logger.info('{} {} added in the repository'.format(package_type.upper(), str(path)))
459
460 def current_datetime(self):
461 """
462 Datetime Generator
463 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
464 """
465 self._logger.debug("")
466 return time.strftime('%Y-%m-%dT%H:%M:%S.%sZ')
467
468 def init_directory(self, destination):
469 """
470 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
471 :param destination:
472 :return:
473 """
474 self._logger.debug("")
475 if not isdir(destination):
476 mkdir(destination)
477 if not isfile(join(destination, "index.yaml")):
478 mkdir(join(destination, "vnf"))
479 mkdir(join(destination, "ns"))
480 mkdir(join(destination, "nst"))
481 index_data = {
482 "apiVersion": "v1",
483 "generated": self.current_datetime(),
484 "vnf_packages": {},
485 "ns_packages": {},
486 "nst_packages": {},
487 }
488 with open(join(destination, "index.yaml"), "w") as outfile:
489 yaml.safe_dump(
490 index_data, outfile, default_flow_style=False, width=80, indent=4
491 )