# Feature 8178 VNF Repositories
# osm/osmclient.git — osmclient/sol005/osmrepo.py
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
14
15 """
16 OSM Repo API handling
17 """
import glob
import hashlib
import logging
import tarfile
import tempfile
import time
from datetime import datetime, timezone
from os import getcwd, listdir, mkdir, remove
from os.path import abspath, isdir, isfile, join
from shutil import copyfile, rmtree

import requests
import ruamel.yaml
import yaml
from packaging import version as versioning

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.sol005.repo import Repo
34
35
36 class OSMRepo(Repo):
37 def __init__(self, http=None, client=None):
38 self._http = http
39 self._client = client
40 self._apiName = '/admin'
41 self._apiVersion = '/v1'
42 self._apiResource = '/osmrepos'
43 self._logger = logging.getLogger('osmclient')
44 self._apiBase = '{}{}{}'.format(self._apiName,
45 self._apiVersion, self._apiResource)
46
47 def pkg_list(self, pkgtype, filter=None, repo=None):
48 """
49 Returns a repo based on name or id
50 """
51 self._logger.debug("")
52 self._client.get_token()
53 # Get OSM registered repository list
54 repositories = self.list()
55 if repo:
56 repositories = [r for r in repositories if r["name"] == repo]
57 if not repositories:
58 raise ClientException('Not repository found')
59
60 vnf_repos = []
61 for repository in repositories:
62 try:
63 r = requests.get('{}/index.yaml'.format(repository.get('url')))
64
65 if r.status_code == 200:
66 repo_list = yaml.safe_load(r.text)
67 vnf_packages = repo_list.get('{}_packages'.format(pkgtype))
68 for repo in vnf_packages:
69 versions = vnf_packages.get(repo)
70 latest = versions.get('latest')
71 del versions['latest']
72 for version in versions:
73 latest_version = False
74 if version == latest:
75 latest_version = True
76 vnf_repos.append({'vendor': versions[version].get("vendor"),
77 'name': versions[version].get("name"),
78 'version': version,
79 'description': versions[version].get("description"),
80 'location': versions[version].get("path"),
81 'repository': repository.get('name'),
82 'repourl': repository.get('url'),
83 'latest': latest_version
84 })
85 else:
86 raise Exception('repository in url {} unreachable'.format(repository.get('url')))
87 except Exception as e:
88 logging.error("Error cannot read from repository {} '{}': {}".format(repository['name'], repository['url'], e))
89 continue
90
91 vnf_repos_filtered = []
92 if filter:
93 for vnf_repo in vnf_repos:
94 for k, v in vnf_repo.items():
95 if v:
96 kf, vf = filter.split('=')
97 if k == kf and vf in v:
98 vnf_repos_filtered.append(vnf_repo)
99 break
100 vnf_repos = vnf_repos_filtered
101 return vnf_repos
102
103 def get_pkg(self, pkgtype, name, repo, filter, version):
104 """
105 Returns the filename of the PKG downloaded to disk
106 """
107 self._logger.debug("")
108 self._client.get_token()
109 f = None
110 f_name = None
111 # Get OSM registered repository list
112 pkgs = self.pkg_list(pkgtype, filter, repo)
113 for pkg in pkgs:
114 if pkg.get('repository') == repo and pkg.get('name') == name:
115 if 'latest' in version:
116 if not pkg.get('latest'):
117 continue
118 else:
119 version = pkg.get('version')
120 if pkg.get('version') == version:
121 r = requests.get('{}{}'.format(pkg.get('repourl'), pkg.get('location')), stream=True)
122 if r.status_code != 200:
123 raise ClientException("Package not found")
124
125 with tempfile.NamedTemporaryFile(delete=False) as f:
126 f.write(r.raw.read())
127 f_name = f.name
128 if not f_name:
129 raise ClientException("{} {} not found at repo {}".format(pkgtype,name, repo))
130 return f_name
131
132 def pkg_get(self, pkgtype, name, repo, version, filter):
133
134 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
135 if not pkg_name:
136 raise ClientException('Package not found')
137 folder, descriptor = self.zip_extraction(pkg_name)
138 with open(descriptor) as pkg:
139 pkg_descriptor = yaml.safe_load(pkg)
140 rmtree(folder, ignore_errors=False)
141 if ((pkgtype == 'vnf' and (pkg_descriptor.get('vnfd') or pkg_descriptor.get('vnfd:vnfd_catalog'))) or
142 (pkgtype == 'ns' and (pkg_descriptor.get('nsd') or pkg_descriptor.get('nsd:nsd_catalog')))):
143 raise ClientException('Wrong Package type')
144 return pkg_descriptor
145
146 def repo_index(self, origin=".", destination='.'):
147 """
148 Repo Index main function
149 :param origin: origin directory for getting all the artifacts
150 :param destination: destination folder for create and index the valid artifacts
151 """
152 if destination == '.':
153 if origin == destination:
154 destination = 'repository'
155
156 destination = abspath(destination)
157 origin = abspath(origin)
158
159 if origin[0] != '/':
160 origin = join(getcwd(), origin)
161 if destination[0] != '/':
162 destination = join(getcwd(), destination)
163
164 self.init_directory(destination)
165 artifacts = [f for f in listdir(origin) if isfile(join(origin, f))]
166 directories = [f for f in listdir(origin) if isdir(join(origin, f))]
167 for artifact in artifacts:
168 self.register_artifact_in_repository(join(origin, artifact), destination, source='file')
169 for artifact in directories:
170 self.register_artifact_in_repository(join(origin, artifact), destination, source='directory')
171 print("\nFinal Results: ")
172 print("VNF Packages Indexed: " + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml"))))
173 print("NS Packages Indexed: " + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml"))))
174
175 def md5(self, fname):
176 """
177 Checksum generator
178 :param fname: file path
179 :return: checksum string
180 """
181 hash_md5 = hashlib.md5()
182 with open(fname, "rb") as f:
183 for chunk in iter(lambda: f.read(4096), b""):
184 hash_md5.update(chunk)
185 return hash_md5.hexdigest()
186
187 def fields_building(self, descriptor_json, file, package_type):
188 """
189 From an artifact descriptor, obtain the fields required for indexing
190 :param descriptor_json: artifact description
191 :param file: artifact package
192 :param package_type: type of artifact (vnf or ns)
193 :return: fields
194 """
195 fields = {}
196 base_path = '/' + package_type + '/'
197 if package_type == "vnf":
198 if descriptor_json.get('vnfd-catalog', False):
199 aux_dict = descriptor_json.get('vnfd-catalog', {}).get('vnfd', [{}])[0]
200 else:
201 aux_dict = descriptor_json.get('vnfd:vnfd-catalog', {}).get('vnfd', [{}])[0]
202
203 images = []
204 for vdu in aux_dict.get('vdu', ()):
205 images.append(vdu.get('image'))
206 fields['images'] = images
207 if package_type == "ns":
208 if descriptor_json.get('nsd-catalog', False):
209 aux_dict = descriptor_json.get('nsd-catalog', {}).get('nsd', [{}])[0]
210 else:
211 aux_dict = descriptor_json.get('nsd:nsd-catalog', {}).get('nsd', [{}])[0]
212
213 vnfs = []
214
215 for vnf in aux_dict.get('constituent-vnfd', ()):
216 vnfs.append(vnf.get('vnfd-id-ref'))
217 self._logger.debug('Used VNFS in the NSD: ' + str(vnfs))
218 fields['vnfd-id-ref'] = vnfs
219
220 fields['name'] = aux_dict.get('name')
221 fields['id'] = aux_dict.get('id')
222 fields['description'] = aux_dict.get('description')
223 fields['vendor'] = aux_dict.get('vendor')
224 fields['version'] = aux_dict.get('version', '1.0')
225 fields['path'] = base_path + fields['id'] + '/' + fields['version'] + '/' + fields.get('id') + "-" + \
226 fields.get('version') + '.tar.gz'
227 return fields
228
    def zip_extraction(self, file):
        """
        Extract a .tar.gz package into the current working directory.

        :param file: path to the package file
        :return: (folder, descriptor_file) — the top-level folder the archive
            was extracted into (relative to the CWD) and the path of the first
            YAML file found inside it
        """
        self._logger.debug("Decompressing package file")
        # Work on a copy under /tmp so the tarball can be removed afterwards.
        # NOTE(review): if 'file' already lives directly in /tmp (e.g. the
        # temp file produced by get_pkg), no copy is made and remove() below
        # deletes the caller's file — confirm this cleanup is intended for
        # external callers too.
        temp_file = '/tmp/' + file.split('/')[-1]
        if file != temp_file:
            copyfile(file, temp_file)
        with tarfile.open(temp_file, "r:gz") as tar:
            # Package layout convention: one top-level directory per archive.
            folder = tar.getnames()[0].split('/')[0]
            # NOTE(review): extractall() without a path targets the CWD and
            # performs no member sanitization (tar path-traversal risk for
            # untrusted archives).
            tar.extractall()

        remove(temp_file)
        # The first YAML file in the extracted folder is taken as descriptor.
        descriptor_file = glob.glob(folder + "/*.y*ml")[0]
        return folder, descriptor_file
246
    def validate_artifact(self, path, source):
        """
        Validate a VNF/NS package against the OSM information model.

        :param path: directory path when source == 'directory', otherwise the
            path of a compressed package file
        :param source: 'directory' for an uncompressed package tree, anything
            else for a compressed package
        :return: tuple (status-details dict, valid bool, fields dict,
            package_type) — on failure the dict holds only the error detail,
            fields is empty and package_type may still be ''
        """
        try:
            package_type = ''
            folder = ''
            if source == 'directory':
                # Uncompressed package: descriptor is the first YAML in the tree.
                descriptor_file = glob.glob(path + "/*.y*ml")[0]
            else:
                # Compressed package: extract it (into the CWD) first.
                folder, descriptor_file = self.zip_extraction(path)

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, 'r') as f:
                descriptor_data = f.read()
            validation = validation_im()
            desc_type, descriptor_data = validation.yaml_validation(descriptor_data)
            # NOTE(review): pyangbind_validation is invoked unbound with this
            # OSMRepo instance as 'self' — confirm validation_im only uses
            # 'self' for logging/context.
            validation_im.pyangbind_validation(self, desc_type, descriptor_data)
            # The first top-level key of the descriptor decides the type.
            if 'vnf' in list(descriptor_data.keys())[0]:
                package_type = 'vnf'
            else:
                # raise ClientException("Not VNF package")
                package_type = 'ns'

            self._logger.debug("Descriptor: {}".format(descriptor_data))
            fields = self.fields_building(descriptor_data, path, package_type)
            self._logger.debug("Descriptor sucessfully validated")
            return {"detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK"}, True, fields, package_type
        except Exception as e:
            # Any extraction/parsing/validation failure is reported, not raised.
            return {"detail": str(e)}, False, {}, package_type
        finally:
            # Delete the folder zip_extraction created (empty string when the
            # source was a directory, so nothing is removed in that case).
            if folder:
                rmtree(folder, ignore_errors=True)
285
286 def compress_artifact(self, path):
287 """
288 Compress a directory for building an artifact
289 :param path: path of the directory
290 :return: file path
291 """
292 if path[-1] == '/':
293 path = path[:-1]
294 file = path + '.tar.gz'
295 with tarfile.open(file, "w:gz") as tar:
296 tar.add(path)
297
298 return file
299
300 def register_artifact_in_repository(self, path, destination, source):
301 """
302 Registration of one artifact in a repository
303 file: VNF or NS
304 destination: path for index creation
305 """
306 try:
307 compresed = False
308 fields = {}
309 res, valid, fields, package_type = self.validate_artifact(path, source)
310 if not valid:
311 raise Exception('{} {} Not well configured.'.format(package_type.upper(), str(path)))
312 else:
313 if source == 'directory':
314 path = self.compress_artifact(path)
315 compresed = True
316 fields['checksum'] = self.md5(path)
317 self.indexation(destination, path, package_type, fields)
318
319 except Exception as e:
320 self._logger.debug(str(e))
321
322 finally:
323 if source == 'directory' and compresed:
324 remove(path)
325
326 def indexation(self, destination, path, package_type, fields):
327 """
328 Process for index packages
329 :param destination: index repository path
330 :param path: path of the package
331 :param package_type: package type (vnf, ns)
332 :param fields: dict with the required values
333 """
334 data_ind = {'name': fields.get('name'), 'description': fields.get('description'),
335 'vendor': fields.get('vendor'), 'path': fields.get('path')}
336
337 final_path = join(destination, package_type, fields.get('id'), fields.get('version'))
338 if isdir(join(destination, package_type, fields.get('id'))):
339 if isdir(final_path):
340 self._logger.warning('{} {} already exists'.format(package_type.upper(), str(path)))
341 else:
342 mkdir(final_path)
343 copyfile(path,
344 final_path + '/' + fields.get('id') + "-" + fields.get('version') + '.tar.gz')
345 yaml.dump(fields, open(final_path + '/' + 'metadata.yaml', 'w'),
346 Dumper=ruamel.yaml.RoundTripDumper)
347 index = yaml.load(open(destination + '/index.yaml'))
348
349 index['{}_packages'.format(package_type)][fields.get('id')][fields.get('version')] = data_ind
350 if versioning.parse(index['{}_packages'.format(package_type)][fields.get('id')][
351 'latest']) < versioning.parse(fields.get('version')):
352 index['{}_packages'.format(package_type)][fields.get('id')]['latest'] = fields.get(
353 'version')
354 yaml.dump(index, open(destination + '/index.yaml', 'w'), Dumper=ruamel.yaml.RoundTripDumper)
355 self._logger.info('{} {} added in the repository'.format(package_type.upper(), str(path)))
356 else:
357 mkdir(destination + '/{}/'.format(package_type) + fields.get('id'))
358 mkdir(final_path)
359 copyfile(path,
360 final_path + '/' + fields.get('id') + "-" + fields.get('version') + '.tar.gz')
361 yaml.dump(fields, open(join(final_path, 'metadata.yaml'), 'w'), Dumper=ruamel.yaml.RoundTripDumper)
362 index = yaml.load(open(destination + '/index.yaml'))
363
364 index['{}_packages'.format(package_type)][fields.get('id')] = {fields.get('version'): data_ind}
365 index['{}_packages'.format(package_type)][fields.get('id')]['latest'] = fields.get('version')
366 yaml.dump(index, open(join(destination, 'index.yaml'), 'w'), Dumper=ruamel.yaml.RoundTripDumper)
367 self._logger.info('{} {} added in the repository'.format(package_type.upper(), str(path)))
368
369 def current_datatime(self):
370 """
371 Datetime Generator
372 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
373 """
374 return time.strftime('%Y-%m-%dT%H:%M:%S.%sZ')
375
376 def init_directory(self, destination):
377 """
378 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
379 :param destination:
380 :return:
381 """
382 if not isdir(destination):
383 mkdir(destination)
384 if not isfile(join(destination, 'index.yaml')):
385 mkdir(join(destination, 'vnf'))
386 mkdir(join(destination, 'ns'))
387 index_data = {'apiVersion': 'v1', 'generated': self.current_datatime(), 'vnf_packages': {},
388 'ns_packages': {}}
389 with open(join(destination, 'index.yaml'), 'w') as outfile:
390 yaml.dump(index_data, outfile, default_flow_style=False)