#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

"""
OSM Repo API handling
"""
from datetime import datetime
import glob
import hashlib
import logging
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from packaging import version as versioning
import requests
import yaml


class OSMRepo(Repo):
    def __init__(self, http=None, client=None):
        self._http = http
        self._client = client
        self._apiName = '/admin'
        self._apiVersion = '/v1'
        self._apiResource = '/osmrepos'
        self._logger = logging.getLogger('osmclient')
        self._apiBase = '{}{}{}'.format(self._apiName,
                                        self._apiVersion, self._apiResource)

    def pkg_list(self, pkgtype, filter=None, repo=None):
        """
        Returns the list of packages available in the registered repositories
        (or in a single repository, if 'repo' is given)
        """
        self._logger.debug("")
        self._client.get_token()
        # Get OSM registered repository list
        repositories = self.list()
        if repo:
            repositories = [r for r in repositories if r["name"] == repo]
        if not repositories:
            raise ClientException('Repository not found')

        vnf_repos = []
        for repository in repositories:
            try:
                r = requests.get('{}/index.yaml'.format(repository.get('url')))
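                # index.yaml is assumed to follow the layout written by indexation(), e.g.:
                #   vnf_packages:
                #       <package-id>:
                #           <version>: {name: ..., description: ..., vendor: ..., path: ...}
                #           latest: <version>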
                if r.status_code == 200:
                    repo_list = yaml.safe_load(r.text)
                    vnf_packages = repo_list.get('{}_packages'.format(pkgtype))
                    for pkg_name in vnf_packages:
                        versions = vnf_packages.get(pkg_name)
                        latest = versions.get('latest')
                        del versions['latest']
                        for version in versions:
                            latest_version = False
                            if version == latest:
                                latest_version = True
                            vnf_repos.append({'vendor': versions[version].get("vendor"),
                                              'name': versions[version].get("name"),
                                              'version': version,
                                              'description': versions[version].get("description"),
                                              'location': versions[version].get("path"),
                                              'repository': repository.get('name'),
                                              'repourl': repository.get('url'),
                                              'latest': latest_version
                                              })
                else:
                    raise Exception('Repository at URL {} is unreachable'.format(repository.get('url')))
            except Exception as e:
                self._logger.error("Cannot read from repository {} '{}': {}".format(
                    repository['name'], repository['url'], e))
                continue

        vnf_repos_filtered = []
        if filter:
            for vnf_repo in vnf_repos:
                for k, v in vnf_repo.items():
                    if v:
                        kf, vf = filter.split('=')
                        if k == kf and vf in v:
                            vnf_repos_filtered.append(vnf_repo)
                            break
            vnf_repos = vnf_repos_filtered
        return vnf_repos

    def get_pkg(self, pkgtype, name, repo, filter, version):
        """
        Returns the filename of the PKG downloaded to disk
        """
        self._logger.debug("")
        self._client.get_token()
        f = None
        f_name = None
        # Get OSM registered repository list
        pkgs = self.pkg_list(pkgtype, filter, repo)
        for pkg in pkgs:
            if pkg.get('repository') == repo and pkg.get('name') == name:
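                # 'latest' is accepted as a symbolic version: it is resolved here to the
                # concrete version flagged as latest in the repository index.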
                if 'latest' in version:
                    if not pkg.get('latest'):
                        continue
                    else:
                        version = pkg.get('version')
                if pkg.get('version') == version:
                    r = requests.get('{}{}'.format(pkg.get('repourl'), pkg.get('location')), stream=True)
                    if r.status_code != 200:
                        raise ClientException("Package not found")

                    with tempfile.NamedTemporaryFile(delete=False) as f:
                        f.write(r.raw.read())
                        f_name = f.name
        if not f_name:
            raise ClientException("{} {} not found at repo {}".format(pkgtype, name, repo))
        return f_name

    def pkg_get(self, pkgtype, name, repo, version, filter):

        pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
        if not pkg_name:
            raise ClientException('Package not found')
        folder, descriptor = self.zip_extraction(pkg_name)
        with open(descriptor) as pkg:
            pkg_descriptor = yaml.safe_load(pkg)
        rmtree(folder, ignore_errors=False)
        if ((pkgtype == 'vnf' and not (pkg_descriptor.get('vnfd') or pkg_descriptor.get('vnfd:vnfd-catalog'))) or
                (pkgtype == 'ns' and not (pkg_descriptor.get('nsd') or pkg_descriptor.get('nsd:nsd-catalog')))):
            raise ClientException('Wrong Package type')
        return pkg_descriptor

    def repo_index(self, origin=".", destination='.'):
        """
        Repo Index main function
        :param origin: origin directory containing the artifacts to index
        :param destination: destination folder where the index of valid artifacts is created
        """
        self._logger.debug("")
        if destination == '.':
            if origin == destination:
                destination = 'repository'

        destination = abspath(destination)
        origin = abspath(origin)

        if origin[0] != '/':
            origin = join(getcwd(), origin)
        if destination[0] != '/':
            destination = join(getcwd(), destination)

        self.init_directory(destination)
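        # Both standalone package files and package directories found directly under
        # 'origin' are indexed; the resulting tree is the one init_directory()/indexation()
        # produce: <destination>/index.yaml and <destination>/<vnf|ns>/<id>/<version>/.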
        artifacts = [f for f in listdir(origin) if isfile(join(origin, f))]
        directories = [f for f in listdir(origin) if isdir(join(origin, f))]
        for artifact in artifacts:
            self.register_artifact_in_repository(join(origin, artifact), destination, source='file')
        for artifact in directories:
            self.register_artifact_in_repository(join(origin, artifact), destination, source='directory')
        print("\nFinal Results: ")
        print("VNF Packages Indexed: " + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml"))))
        print("NS Packages Indexed: " + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml"))))

    def md5(self, fname):
        """
        Checksum generator
        :param fname: file path
        :return: checksum string
        """
        self._logger.debug("")
        hash_md5 = hashlib.md5()
        with open(fname, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing
        :param descriptor_dict: artifact description
        :param file: artifact package
        :param package_type: type of artifact (vnf or ns)
        :return: fields
        """
        self._logger.debug("")
        fields = {}
        base_path = '/{}/'.format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            if descriptor_dict.get('vnfd-catalog', False):
                aux_dict = descriptor_dict.get('vnfd-catalog', {}).get('vnfd', [{}])[0]
            else:
                aux_dict = descriptor_dict.get('vnfd:vnfd-catalog', {}).get('vnfd', [{}])[0]

            images = []
            for vdu in aux_dict.get('vdu', ()):
                images.append(vdu.get('image'))
            fields['images'] = images
        if package_type == "ns":
            if descriptor_dict.get('nsd-catalog', False):
                aux_dict = descriptor_dict.get('nsd-catalog', {}).get('nsd', [{}])[0]
            else:
                aux_dict = descriptor_dict.get('nsd:nsd-catalog', {}).get('nsd', [{}])[0]

            vnfs = []

            for vnf in aux_dict.get('constituent-vnfd', ()):
                vnfs.append(vnf.get('vnfd-id-ref'))
            self._logger.debug('Used VNFS in the NSD: ' + str(vnfs))
            fields['vnfd-id-ref'] = vnfs

        fields['name'] = aux_dict.get('name')
        fields['id'] = aux_dict.get('id')
        fields['description'] = aux_dict.get('description')
        fields['vendor'] = aux_dict.get('vendor')
        fields['version'] = aux_dict.get('version', '1.0')
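        # Package location inside the repository, as later exposed to clients through the
        # 'path' field of index.yaml: /<package_type>/<id>/<version>/<id>-<version>.tar.gz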
        fields['path'] = "{}{}/{}/{}-{}.tar.gz".format(base_path, fields['id'], fields['version'],
                                                       fields.get('id'), fields.get('version'))
        return fields

    def zip_extraction(self, file_name):
        """
        Extract the package artifact
        :param file_name: path of the package file (.tar.gz)
        :return: extracted folder name, descriptor file path
        """
        self._logger.debug("Decompressing package file")
        temp_file = '/tmp/{}'.format(file_name.split('/')[-1])
        if file_name != temp_file:
            copyfile(file_name, temp_file)
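        # The archive is extracted into the current working directory; the first entry
        # of the tarball is assumed to be the package's root folder.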
        with tarfile.open(temp_file, "r:gz") as tar:
            folder = tar.getnames()[0].split('/')[0]
            tar.extractall()

        remove(temp_file)
        descriptor_file = glob.glob('{}/*.y*ml'.format(folder))[0]
        return folder, descriptor_file

    def validate_artifact(self, path, source):
        """
        Validation of artifact
        :param path: file or directory path
        :param source: flag indicating whether 'path' is a 'directory' or a package file
        :return: status details, status, fields, package_type
        """
        self._logger.debug("")
        package_type = ''
        folder = ''
        try:
            if source == 'directory':
                descriptor_file = glob.glob('{}/*.y*ml'.format(path))[0]
            else:
                folder, descriptor_file = self.zip_extraction(path)

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, 'r') as f:
                descriptor_data = f.read()
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
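            # The first top-level key of the descriptor decides the package type
            # (e.g. 'vnfd:vnfd-catalog' or 'vnfd-catalog' -> vnf, otherwise ns).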
            if 'vnf' in list(descriptor_dict.keys())[0]:
                package_type = 'vnf'
            else:
                package_type = 'ns'

            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug("Descriptor successfully validated")
            return {"detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK"}, True, fields, package_type
        except Exception as e:
            # Any extracted folder is cleaned up in the finally block below
            return {"detail": str(e)}, False, {}, package_type
        finally:
            if folder:
                rmtree(folder, ignore_errors=True)

    def register_artifact_in_repository(self, path, destination, source):
        """
        Registration of one artifact in a repository
        :param path: path of the VNF or NS package (file or directory)
        :param destination: path of the repository where the index is created
        :param source: 'file' or 'directory'
        """
        self._logger.debug("")
        pt = PackageTool()
        compressed = False
        try:
            fields = {}
            _, valid, fields, package_type = self.validate_artifact(path, source)
            if not valid:
                raise Exception('{} {} is not well configured.'.format(package_type.upper(), str(path)))
            else:
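                # A directory source is first packed into a .tar.gz with the OSM package
                # tool; the generated archive is removed again in the finally block below.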
                if source == 'directory':
                    path = pt.build(path)
                    compressed = True
                fields['checksum'] = self.md5(path)
                self.indexation(destination, path, package_type, fields)

        except Exception as e:
            self._logger.exception("Error registering artifact in Repository: {}".format(e))

        finally:
            if source == 'directory' and compressed:
                remove(path)

    def indexation(self, destination, path, package_type, fields):
        """
        Index a package in the repository
        :param destination: index repository path
        :param path: path of the package
        :param package_type: package type (vnf, ns)
        :param fields: dict with the required values
        """
        self._logger.debug("")
        data_ind = {'name': fields.get('name'), 'description': fields.get('description'),
                    'vendor': fields.get('vendor'), 'path': fields.get('path')}
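        # index.yaml is expected to keep, per package id, one entry per version plus a
        # 'latest' pointer, e.g.:
        #   vnf_packages:
        #       <id>:
        #           <version>: {name: ..., description: ..., vendor: ..., path: ...}
        #           latest: <version>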
        final_path = join(destination, package_type, fields.get('id'), fields.get('version'))
        if isdir(join(destination, package_type, fields.get('id'))):
            if isdir(final_path):
                self._logger.warning('{} {} already exists'.format(package_type.upper(), str(path)))
            else:
                mkdir(final_path)
                copyfile(path,
                         final_path + '/' + fields.get('id') + "-" + fields.get('version') + '.tar.gz')
                yaml.safe_dump(fields, open(final_path + '/' + 'metadata.yaml', 'w'),
                               default_flow_style=False, width=80, indent=4)
                index = yaml.safe_load(open(destination + '/index.yaml'))

                index['{}_packages'.format(package_type)][fields.get('id')][fields.get('version')] = data_ind
                if versioning.parse(index['{}_packages'.format(package_type)][fields.get('id')][
                        'latest']) < versioning.parse(fields.get('version')):
                    index['{}_packages'.format(package_type)][fields.get('id')]['latest'] = fields.get(
                        'version')
                yaml.safe_dump(index, open(destination + '/index.yaml', 'w'),
                               default_flow_style=False, width=80, indent=4)
                self._logger.info('{} {} added to the repository'.format(package_type.upper(), str(path)))
        else:
            mkdir(destination + '/{}/'.format(package_type) + fields.get('id'))
            mkdir(final_path)
            copyfile(path,
                     final_path + '/' + fields.get('id') + "-" + fields.get('version') + '.tar.gz')
            yaml.safe_dump(fields, open(join(final_path, 'metadata.yaml'), 'w'),
                           default_flow_style=False, width=80, indent=4)
            index = yaml.safe_load(open(destination + '/index.yaml'))

            index['{}_packages'.format(package_type)][fields.get('id')] = {fields.get('version'): data_ind}
            index['{}_packages'.format(package_type)][fields.get('id')]['latest'] = fields.get('version')
            yaml.safe_dump(index, open(join(destination, 'index.yaml'), 'w'),
                           default_flow_style=False, width=80, indent=4)
            self._logger.info('{} {} added to the repository'.format(package_type.upper(), str(path)))

    def current_datatime(self):
        """
        Datetime generator
        :return: current UTC datetime as a string, e.g. "2020-04-29T08:41:07.681653Z"
        """
        self._logger.debug("")
        return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    def init_directory(self, destination):
        """
        Initialize the index directory: create index.yaml and the vnf/ns subdirectories
        :param destination: repository root path
        """
        self._logger.debug("")
        if not isdir(destination):
            mkdir(destination)
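        # A brand-new repository only needs this empty skeleton; entries are added later
        # by indexation().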
        if not isfile(join(destination, 'index.yaml')):
            mkdir(join(destination, 'vnf'))
            mkdir(join(destination, 'ns'))
            index_data = {'apiVersion': 'v1', 'generated': self.current_datatime(), 'vnf_packages': {},
                          'ns_packages': {}}
            with open(join(destination, 'index.yaml'), 'w') as outfile:
                yaml.safe_dump(index_data, outfile, default_flow_style=False, width=80, indent=4)