# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import hashlib
import logging
import tarfile
import tempfile
import time
from datetime import datetime
from os import getcwd, listdir, mkdir, remove
from os.path import abspath, isdir, isfile, join
from shutil import copyfile, rmtree

import requests
import ruamel.yaml
import yaml
from packaging import version as versioning

from osm_im.validation import Validation as validation_im

from osmclient.common.exceptions import ClientException
from osmclient.sol005.repo import Repo
37 def __init__(self
, http
=None, client
=None):
40 self
._apiName
= '/admin'
41 self
._apiVersion
= '/v1'
42 self
._apiResource
= '/osmrepos'
43 self
._logger
= logging
.getLogger('osmclient')
44 self
._apiBase
= '{}{}{}'.format(self
._apiName
,
45 self
._apiVersion
, self
._apiResource
)
47 def pkg_list(self
, pkgtype
, filter=None, repo
=None):
49 Returns a repo based on name or id
51 self
._logger
.debug("")
52 self
._client
.get_token()
53 # Get OSM registered repository list
54 repositories
= self
.list()
56 repositories
= [r
for r
in repositories
if r
["name"] == repo
]
58 raise ClientException('Not repository found')
61 for repository
in repositories
:
63 r
= requests
.get('{}/index.yaml'.format(repository
.get('url')))
65 if r
.status_code
== 200:
66 repo_list
= yaml
.safe_load(r
.text
)
67 vnf_packages
= repo_list
.get('{}_packages'.format(pkgtype
))
68 for repo
in vnf_packages
:
69 versions
= vnf_packages
.get(repo
)
70 latest
= versions
.get('latest')
71 del versions
['latest']
72 for version
in versions
:
73 latest_version
= False
76 vnf_repos
.append({'vendor': versions
[version
].get("vendor"),
77 'name': versions
[version
].get("name"),
79 'description': versions
[version
].get("description"),
80 'location': versions
[version
].get("path"),
81 'repository': repository
.get('name'),
82 'repourl': repository
.get('url'),
83 'latest': latest_version
86 raise Exception('repository in url {} unreachable'.format(repository
.get('url')))
87 except Exception as e
:
88 logging
.error("Error cannot read from repository {} '{}': {}".format(repository
['name'], repository
['url'], e
))
91 vnf_repos_filtered
= []
93 for vnf_repo
in vnf_repos
:
94 for k
, v
in vnf_repo
.items():
96 kf
, vf
= filter.split('=')
97 if k
== kf
and vf
in v
:
98 vnf_repos_filtered
.append(vnf_repo
)
100 vnf_repos
= vnf_repos_filtered
103 def get_pkg(self
, pkgtype
, name
, repo
, filter, version
):
105 Returns the filename of the PKG downloaded to disk
107 self
._logger
.debug("")
108 self
._client
.get_token()
111 # Get OSM registered repository list
112 pkgs
= self
.pkg_list(pkgtype
, filter, repo
)
114 if pkg
.get('repository') == repo
and pkg
.get('name') == name
:
115 if 'latest' in version
:
116 if not pkg
.get('latest'):
119 version
= pkg
.get('version')
120 if pkg
.get('version') == version
:
121 r
= requests
.get('{}{}'.format(pkg
.get('repourl'), pkg
.get('location')), stream
=True)
122 if r
.status_code
!= 200:
123 raise ClientException("Package not found")
125 with tempfile
.NamedTemporaryFile(delete
=False) as f
:
126 f
.write(r
.raw
.read())
129 raise ClientException("{} {} not found at repo {}".format(pkgtype
,name
, repo
))
132 def pkg_get(self
, pkgtype
, name
, repo
, version
, filter):
134 pkg_name
= self
.get_pkg(pkgtype
, name
, repo
, filter, version
)
136 raise ClientException('Package not found')
137 folder
, descriptor
= self
.zip_extraction(pkg_name
)
138 with
open(descriptor
) as pkg
:
139 pkg_descriptor
= yaml
.safe_load(pkg
)
140 rmtree(folder
, ignore_errors
=False)
141 if ((pkgtype
== 'vnf' and (pkg_descriptor
.get('vnfd') or pkg_descriptor
.get('vnfd:vnfd_catalog'))) or
142 (pkgtype
== 'ns' and (pkg_descriptor
.get('nsd') or pkg_descriptor
.get('nsd:nsd_catalog')))):
143 raise ClientException('Wrong Package type')
144 return pkg_descriptor
146 def repo_index(self
, origin
=".", destination
='.'):
148 Repo Index main function
149 :param origin: origin directory for getting all the artifacts
150 :param destination: destination folder for create and index the valid artifacts
152 if destination
== '.':
153 if origin
== destination
:
154 destination
= 'repository'
156 destination
= abspath(destination
)
157 origin
= abspath(origin
)
160 origin
= join(getcwd(), origin
)
161 if destination
[0] != '/':
162 destination
= join(getcwd(), destination
)
164 self
.init_directory(destination
)
165 artifacts
= [f
for f
in listdir(origin
) if isfile(join(origin
, f
))]
166 directories
= [f
for f
in listdir(origin
) if isdir(join(origin
, f
))]
167 for artifact
in artifacts
:
168 self
.register_artifact_in_repository(join(origin
, artifact
), destination
, source
='file')
169 for artifact
in directories
:
170 self
.register_artifact_in_repository(join(origin
, artifact
), destination
, source
='directory')
171 print("\nFinal Results: ")
172 print("VNF Packages Indexed: " + str(len(glob
.glob(destination
+ "/vnf/*/*/metadata.yaml"))))
173 print("NS Packages Indexed: " + str(len(glob
.glob(destination
+ "/ns/*/*/metadata.yaml"))))
175 def md5(self
, fname
):
178 :param fname: file path
179 :return: checksum string
181 hash_md5
= hashlib
.md5()
182 with
open(fname
, "rb") as f
:
183 for chunk
in iter(lambda: f
.read(4096), b
""):
184 hash_md5
.update(chunk
)
185 return hash_md5
.hexdigest()
187 def fields_building(self
, descriptor_json
, file, package_type
):
189 From an artifact descriptor, obtain the fields required for indexing
190 :param descriptor_json: artifact description
191 :param file: artifact package
192 :param package_type: type of artifact (vnf or ns)
196 base_path
= '/' + package_type
+ '/'
197 if package_type
== "vnf":
198 if descriptor_json
.get('vnfd-catalog', False):
199 aux_dict
= descriptor_json
.get('vnfd-catalog', {}).get('vnfd', [{}])[0]
201 aux_dict
= descriptor_json
.get('vnfd:vnfd-catalog', {}).get('vnfd', [{}])[0]
204 for vdu
in aux_dict
.get('vdu', ()):
205 images
.append(vdu
.get('image'))
206 fields
['images'] = images
207 if package_type
== "ns":
208 if descriptor_json
.get('nsd-catalog', False):
209 aux_dict
= descriptor_json
.get('nsd-catalog', {}).get('nsd', [{}])[0]
211 aux_dict
= descriptor_json
.get('nsd:nsd-catalog', {}).get('nsd', [{}])[0]
215 for vnf
in aux_dict
.get('constituent-vnfd', ()):
216 vnfs
.append(vnf
.get('vnfd-id-ref'))
217 self
._logger
.debug('Used VNFS in the NSD: ' + str(vnfs
))
218 fields
['vnfd-id-ref'] = vnfs
220 fields
['name'] = aux_dict
.get('name')
221 fields
['id'] = aux_dict
.get('id')
222 fields
['description'] = aux_dict
.get('description')
223 fields
['vendor'] = aux_dict
.get('vendor')
224 fields
['version'] = aux_dict
.get('version', '1.0')
225 fields
['path'] = base_path
+ fields
['id'] + '/' + fields
['version'] + '/' + fields
.get('id') + "-" + \
226 fields
.get('version') + '.tar.gz'
229 def zip_extraction(self
, file):
231 Validation of artifact.
232 :param file: file path
233 :return: status details, status, fields, package_type
235 self
._logger
.debug("Decompressing package file")
236 temp_file
= '/tmp/' + file.split('/')[-1]
237 if file != temp_file
:
238 copyfile(file, temp_file
)
239 with tarfile
.open(temp_file
, "r:gz") as tar
:
240 folder
= tar
.getnames()[0].split('/')[0]
244 descriptor_file
= glob
.glob(folder
+ "/*.y*ml")[0]
245 return folder
, descriptor_file
247 def validate_artifact(self
, path
, source
):
249 Validation of artifact.
250 :param path: file path
251 :return: status details, status, fields, package_type
256 if source
== 'directory':
257 descriptor_file
= glob
.glob(path
+ "/*.y*ml")[0]
259 folder
, descriptor_file
= self
.zip_extraction(path
)
261 self
._logger
.debug("Opening descriptor file: {}".format(descriptor_file
))
263 with
open(descriptor_file
, 'r') as f
:
264 descriptor_data
= f
.read()
265 validation
= validation_im()
266 desc_type
, descriptor_data
= validation
.yaml_validation(descriptor_data
)
267 validation_im
.pyangbind_validation(self
, desc_type
, descriptor_data
)
268 if 'vnf' in list(descriptor_data
.keys())[0]:
271 # raise ClientException("Not VNF package")
274 self
._logger
.debug("Descriptor: {}".format(descriptor_data
))
275 fields
= self
.fields_building(descriptor_data
, path
, package_type
)
276 self
._logger
.debug("Descriptor sucessfully validated")
277 return {"detail": "{}D successfully validated".format(package_type
.upper()),
278 "code": "OK"}, True, fields
, package_type
279 except Exception as e
:
280 # Delete the folder we just created
281 return {"detail": str(e
)}, False, {}, package_type
284 rmtree(folder
, ignore_errors
=True)
286 def compress_artifact(self
, path
):
288 Compress a directory for building an artifact
289 :param path: path of the directory
294 file = path
+ '.tar.gz'
295 with tarfile
.open(file, "w:gz") as tar
:
300 def register_artifact_in_repository(self
, path
, destination
, source
):
302 Registration of one artifact in a repository
304 destination: path for index creation
309 res
, valid
, fields
, package_type
= self
.validate_artifact(path
, source
)
311 raise Exception('{} {} Not well configured.'.format(package_type
.upper(), str(path
)))
313 if source
== 'directory':
314 path
= self
.compress_artifact(path
)
316 fields
['checksum'] = self
.md5(path
)
317 self
.indexation(destination
, path
, package_type
, fields
)
319 except Exception as e
:
320 self
._logger
.debug(str(e
))
323 if source
== 'directory' and compresed
:
326 def indexation(self
, destination
, path
, package_type
, fields
):
328 Process for index packages
329 :param destination: index repository path
330 :param path: path of the package
331 :param package_type: package type (vnf, ns)
332 :param fields: dict with the required values
334 data_ind
= {'name': fields
.get('name'), 'description': fields
.get('description'),
335 'vendor': fields
.get('vendor'), 'path': fields
.get('path')}
337 final_path
= join(destination
, package_type
, fields
.get('id'), fields
.get('version'))
338 if isdir(join(destination
, package_type
, fields
.get('id'))):
339 if isdir(final_path
):
340 self
._logger
.warning('{} {} already exists'.format(package_type
.upper(), str(path
)))
344 final_path
+ '/' + fields
.get('id') + "-" + fields
.get('version') + '.tar.gz')
345 yaml
.dump(fields
, open(final_path
+ '/' + 'metadata.yaml', 'w'),
346 Dumper
=ruamel
.yaml
.RoundTripDumper
)
347 index
= yaml
.load(open(destination
+ '/index.yaml'))
349 index
['{}_packages'.format(package_type
)][fields
.get('id')][fields
.get('version')] = data_ind
350 if versioning
.parse(index
['{}_packages'.format(package_type
)][fields
.get('id')][
351 'latest']) < versioning
.parse(fields
.get('version')):
352 index
['{}_packages'.format(package_type
)][fields
.get('id')]['latest'] = fields
.get(
354 yaml
.dump(index
, open(destination
+ '/index.yaml', 'w'), Dumper
=ruamel
.yaml
.RoundTripDumper
)
355 self
._logger
.info('{} {} added in the repository'.format(package_type
.upper(), str(path
)))
357 mkdir(destination
+ '/{}/'.format(package_type
) + fields
.get('id'))
360 final_path
+ '/' + fields
.get('id') + "-" + fields
.get('version') + '.tar.gz')
361 yaml
.dump(fields
, open(join(final_path
, 'metadata.yaml'), 'w'), Dumper
=ruamel
.yaml
.RoundTripDumper
)
362 index
= yaml
.load(open(destination
+ '/index.yaml'))
364 index
['{}_packages'.format(package_type
)][fields
.get('id')] = {fields
.get('version'): data_ind
}
365 index
['{}_packages'.format(package_type
)][fields
.get('id')]['latest'] = fields
.get('version')
366 yaml
.dump(index
, open(join(destination
, 'index.yaml'), 'w'), Dumper
=ruamel
.yaml
.RoundTripDumper
)
367 self
._logger
.info('{} {} added in the repository'.format(package_type
.upper(), str(path
)))
369 def current_datatime(self
):
372 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
374 return time
.strftime('%Y-%m-%dT%H:%M:%S.%sZ')
376 def init_directory(self
, destination
):
378 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
382 if not isdir(destination
):
384 if not isfile(join(destination
, 'index.yaml')):
385 mkdir(join(destination
, 'vnf'))
386 mkdir(join(destination
, 'ns'))
387 index_data
= {'apiVersion': 'v1', 'generated': self
.current_datatime(), 'vnf_packages': {},
389 with
open(join(destination
, 'index.yaml'), 'w') as outfile
:
390 yaml
.dump(index_data
, outfile
, default_flow_style
=False)