# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import hashlib
import logging
import tarfile
import tempfile
import time
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree

import requests
import yaml
from osm_im.validation import Validation as validation_im
from packaging import version as versioning

from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
38 def __init__(self
, http
=None, client
=None):
41 self
._apiName
= '/admin'
42 self
._apiVersion
= '/v1'
43 self
._apiResource
= '/osmrepos'
44 self
._logger
= logging
.getLogger('osmclient')
45 self
._apiBase
= '{}{}{}'.format(self
._apiName
,
46 self
._apiVersion
, self
._apiResource
)
48 def pkg_list(self
, pkgtype
, filter=None, repo
=None):
50 Returns a repo based on name or id
52 self
._logger
.debug("")
53 self
._client
.get_token()
54 # Get OSM registered repository list
55 repositories
= self
.list()
57 repositories
= [r
for r
in repositories
if r
["name"] == repo
]
59 raise ClientException('Not repository found')
62 for repository
in repositories
:
64 r
= requests
.get('{}/index.yaml'.format(repository
.get('url')))
66 if r
.status_code
== 200:
67 repo_list
= yaml
.safe_load(r
.text
)
68 vnf_packages
= repo_list
.get('{}_packages'.format(pkgtype
))
69 for repo
in vnf_packages
:
70 versions
= vnf_packages
.get(repo
)
71 latest
= versions
.get('latest')
72 del versions
['latest']
73 for version
in versions
:
74 latest_version
= False
77 vnf_repos
.append({'vendor': versions
[version
].get("vendor"),
78 'name': versions
[version
].get("name"),
80 'description': versions
[version
].get("description"),
81 'location': versions
[version
].get("path"),
82 'repository': repository
.get('name'),
83 'repourl': repository
.get('url'),
84 'latest': latest_version
87 raise Exception('repository in url {} unreachable'.format(repository
.get('url')))
88 except Exception as e
:
89 logging
.error("Error cannot read from repository {} '{}': {}".format(repository
['name'], repository
['url'], e
))
92 vnf_repos_filtered
= []
94 for vnf_repo
in vnf_repos
:
95 for k
, v
in vnf_repo
.items():
97 kf
, vf
= filter.split('=')
98 if k
== kf
and vf
in v
:
99 vnf_repos_filtered
.append(vnf_repo
)
101 vnf_repos
= vnf_repos_filtered
104 def get_pkg(self
, pkgtype
, name
, repo
, filter, version
):
106 Returns the filename of the PKG downloaded to disk
108 self
._logger
.debug("")
109 self
._client
.get_token()
112 # Get OSM registered repository list
113 pkgs
= self
.pkg_list(pkgtype
, filter, repo
)
115 if pkg
.get('repository') == repo
and pkg
.get('name') == name
:
116 if 'latest' in version
:
117 if not pkg
.get('latest'):
120 version
= pkg
.get('version')
121 if pkg
.get('version') == version
:
122 r
= requests
.get('{}{}'.format(pkg
.get('repourl'), pkg
.get('location')), stream
=True)
123 if r
.status_code
!= 200:
124 raise ClientException("Package not found")
126 with tempfile
.NamedTemporaryFile(delete
=False) as f
:
127 f
.write(r
.raw
.read())
130 raise ClientException("{} {} not found at repo {}".format(pkgtype
, name
, repo
))
133 def pkg_get(self
, pkgtype
, name
, repo
, version
, filter):
135 pkg_name
= self
.get_pkg(pkgtype
, name
, repo
, filter, version
)
137 raise ClientException('Package not found')
138 folder
, descriptor
= self
.zip_extraction(pkg_name
)
139 with
open(descriptor
) as pkg
:
140 pkg_descriptor
= yaml
.safe_load(pkg
)
141 rmtree(folder
, ignore_errors
=False)
142 if ((pkgtype
== 'vnf' and (pkg_descriptor
.get('vnfd') or pkg_descriptor
.get('vnfd:vnfd_catalog'))) or
143 (pkgtype
== 'ns' and (pkg_descriptor
.get('nsd') or pkg_descriptor
.get('nsd:nsd_catalog')))):
144 raise ClientException('Wrong Package type')
145 return pkg_descriptor
147 def repo_index(self
, origin
=".", destination
='.'):
149 Repo Index main function
150 :param origin: origin directory for getting all the artifacts
151 :param destination: destination folder for create and index the valid artifacts
153 self
._logger
.debug("")
154 if destination
== '.':
155 if origin
== destination
:
156 destination
= 'repository'
158 destination
= abspath(destination
)
159 origin
= abspath(origin
)
162 origin
= join(getcwd(), origin
)
163 if destination
[0] != '/':
164 destination
= join(getcwd(), destination
)
166 self
.init_directory(destination
)
167 artifacts
= [f
for f
in listdir(origin
) if isfile(join(origin
, f
))]
168 directories
= [f
for f
in listdir(origin
) if isdir(join(origin
, f
))]
169 for artifact
in artifacts
:
170 self
.register_artifact_in_repository(join(origin
, artifact
), destination
, source
='file')
171 for artifact
in directories
:
172 self
.register_artifact_in_repository(join(origin
, artifact
), destination
, source
='directory')
173 print("\nFinal Results: ")
174 print("VNF Packages Indexed: " + str(len(glob
.glob(destination
+ "/vnf/*/*/metadata.yaml"))))
175 print("NS Packages Indexed: " + str(len(glob
.glob(destination
+ "/ns/*/*/metadata.yaml"))))
177 def md5(self
, fname
):
180 :param fname: file path
181 :return: checksum string
183 self
._logger
.debug("")
184 hash_md5
= hashlib
.md5()
185 with
open(fname
, "rb") as f
:
186 for chunk
in iter(lambda: f
.read(4096), b
""):
187 hash_md5
.update(chunk
)
188 return hash_md5
.hexdigest()
190 def fields_building(self
, descriptor_dict
, file, package_type
):
192 From an artifact descriptor, obtain the fields required for indexing
193 :param descriptor_dict: artifact description
194 :param file: artifact package
195 :param package_type: type of artifact (vnf or ns)
198 self
._logger
.debug("")
200 base_path
= '/{}/'.format(package_type
)
202 if package_type
== "vnf":
203 if descriptor_dict
.get('vnfd-catalog', False):
204 aux_dict
= descriptor_dict
.get('vnfd-catalog', {}).get('vnfd', [{}])[0]
206 aux_dict
= descriptor_dict
.get('vnfd:vnfd-catalog', {}).get('vnfd', [{}])[0]
209 for vdu
in aux_dict
.get('vdu', ()):
210 images
.append(vdu
.get('image'))
211 fields
['images'] = images
212 if package_type
== "ns":
213 if descriptor_dict
.get('nsd-catalog', False):
214 aux_dict
= descriptor_dict
.get('nsd-catalog', {}).get('nsd', [{}])[0]
216 aux_dict
= descriptor_dict
.get('nsd:nsd-catalog', {}).get('nsd', [{}])[0]
220 for vnf
in aux_dict
.get('constituent-vnfd', ()):
221 vnfs
.append(vnf
.get('vnfd-id-ref'))
222 self
._logger
.debug('Used VNFS in the NSD: ' + str(vnfs
))
223 fields
['vnfd-id-ref'] = vnfs
225 fields
['name'] = aux_dict
.get('name')
226 fields
['id'] = aux_dict
.get('id')
227 fields
['description'] = aux_dict
.get('description')
228 fields
['vendor'] = aux_dict
.get('vendor')
229 fields
['version'] = aux_dict
.get('version', '1.0')
230 fields
['path'] = "{}{}/{}/{}-{}.tar.gz".format(base_path
, fields
['id'], fields
['version'], fields
.get('id'),
231 fields
.get('version'))
234 def zip_extraction(self
, file_name
):
236 Validation of artifact.
237 :param file: file path
238 :return: status details, status, fields, package_type
240 self
._logger
.debug("Decompressing package file")
241 temp_file
= '/tmp/{}'.format(file_name
.split('/')[-1])
242 if file_name
!= temp_file
:
243 copyfile(file_name
, temp_file
)
244 with tarfile
.open(temp_file
, "r:gz") as tar
:
245 folder
= tar
.getnames()[0].split('/')[0]
249 descriptor_file
= glob
.glob('{}/*.y*ml'.format(folder
))[0]
250 return folder
, descriptor_file
252 def validate_artifact(self
, path
, source
):
254 Validation of artifact.
255 :param path: file path
256 :return: status details, status, fields, package_type
258 self
._logger
.debug("")
262 if source
== 'directory':
263 descriptor_file
= glob
.glob('{}/*.y*ml'.format(path
))[0]
265 folder
, descriptor_file
= self
.zip_extraction(path
)
267 self
._logger
.debug("Opening descriptor file: {}".format(descriptor_file
))
269 with
open(descriptor_file
, 'r') as f
:
270 descriptor_data
= f
.read()
271 validation
= validation_im()
272 desc_type
, descriptor_dict
= validation
.yaml_validation(descriptor_data
)
273 validation_im
.pyangbind_validation(self
, desc_type
, descriptor_dict
)
274 if 'vnf' in list(descriptor_dict
.keys())[0]:
277 # raise ClientException("Not VNF package")
280 self
._logger
.debug("Descriptor: {}".format(descriptor_dict
))
281 fields
= self
.fields_building(descriptor_dict
, path
, package_type
)
282 self
._logger
.debug("Descriptor sucessfully validated")
283 return {"detail": "{}D successfully validated".format(package_type
.upper()),
284 "code": "OK"}, True, fields
, package_type
285 except Exception as e
:
286 # Delete the folder we just created
287 return {"detail": str(e
)}, False, {}, package_type
290 rmtree(folder
, ignore_errors
=True)
292 def register_artifact_in_repository(self
, path
, destination
, source
):
294 Registration of one artifact in a repository
296 destination: path for index creation
298 self
._logger
.debug("")
303 _
, valid
, fields
, package_type
= self
.validate_artifact(path
, source
)
305 raise Exception('{} {} Not well configured.'.format(package_type
.upper(), str(path
)))
307 if source
== 'directory':
308 path
= pt
.build(path
)
310 fields
['checksum'] = self
.md5(path
)
311 self
.indexation(destination
, path
, package_type
, fields
)
313 except Exception as e
:
314 self
._logger
.exception("Error registering artifact in Repository: {}".format(e
))
317 if source
== 'directory' and compresed
:
320 def indexation(self
, destination
, path
, package_type
, fields
):
322 Process for index packages
323 :param destination: index repository path
324 :param path: path of the package
325 :param package_type: package type (vnf, ns)
326 :param fields: dict with the required values
328 self
._logger
.debug("")
329 data_ind
= {'name': fields
.get('name'), 'description': fields
.get('description'),
330 'vendor': fields
.get('vendor'), 'path': fields
.get('path')}
332 final_path
= join(destination
, package_type
, fields
.get('id'), fields
.get('version'))
333 if isdir(join(destination
, package_type
, fields
.get('id'))):
334 if isdir(final_path
):
335 self
._logger
.warning('{} {} already exists'.format(package_type
.upper(), str(path
)))
339 final_path
+ '/' + fields
.get('id') + "-" + fields
.get('version') + '.tar.gz')
340 yaml
.safe_dump(fields
, open(final_path
+ '/' + 'metadata.yaml', 'w'),
341 default_flow_style
=False, width
=80, indent
=4)
342 index
= yaml
.safe_load(open(destination
+ '/index.yaml'))
344 index
['{}_packages'.format(package_type
)][fields
.get('id')][fields
.get('version')] = data_ind
345 if versioning
.parse(index
['{}_packages'.format(package_type
)][fields
.get('id')][
346 'latest']) < versioning
.parse(fields
.get('version')):
347 index
['{}_packages'.format(package_type
)][fields
.get('id')]['latest'] = fields
.get(
349 yaml
.safe_dump(index
, open(destination
+ '/index.yaml', 'w'),
350 default_flow_style
=False, width
=80, indent
=4)
351 self
._logger
.info('{} {} added in the repository'.format(package_type
.upper(), str(path
)))
353 mkdir(destination
+ '/{}/'.format(package_type
) + fields
.get('id'))
356 final_path
+ '/' + fields
.get('id') + "-" + fields
.get('version') + '.tar.gz')
357 yaml
.safe_dump(fields
, open(join(final_path
, 'metadata.yaml'), 'w'),
358 default_flow_style
=False, width
=80, indent
=4)
359 index
= yaml
.safe_load(open(destination
+ '/index.yaml'))
361 index
['{}_packages'.format(package_type
)][fields
.get('id')] = {fields
.get('version'): data_ind
}
362 index
['{}_packages'.format(package_type
)][fields
.get('id')]['latest'] = fields
.get('version')
363 yaml
.safe_dump(index
, open(join(destination
, 'index.yaml'), 'w'),
364 default_flow_style
=False, width
=80, indent
=4)
365 self
._logger
.info('{} {} added in the repository'.format(package_type
.upper(), str(path
)))
367 def current_datatime(self
):
370 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
372 self
._logger
.debug("")
373 return time
.strftime('%Y-%m-%dT%H:%M:%S.%sZ')
375 def init_directory(self
, destination
):
377 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
381 self
._logger
.debug("")
382 if not isdir(destination
):
384 if not isfile(join(destination
, 'index.yaml')):
385 mkdir(join(destination
, 'vnf'))
386 mkdir(join(destination
, 'ns'))
387 index_data
= {'apiVersion': 'v1', 'generated': self
.current_datatime(), 'vnf_packages': {},
389 with
open(join(destination
, 'index.yaml'), 'w') as outfile
:
390 yaml
.safe_dump(index_data
, outfile
, default_flow_style
=False, width
=80, indent
=4)