1 |
|
# |
2 |
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may |
3 |
|
# not use this file except in compliance with the License. You may obtain |
4 |
|
# a copy of the License at |
5 |
|
# |
6 |
|
# http://www.apache.org/licenses/LICENSE-2.0 |
7 |
|
# |
8 |
|
# Unless required by applicable law or agreed to in writing, software |
9 |
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
10 |
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
11 |
|
# License for the specific language governing permissions and limitations |
12 |
|
# under the License. |
13 |
|
# |
14 |
|
|
15 |
1 |
""" |
16 |
|
OSM Repo API handling |
17 |
|
""" |
18 |
1 |
import glob |
19 |
1 |
import hashlib |
20 |
1 |
import logging |
21 |
1 |
from os import listdir, mkdir, getcwd, remove |
22 |
1 |
from os.path import isfile, isdir, join, abspath |
23 |
1 |
from shutil import copyfile, rmtree |
24 |
1 |
import tarfile |
25 |
1 |
import tempfile |
26 |
1 |
import time |
27 |
|
|
28 |
1 |
from osm_im.validation import Validation as validation_im |
29 |
1 |
from osmclient.common.exceptions import ClientException |
30 |
1 |
from osmclient.common.package_tool import PackageTool |
31 |
1 |
from osmclient.sol005.repo import Repo |
32 |
1 |
from packaging import version as versioning |
33 |
1 |
import requests |
34 |
1 |
import yaml |
35 |
|
|
36 |
|
|
37 |
1 |
class OSMRepo(Repo):
    """Client-side handler for the OSM repository (osmrepos) admin API."""

    def __init__(self, http=None, client=None):
        """Keep the HTTP helper and client references and compose the API base path."""
        self._http = http
        self._client = client
        self._apiName = '/admin'
        self._apiVersion = '/v1'
        self._apiResource = '/osmrepos'
        self._logger = logging.getLogger('osmclient')
        # Resulting base path: /admin/v1/osmrepos
        self._apiBase = ''.join((self._apiName, self._apiVersion, self._apiResource))
47 |
|
|
48 |
1 |
def pkg_list(self, pkgtype, filter=None, repo=None):
    """
    Return the list of packages of type *pkgtype* available in the
    OSM-registered repositories.

    :param pkgtype: package type prefix used in the repo index ('vnf' or 'ns')
    :param filter: optional 'key=value' string; keep only packages whose
        field <key> contains <value>
    :param repo: optional repository name to restrict the search to
    :return: list of package-description dicts
    :raises ClientException: when no registered repository matches
    """
    self._logger.debug("")
    self._client.get_token()
    # Get OSM registered repository list
    repositories = self.list()
    if repo:
        repositories = [r for r in repositories if r["name"] == repo]
    if not repositories:
        raise ClientException('No repository found')

    vnf_repos = []
    for repository in repositories:
        try:
            r = requests.get('{}/index.yaml'.format(repository.get('url')))

            if r.status_code == 200:
                repo_list = yaml.safe_load(r.text)
                vnf_packages = repo_list.get('{}_packages'.format(pkgtype))
                # BUGFIX: the original loop variable was named 'repo',
                # shadowing (and clobbering) the method parameter.
                for pkg_id in vnf_packages:
                    versions = vnf_packages.get(pkg_id)
                    latest = versions.get('latest')
                    # Drop the 'latest' marker so only real versions remain;
                    # tolerate indexes that lack it instead of raising KeyError.
                    versions.pop('latest', None)
                    for version in versions:
                        latest_version = version == latest
                        vnf_repos.append({'vendor': versions[version].get("vendor"),
                                          'name': versions[version].get("name"),
                                          'version': version,
                                          'description': versions[version].get("description"),
                                          'location': versions[version].get("path"),
                                          'repository': repository.get('name'),
                                          'repourl': repository.get('url'),
                                          'latest': latest_version
                                          })
            else:
                raise Exception('repository in url {} unreachable'.format(repository.get('url')))
        except Exception as e:
            # Best effort: log the failure and keep scanning the rest of
            # the registered repositories.
            self._logger.error(
                "Error cannot read from repository {} '{}': {}".format(
                    repository["name"], repository["url"], e
                ),
                exc_info=True
            )
            continue

    # Optional 'key=value' post-filter over every non-empty field.
    vnf_repos_filtered = []
    if filter:
        for vnf_repo in vnf_repos:
            for k, v in vnf_repo.items():
                if v:
                    kf, vf = filter.split('=')
                    if k == kf and vf in v:
                        vnf_repos_filtered.append(vnf_repo)
                        break
        vnf_repos = vnf_repos_filtered
    return vnf_repos
108 |
|
|
109 |
1 |
def get_pkg(self, pkgtype, name, repo, filter, version):
    """
    Download a package from a repository and return the filename it was
    written to on disk.

    :param pkgtype: package type ('vnf' or 'ns')
    :param name: package name to look for
    :param repo: repository name the package must belong to
    :param filter: optional 'key=value' filter forwarded to pkg_list
    :param version: exact version string, or 'latest'
    :return: path of the temporary file holding the downloaded package
    :raises ClientException: if the package cannot be found or fetched
    """
    self._logger.debug("")
    self._client.get_token()
    f = None
    f_name = None
    # Get OSM registered repository list
    pkgs = self.pkg_list(pkgtype, filter, repo)
    for pkg in pkgs:
        if pkg.get('repository') == repo and pkg.get('name') == name:
            # BUGFIX: the original used a substring test ('latest' in version)
            # which also matched versions merely containing "latest" and
            # raised TypeError when version was None; exact comparison is
            # the intended check.
            if version == 'latest':
                if not pkg.get('latest'):
                    continue
                else:
                    # Resolve 'latest' to the concrete version of this entry.
                    version = pkg.get('version')
            if pkg.get('version') == version:
                r = requests.get('{}{}'.format(pkg.get('repourl'), pkg.get('location')), stream=True)
                if r.status_code != 200:
                    raise ClientException("Package not found")

                with tempfile.NamedTemporaryFile(delete=False) as f:
                    f.write(r.raw.read())
                    f_name = f.name
    if not f_name:
        raise ClientException("{} {} not found at repo {}".format(pkgtype, name, repo))
    return f_name
137 |
|
|
138 |
1 |
def pkg_get(self, pkgtype, name, repo, version, filter):
    """
    Fetch a package from a repository and return its parsed descriptor.

    :param pkgtype: expected package type ('vnf' or 'ns')
    :param name: package name
    :param repo: repository name
    :param version: version string or 'latest'
    :param filter: optional 'key=value' filter forwarded to pkg_list
    :return: descriptor dict parsed from the package's YAML file
    :raises ClientException: if the package is missing or its descriptor
        does not match *pkgtype*
    """
    pkg_name = self.get_pkg(pkgtype, name, repo, filter, version)
    if not pkg_name:
        raise ClientException('Package not found')
    folder, descriptor = self.zip_extraction(pkg_name)
    with open(descriptor) as pkg:
        pkg_descriptor = yaml.safe_load(pkg)
    rmtree(folder, ignore_errors=False)
    # BUGFIX: the original condition was inverted — it raised
    # 'Wrong Package type' precisely when the descriptor DID contain the
    # keys matching the requested type. Raise when the expected keys are
    # absent instead.
    if ((pkgtype == 'vnf' and not (pkg_descriptor.get('vnfd') or pkg_descriptor.get('vnfd:vnfd_catalog'))) or
            (pkgtype == 'ns' and not (pkg_descriptor.get('nsd') or pkg_descriptor.get('nsd:nsd_catalog')))):
        raise ClientException('Wrong Package type')
    return pkg_descriptor
151 |
|
|
152 |
1 |
def repo_index(self, origin=".", destination='.'):
    """
    Repo Index main function: scan *origin* for artifacts and build/update
    a repository index under *destination*.

    :param origin: origin directory for getting all the artifacts
    :param destination: destination folder for create and index the valid artifacts
    """
    self._logger.debug("Starting index composition")
    if destination == ".":
        if origin == destination:
            # Avoid indexing the repository into the very directory being
            # scanned: fall back to a 'repository' subfolder.
            destination = 'repository'

    destination = abspath(destination)
    origin = abspath(origin)
    self._logger.debug(f"Paths {destination}, {origin}")
    # NOTE(review): abspath() already returns absolute paths, so these two
    # guards appear to be dead code — confirm before removing.
    if origin[0] != "/":
        origin = join(getcwd(), origin)
    if destination[0] != '/':
        destination = join(getcwd(), destination)

    # Ensure the index root exists with index.yaml and per-type folders.
    self.init_directory(destination)
    artifacts = []
    directories = []
    # Classify origin entries: *.tar.gz files are ready-made artifacts;
    # non-hidden directories (except the destination itself) are package
    # sources that must be built first.
    for f in listdir(origin):
        if isfile(join(origin, f)) and f.endswith('.tar.gz'):
            artifacts.append(f)
        elif isdir(join(origin, f)) and f != destination.split('/')[-1] and not f.startswith('.'):
            directories.append(f)  # TODO: Document that nested directories are not supported
        else:
            self._logger.debug(f"Ignoring {f}")
    for artifact in artifacts:
        self.register_artifact_in_repository(
            join(origin, artifact), destination, source="artifact"
        )
    for artifact in directories:
        self.register_artifact_in_repository(
            join(origin, artifact), destination, source="directory"
        )
    # Report counts by counting the metadata.yaml files actually written.
    self._logger.info("\nFinal Results: ")
    self._logger.info(
        "VNF Packages Indexed: "
        + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
    )
    self._logger.info(
        "NS Packages Indexed: "
        + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
    )

    self._logger.info(
        "NST Packages Indexed: "
        + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
    )
203 |
|
|
204 |
1 |
def md5(self, fname):
    """
    Compute the MD5 checksum of a file, reading it in 4 KiB chunks.

    :param fname: file path
    :return: checksum as a hex-digest string
    """
    self._logger.debug("")
    digest = hashlib.md5()
    with open(fname, "rb") as stream:
        while True:
            block = stream.read(4096)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
216 |
|
|
217 |
1 |
def fields_building(self, descriptor_dict, file, package_type):
    """
    From an artifact descriptor, obtain the fields required for indexing.

    :param descriptor_dict: artifact description (parsed YAML dict)
    :param file: artifact package path (kept for interface compatibility)
    :param package_type: type of artifact (vnf, ns, nst)
    :return: dict of index fields (name, id, version, path, ...)
    :raises ValueError: when the descriptor layout is not recognized
    """
    self._logger.debug("")

    fields = {}
    base_path = '/{}/'.format(package_type)
    aux_dict = {}
    if package_type == "vnf":
        # Accept the three historical descriptor layouts.
        if descriptor_dict.get("vnfd-catalog", False):
            aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
        elif descriptor_dict.get("vnfd:vnfd-catalog"):
            aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get("vnfd", [{}])[0]
        elif descriptor_dict.get("vnfd"):
            aux_dict = descriptor_dict["vnfd"]
            if aux_dict.get("vnfd"):
                aux_dict = aux_dict['vnfd'][0]
        else:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)
        self._logger.debug(f"Extracted descriptor info for {package_type}: {aux_dict}")
        # One image per VDU (or the KDU name when there is no VDU list).
        images = []
        for vdu in aux_dict.get("vdu", aux_dict.get('kdu', ())):
            images.append(vdu.get("image", vdu.get('name')))
        fields["images"] = images
    elif package_type == "ns":
        if descriptor_dict.get("nsd-catalog", False):
            aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
        elif descriptor_dict.get("nsd:nsd-catalog"):
            aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[0]
        elif descriptor_dict.get("nsd"):
            aux_dict = descriptor_dict['nsd']
            if aux_dict.get("nsd"):
                # CONSISTENCY FIX: descend from aux_dict (as in the vnf
                # branch) rather than re-indexing descriptor_dict.
                aux_dict = aux_dict["nsd"][0]
        else:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)
        vnfs = []
        if aux_dict.get("constituent-vnfd"):
            for vnf in aux_dict.get("constituent-vnfd", ()):
                vnfs.append(vnf.get("vnfd-id-ref"))
        else:
            vnfs = aux_dict.get('vnfd-id')
        self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
        fields["vnfd-id-ref"] = vnfs
    elif package_type == 'nst':
        if descriptor_dict.get("nst-catalog", False):
            aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
        elif descriptor_dict.get("nst:nst-catalog"):
            aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[0]
        elif descriptor_dict.get("nst"):
            aux_dict = descriptor_dict['nst']
            if aux_dict.get("nst"):
                # Same consistency fix as the ns branch above.
                aux_dict = aux_dict["nst"][0]
        nsds = []
        for nsd in aux_dict.get("netslice-subnet", ()):
            nsds.append(nsd.get("nsd-ref"))
        self._logger.debug("Used NSDs in the NST: " + str(nsds))
        if not nsds:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)
        fields["nsd-id-ref"] = nsds
    else:
        msg = f"Unexpected descriptor format {descriptor_dict}"
        self._logger.error(msg)
        raise ValueError(msg)

    fields["name"] = aux_dict.get("name")
    fields["id"] = aux_dict.get("id")
    fields["description"] = aux_dict.get("description")
    fields["vendor"] = aux_dict.get("vendor")
    fields["version"] = str(aux_dict.get("version", "1.0"))
    # Canonical location inside the repository: /<type>/<id>/<version>/<id>-<version>.tar.gz
    fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
        base_path,
        fields["id"],
        fields["version"],
        fields["id"],
        fields["version"],
    )
    return fields
305 |
|
|
306 |
1 |
def zip_extraction(self, file_name):
    """
    Decompress a .tar.gz package into the current working directory.

    :param file_name: path of the package file
    :return: (folder, descriptor_file) — the extracted top-level folder
        name and the first YAML descriptor found inside it
    """
    self._logger.debug("Decompressing package file")
    # Work on a copy under /tmp so the original package is left untouched.
    temp_file = '/tmp/{}'.format(file_name.split('/')[-1])
    if file_name != temp_file:
        copyfile(file_name, temp_file)
    with tarfile.open(temp_file, "r:gz") as tar:
        # Assumes the archive has a single top-level folder — TODO confirm.
        folder = tar.getnames()[0].split('/')[0]
        # SECURITY NOTE(review): extractall() without member sanitization is
        # vulnerable to path traversal if the archive is untrusted; consider
        # the 'filter' argument (Python 3.12+) or validating member names.
        tar.extractall()

    remove(temp_file)
    # Take the first *.yml/*.yaml file in the extracted folder as the
    # package descriptor; raises IndexError if none is present.
    descriptor_file = glob.glob('{}/*.y*ml'.format(folder))[0]
    return folder, descriptor_file
323 |
|
|
324 |
1 |
def validate_artifact(self, path, source):
    """
    Validation of artifact.

    :param path: file (or directory) path of the artifact
    :param source: flag to select the correct file type (directory or artifact)
    :return: status details, status, fields, package_type
    """
    self._logger.debug("")
    package_type = ''
    folder = ''
    try:
        if source == 'directory':
            # Source package: the descriptor is a YAML file in the directory.
            descriptor_file = glob.glob('{}/*.y*ml'.format(path))[0]
        else:
            # Packed artifact: extract it first, remember the folder so the
            # finally-clause can clean it up.
            folder, descriptor_file = self.zip_extraction(path)

        self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

        with open(descriptor_file, 'r') as f:
            descriptor_data = f.read()
        self._logger.debug(f"Descriptor data: {descriptor_data}")
        validation = validation_im()
        desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
        try:
            # NOTE(review): called unbound with this OSMRepo instance as
            # 'self' — relies on pyangbind_validation not using Validation
            # instance state; confirm against osm_im.
            validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
        except Exception as e:
            self._logger.error(e, exc_info=True)
            raise e
        # Classify by the first top-level descriptor key. 'nst' must be
        # tested before 'ns' because 'ns' is a substring of 'nst'.
        descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
        if "vnf" in descriptor_type_ref:
            package_type = "vnf"
        elif "nst" in descriptor_type_ref:
            package_type = "nst"
        elif "ns" in descriptor_type_ref:
            package_type = "ns"
        else:
            msg = f"Unknown package type {descriptor_type_ref}"
            self._logger.error(msg)
            raise ValueError(msg)
        self._logger.debug("Descriptor: {}".format(descriptor_dict))
        fields = self.fields_building(descriptor_dict, path, package_type)
        self._logger.debug(f"Descriptor successfully validated {fields}")
        return (
            {
                "detail": "{}D successfully validated".format(package_type.upper()),
                "code": "OK",
            },
            True,
            fields,
            package_type,
        )
    except Exception as e:
        # Any failure is reported as an invalid artifact rather than raised.
        # Delete the folder we just created
        return {"detail": str(e)}, False, {}, package_type
    finally:
        # Remove the extraction folder regardless of the outcome.
        if folder:
            rmtree(folder, ignore_errors=True)
381 |
|
|
382 |
1 |
def register_artifact_in_repository(self, path, destination, source):
    """
    Registration of one artifact in a repository.

    :param path: VNF or NS artifact (tarball path, or directory when
        source == 'directory')
    :param destination: path for index creation
    :param source: 'artifact' for a packed tarball, 'directory' for a
        package source that must be built first
    """
    self._logger.debug("")
    pt = PackageTool()
    # Tracks whether a tarball was built from a directory and must be
    # removed in the finally-clause.
    compressed = False
    try:
        fields = {}
        _, valid, fields, package_type = self.validate_artifact(path, source)
        if not valid:
            raise Exception(
                "{} {} Not well configured.".format(package_type.upper(), str(path))
            )
        else:
            if source == "directory":
                # Build the tarball from the source directory; 'path' now
                # points at the built package.
                path = pt.build(path)
                self._logger.debug(f"Directory path {path}")
                compressed = True
            fields["checksum"] = self.md5(path)
            self.indexation(destination, path, package_type, fields)

    except Exception as e:
        # NOTE(review): errors are logged and swallowed so indexing of the
        # remaining artifacts can continue (best-effort semantics).
        self._logger.exception("Error registering artifact in Repository: {}".format(e))

    finally:
        # Remove the temporary tarball built from a source directory.
        if source == "directory" and compressed:
            remove(path)
412 |
|
|
413 |
1 |
def indexation(self, destination, path, package_type, fields):
    """
    Index one package: copy the tarball and its metadata under the
    repository tree and update index.yaml.

    :param destination: index repository path
    :param path: path of the package tarball
    :param package_type: package type (vnf, ns, nst)
    :param fields: dict with the required values (id, version, name, ...)
    """
    self._logger.debug("")
    data_ind = {'name': fields.get('name'), 'description': fields.get('description'),
                'vendor': fields.get('vendor'), 'path': fields.get('path')}

    pkg_index_key = '{}_packages'.format(package_type)
    pkg_id = fields.get('id')
    pkg_version = fields.get('version')
    id_path = join(destination, package_type, pkg_id)
    final_path = join(id_path, pkg_version)

    if isdir(id_path) and isdir(final_path):
        # Exact package version already indexed: warn and do nothing.
        self._logger.warning('{} {} already exists'.format(package_type.upper(), str(path)))
        return

    new_package = not isdir(id_path)
    if new_package:
        mkdir(id_path)
    mkdir(final_path)
    copyfile(path, join(final_path, '{}-{}.tar.gz'.format(pkg_id, pkg_version)))
    # Use context managers so file handles are not leaked (the original
    # opened files inline and never closed them).
    with open(join(final_path, 'metadata.yaml'), 'w') as meta_file:
        yaml.safe_dump(fields, meta_file,
                       default_flow_style=False, width=80, indent=4)
    with open(join(destination, 'index.yaml')) as index_file:
        index = yaml.safe_load(index_file)

    if new_package:
        # First version of this package id: it is the latest by definition.
        index[pkg_index_key][pkg_id] = {pkg_version: data_ind}
        index[pkg_index_key][pkg_id]['latest'] = pkg_version
    else:
        index[pkg_index_key][pkg_id][pkg_version] = data_ind
        # Promote to 'latest' only if strictly newer (PEP 440 comparison).
        if versioning.parse(index[pkg_index_key][pkg_id]['latest']) < versioning.parse(pkg_version):
            index[pkg_index_key][pkg_id]['latest'] = pkg_version

    with open(join(destination, 'index.yaml'), 'w') as index_file:
        yaml.safe_dump(index, index_file,
                       default_flow_style=False, width=80, indent=4)
    self._logger.info('{} {} added in the repository'.format(package_type.upper(), str(path)))
459 |
|
|
460 |
1 |
def current_datetime(self):
    """
    Datetime Generator
    :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
    """
    self._logger.debug("")
    # BUGFIX: time.strftime('%s') is a non-portable epoch-seconds directive,
    # not the fractional seconds the documented format promises. Use
    # datetime's %f to emit microseconds (UTC, matching the trailing 'Z').
    from datetime import datetime
    return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
467 |
|
|
468 |
1 |
def init_directory(self, destination):
    """
    Initialize the index directory. Creation of index.yaml, and the
    directories for vnf, ns and nst packages.

    :param destination: root folder of the repository index
    :return: None
    """
    self._logger.debug("")
    if not isdir(destination):
        mkdir(destination)
    if not isfile(join(destination, "index.yaml")):
        # ROBUSTNESS: tolerate pre-existing type folders (e.g. left over
        # from a partially initialized run); the original unconditional
        # mkdir calls raised FileExistsError in that case.
        for pkg_dir in ("vnf", "ns", "nst"):
            if not isdir(join(destination, pkg_dir)):
                mkdir(join(destination, pkg_dir))
        index_data = {
            "apiVersion": "v1",
            "generated": self.current_datetime(),
            "vnf_packages": {},
            "ns_packages": {},
            "nst_packages": {},
        }
        with open(join(destination, "index.yaml"), "w") as outfile:
            yaml.safe_dump(
                index_data, outfile, default_flow_style=False, width=80, indent=4
            )