1 |
|
# |
2 |
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may |
3 |
|
# not use this file except in compliance with the License. You may obtain |
4 |
|
# a copy of the License at |
5 |
|
# |
6 |
|
# http://www.apache.org/licenses/LICENSE-2.0 |
7 |
|
# |
8 |
|
# Unless required by applicable law or agreed to in writing, software |
9 |
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
10 |
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
11 |
|
# License for the specific language governing permissions and limitations |
12 |
|
# under the License. |
13 |
|
# |
14 |
|
|
15 |
1 |
""" |
16 |
|
OSM Repo API handling |
17 |
|
""" |
18 |
1 |
from datetime import datetime, timezone
import glob
import logging
from os import listdir, mkdir, getcwd, remove
from os.path import isfile, isdir, join, abspath
from shutil import copyfile, rmtree
import tarfile
import tempfile
import time

from osm_im.validation import Validation as validation_im
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
from osmclient.common import utils
from packaging import version as versioning
import requests
import yaml
35 |
|
|
36 |
|
|
37 |
1 |
class OSMRepo(Repo): |
38 |
1 |
def __init__(self, http=None, client=None): |
39 |
1 |
self._http = http |
40 |
1 |
self._client = client |
41 |
1 |
self._apiName = "/admin" |
42 |
1 |
self._apiVersion = "/v1" |
43 |
1 |
self._apiResource = "/osmrepos" |
44 |
1 |
self._logger = logging.getLogger("osmclient") |
45 |
1 |
self._apiBase = "{}{}{}".format( |
46 |
|
self._apiName, self._apiVersion, self._apiResource |
47 |
|
) |
48 |
|
|
49 |
1 |
def pkg_list(self, pkgtype, filter=None, repo=None): |
50 |
|
""" |
51 |
|
Returns a repo based on name or id |
52 |
|
""" |
53 |
0 |
self._logger.debug("") |
54 |
0 |
self._client.get_token() |
55 |
|
# Get OSM registered repository list |
56 |
0 |
repositories = self.list() |
57 |
0 |
if repo: |
58 |
0 |
repositories = [r for r in repositories if r["name"] == repo] |
59 |
0 |
if not repositories: |
60 |
0 |
raise ClientException("Not repository found") |
61 |
|
|
62 |
0 |
vnf_repos = [] |
63 |
0 |
for repository in repositories: |
64 |
0 |
try: |
65 |
0 |
r = requests.get("{}/index.yaml".format(repository.get("url"))) |
66 |
|
|
67 |
0 |
if r.status_code == 200: |
68 |
0 |
repo_list = yaml.safe_load(r.text) |
69 |
0 |
vnf_packages = repo_list.get("{}_packages".format(pkgtype)) |
70 |
0 |
for repo in vnf_packages: |
71 |
0 |
versions = vnf_packages.get(repo) |
72 |
0 |
latest = versions.get("latest") |
73 |
0 |
del versions["latest"] |
74 |
0 |
for version in versions: |
75 |
0 |
latest_version = False |
76 |
0 |
if version == latest: |
77 |
0 |
latest_version = True |
78 |
0 |
vnf_repos.append( |
79 |
|
{ |
80 |
|
"vendor": versions[version].get("vendor"), |
81 |
|
"name": versions[version].get("name"), |
82 |
|
"version": version, |
83 |
|
"description": versions[version].get("description"), |
84 |
|
"location": versions[version].get("path"), |
85 |
|
"repository": repository.get("name"), |
86 |
|
"repourl": repository.get("url"), |
87 |
|
"latest": latest_version, |
88 |
|
} |
89 |
|
) |
90 |
|
else: |
91 |
0 |
raise Exception( |
92 |
|
"repository in url {} unreachable".format(repository.get("url")) |
93 |
|
) |
94 |
0 |
except Exception as e: |
95 |
0 |
self._logger.error( |
96 |
|
"Error cannot read from repository {} '{}': {}".format( |
97 |
|
repository["name"], repository["url"], e |
98 |
|
), |
99 |
|
exc_info=True, |
100 |
|
) |
101 |
0 |
continue |
102 |
|
|
103 |
0 |
vnf_repos_filtered = [] |
104 |
0 |
if filter: |
105 |
0 |
for vnf_repo in vnf_repos: |
106 |
0 |
for k, v in vnf_repo.items(): |
107 |
0 |
if v: |
108 |
0 |
kf, vf = filter.split("=") |
109 |
0 |
if k == kf and vf in v: |
110 |
0 |
vnf_repos_filtered.append(vnf_repo) |
111 |
0 |
break |
112 |
0 |
vnf_repos = vnf_repos_filtered |
113 |
0 |
return vnf_repos |
114 |
|
|
115 |
1 |
def get_pkg(self, pkgtype, name, repo, filter, version): |
116 |
|
""" |
117 |
|
Returns the filename of the PKG downloaded to disk |
118 |
|
""" |
119 |
0 |
self._logger.debug("") |
120 |
0 |
self._client.get_token() |
121 |
0 |
f = None |
122 |
0 |
f_name = None |
123 |
|
# Get OSM registered repository list |
124 |
0 |
pkgs = self.pkg_list(pkgtype, filter, repo) |
125 |
0 |
for pkg in pkgs: |
126 |
0 |
if pkg.get("repository") == repo and pkg.get("name") == name: |
127 |
0 |
if "latest" in version: |
128 |
0 |
if not pkg.get("latest"): |
129 |
0 |
continue |
130 |
|
else: |
131 |
0 |
version = pkg.get("version") |
132 |
0 |
if pkg.get("version") == version: |
133 |
0 |
r = requests.get( |
134 |
|
"{}{}".format(pkg.get("repourl"), pkg.get("location")), |
135 |
|
stream=True, |
136 |
|
) |
137 |
0 |
if r.status_code != 200: |
138 |
0 |
raise ClientException("Package not found") |
139 |
|
|
140 |
0 |
with tempfile.NamedTemporaryFile(delete=False) as f: |
141 |
0 |
f.write(r.raw.read()) |
142 |
0 |
f_name = f.name |
143 |
0 |
if not f_name: |
144 |
0 |
raise ClientException( |
145 |
|
"{} {} not found at repo {}".format(pkgtype, name, repo) |
146 |
|
) |
147 |
0 |
return f_name |
148 |
|
|
149 |
1 |
def pkg_get(self, pkgtype, name, repo, version, filter): |
150 |
0 |
pkg_name = self.get_pkg(pkgtype, name, repo, filter, version) |
151 |
0 |
if not pkg_name: |
152 |
0 |
raise ClientException("Package not found") |
153 |
0 |
folder, descriptor = self.zip_extraction(pkg_name) |
154 |
0 |
with open(descriptor) as pkg: |
155 |
0 |
pkg_descriptor = yaml.safe_load(pkg) |
156 |
0 |
rmtree(folder, ignore_errors=False) |
157 |
0 |
if ( |
158 |
|
pkgtype == "vnf" |
159 |
|
and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog")) |
160 |
|
) or ( |
161 |
|
pkgtype == "ns" |
162 |
|
and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog")) |
163 |
|
): |
164 |
0 |
raise ClientException("Wrong Package type") |
165 |
0 |
return pkg_descriptor |
166 |
|
|
167 |
1 |
    def repo_index(self, origin=".", destination="."):
        """
        Repo Index main function.

        Scans `origin` for .tar.gz artifacts and first-level package
        directories, validates and registers each one under `destination`,
        then logs how many packages of each type were indexed.

        :param origin: origin directory for getting all the artifacts
        :param destination: destination folder for create and index the valid artifacts
        """
        self._logger.debug("Starting index composition")
        # Avoid indexing the repository into itself when both default to ".".
        if destination == ".":
            if origin == destination:
                destination = "repository"

        destination = abspath(destination)
        origin = abspath(origin)
        self._logger.debug(f"Paths {destination}, {origin}")
        # NOTE(review): abspath() already returns an absolute path on POSIX,
        # so these two guards look unreachable there — presumably kept as a
        # belt-and-braces safety net; confirm before removing.
        if origin[0] != "/":
            origin = join(getcwd(), origin)
        if destination[0] != "/":
            destination = join(getcwd(), destination)

        # Make sure the repository skeleton (index.yaml, vnf/ns/nst dirs) exists.
        self.init_directory(destination)
        artifacts = []
        directories = []
        for f in listdir(origin):
            self._logger.debug(f"Element: {join(origin,f)}")
            if isfile(join(origin, f)) and f.endswith(".tar.gz"):
                artifacts.append(f)
            elif (
                isdir(join(origin, f))
                # Skip the destination folder itself and hidden directories.
                and f != destination.split("/")[-1]
                and not f.startswith(".")
            ):
                directories.append(
                    f
                )  # TODO: Document that nested directories are not supported
            else:
                self._logger.debug(f"Ignoring {f}")
        self._logger.debug(f"Artifacts: {artifacts}")
        for package in artifacts:
            self.register_package_in_repository(
                join(origin, package), origin, destination, kind="artifact"
            )
        self._logger.debug(f"Directories: {directories}")
        for package in directories:
            self.register_package_in_repository(
                join(origin, package), origin, destination, kind="directory"
            )
        self._logger.info("\nFinal Results: ")
        # Count indexed packages by globbing the metadata files just written.
        self._logger.info(
            "VNF Packages Indexed: "
            + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml")))
        )
        self._logger.info(
            "NS Packages Indexed: "
            + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml")))
        )

        self._logger.info(
            "NST Packages Indexed: "
            + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml")))
        )
227 |
|
|
228 |
1 |
def fields_building(self, descriptor_dict, file, package_type): |
229 |
|
""" |
230 |
|
From an artifact descriptor, obtain the fields required for indexing |
231 |
|
:param descriptor_dict: artifact description |
232 |
|
:param file: artifact package |
233 |
|
:param package_type: type of artifact (vnf, ns, nst) |
234 |
|
:return: fields |
235 |
|
""" |
236 |
0 |
self._logger.debug("") |
237 |
|
|
238 |
0 |
fields = {} |
239 |
0 |
base_path = "/{}/".format(package_type) |
240 |
0 |
aux_dict = {} |
241 |
0 |
if package_type == "vnf": |
242 |
0 |
if descriptor_dict.get("vnfd-catalog", False): |
243 |
0 |
aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0] |
244 |
0 |
elif descriptor_dict.get("vnfd:vnfd-catalog"): |
245 |
0 |
aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get( |
246 |
|
"vnfd", [{}] |
247 |
|
)[0] |
248 |
0 |
elif descriptor_dict.get("vnfd"): |
249 |
0 |
aux_dict = descriptor_dict["vnfd"] |
250 |
0 |
if aux_dict.get("vnfd"): |
251 |
0 |
aux_dict = aux_dict["vnfd"][0] |
252 |
|
else: |
253 |
0 |
msg = f"Unexpected descriptor format {descriptor_dict}" |
254 |
0 |
self._logger.error(msg) |
255 |
0 |
raise ValueError(msg) |
256 |
0 |
self._logger.debug( |
257 |
|
f"Extracted descriptor info for {package_type}: {aux_dict}" |
258 |
|
) |
259 |
0 |
images = [] |
260 |
0 |
for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())): |
261 |
0 |
images.append(vdu.get("image", vdu.get("name"))) |
262 |
0 |
fields["images"] = images |
263 |
0 |
elif package_type == "ns": |
264 |
0 |
if descriptor_dict.get("nsd-catalog", False): |
265 |
0 |
aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0] |
266 |
0 |
elif descriptor_dict.get("nsd:nsd-catalog"): |
267 |
0 |
aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[ |
268 |
|
0 |
269 |
|
] |
270 |
0 |
elif descriptor_dict.get("nsd"): |
271 |
0 |
aux_dict = descriptor_dict["nsd"] |
272 |
0 |
if aux_dict.get("nsd"): |
273 |
0 |
aux_dict = descriptor_dict["nsd"]["nsd"][0] |
274 |
|
else: |
275 |
0 |
msg = f"Unexpected descriptor format {descriptor_dict}" |
276 |
0 |
self._logger.error(msg) |
277 |
0 |
raise ValueError(msg) |
278 |
0 |
vnfs = [] |
279 |
0 |
if aux_dict.get("constituent-vnfd"): |
280 |
0 |
for vnf in aux_dict.get("constituent-vnfd", ()): |
281 |
0 |
vnfs.append(vnf.get("vnfd-id-ref")) |
282 |
|
else: |
283 |
0 |
vnfs = aux_dict.get("vnfd-id") |
284 |
0 |
self._logger.debug("Used VNFS in the NSD: " + str(vnfs)) |
285 |
0 |
fields["vnfd-id-ref"] = vnfs |
286 |
0 |
elif package_type == "nst": |
287 |
0 |
if descriptor_dict.get("nst-catalog", False): |
288 |
0 |
aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0] |
289 |
0 |
elif descriptor_dict.get("nst:nst-catalog"): |
290 |
0 |
aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[ |
291 |
|
0 |
292 |
|
] |
293 |
0 |
elif descriptor_dict.get("nst"): |
294 |
0 |
aux_dict = descriptor_dict["nst"] |
295 |
0 |
if aux_dict.get("nst"): |
296 |
0 |
aux_dict = descriptor_dict["nst"]["nst"][0] |
297 |
0 |
nsds = [] |
298 |
0 |
for nsd in aux_dict.get("netslice-subnet", ()): |
299 |
0 |
nsds.append(nsd.get("nsd-ref")) |
300 |
0 |
self._logger.debug("Used NSDs in the NST: " + str(nsds)) |
301 |
0 |
if not nsds: |
302 |
0 |
msg = f"Unexpected descriptor format {descriptor_dict}" |
303 |
0 |
self._logger.error(msg) |
304 |
0 |
raise ValueError(msg) |
305 |
0 |
fields["nsd-id-ref"] = nsds |
306 |
|
else: |
307 |
0 |
msg = f"Unexpected descriptor format {descriptor_dict}" |
308 |
0 |
self._logger.error(msg) |
309 |
0 |
raise ValueError(msg) |
310 |
|
# Repo search is based on 'name' entry in index.yaml. It is mandatory then |
311 |
0 |
fields["name"] = aux_dict.get("name", aux_dict["product-name"]) |
312 |
0 |
fields["id"] = aux_dict.get("id") |
313 |
0 |
fields["description"] = aux_dict.get("description") |
314 |
0 |
fields["vendor"] = aux_dict.get("vendor") |
315 |
0 |
fields["version"] = str(aux_dict.get("version", "1.0")) |
316 |
0 |
fields["path"] = "{}{}/{}/{}-{}.tar.gz".format( |
317 |
|
base_path, |
318 |
|
fields["id"], |
319 |
|
fields["version"], |
320 |
|
fields.get("id"), |
321 |
|
fields.get("version"), |
322 |
|
) |
323 |
0 |
return fields |
324 |
|
|
325 |
1 |
def zip_extraction(self, file_name): |
326 |
|
""" |
327 |
|
Validation of artifact. |
328 |
|
:param file: file path |
329 |
|
:return: status details, status, fields, package_type |
330 |
|
""" |
331 |
0 |
self._logger.debug("Decompressing package file") |
332 |
0 |
temp_file = "/tmp/{}".format(file_name.split("/")[-1]) |
333 |
0 |
if file_name != temp_file: |
334 |
0 |
copyfile(file_name, temp_file) |
335 |
0 |
with tarfile.open(temp_file, "r:gz") as tar: |
336 |
0 |
folder = tar.getnames()[0].split("/")[0] |
337 |
0 |
tar.extractall() |
338 |
|
|
339 |
0 |
remove(temp_file) |
340 |
0 |
descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0] |
341 |
0 |
return folder, descriptor_file |
342 |
|
|
343 |
1 |
    def validate_artifact(self, path, origin, kind):
        """
        Validation of artifact.

        Locates the YAML descriptor (directly for a directory package, after
        extraction for a tar.gz), runs the information-model validation, and
        derives the package type plus its index fields. Never raises: failures
        are reported through the returned status tuple.

        :param path: file path
        :param origin: folder where the package is located
        :param kind: flag to select the correct file type (directory or artifact)
        :return: status details, status, fields, package_type
        """
        self._logger.debug(f"Validating {path} {kind}")
        package_type = ""
        folder = ""
        try:
            if kind == "directory":
                # First YAML file found in the directory is taken as the descriptor.
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                # tar.gz: extract it (into the CWD) and locate its descriptor.
                folder, descriptor_file = self.zip_extraction(path)
                folder = join(origin, folder)
                self._logger.debug(
                    f"Kind is an artifact (tar.gz). Folder: {folder}. Descriptor_file: {descriptor_file}"
                )

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
            self._logger.debug(f"Descriptor data: {descriptor_data}")
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            try:
                # NOTE(review): called unbound with this OSMRepo instance as
                # `self` — presumably pyangbind_validation only uses `self` for
                # logging; confirm against osm_im.validation.
                validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            except Exception as e:
                self._logger.error(e, exc_info=True)
                raise e
            descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
            # "nst" must be tested before "ns": "nst" contains "ns" as substring.
            if "vnf" in descriptor_type_ref:
                package_type = "vnf"
            elif "nst" in descriptor_type_ref:
                package_type = "nst"
            elif "ns" in descriptor_type_ref:
                package_type = "ns"
            else:
                msg = f"Unknown package type {descriptor_type_ref}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug(f"Descriptor successfully validated {fields}")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Any failure is converted to a "not valid" status for the caller.
            # Delete the folder we just created
            return {"detail": str(e)}, False, {}, package_type
        finally:
            # Clean up the extraction folder regardless of the outcome.
            if folder:
                rmtree(folder, ignore_errors=True)
405 |
|
|
406 |
1 |
def register_package_in_repository(self, path, origin, destination, kind): |
407 |
|
""" |
408 |
|
Registration of one artifact in a repository |
409 |
|
:param path: absolute path of the VNF/NS package |
410 |
|
:param origin: folder where the package is located |
411 |
|
:param destination: path for index creation |
412 |
|
:param kind: artifact (tar.gz) or directory |
413 |
|
""" |
414 |
0 |
self._logger.debug("") |
415 |
0 |
pt = PackageTool() |
416 |
0 |
compressed = False |
417 |
0 |
try: |
418 |
0 |
fields = {} |
419 |
0 |
_, valid, fields, package_type = self.validate_artifact(path, origin, kind) |
420 |
0 |
if not valid: |
421 |
0 |
raise Exception( |
422 |
|
"{} {} Not well configured.".format(package_type.upper(), str(path)) |
423 |
|
) |
424 |
|
else: |
425 |
0 |
if kind == "directory": |
426 |
0 |
path = pt.build(path) |
427 |
0 |
self._logger.debug(f"Directory path {path}") |
428 |
0 |
compressed = True |
429 |
0 |
fields["checksum"] = utils.md5(path) |
430 |
0 |
self.indexation(destination, path, package_type, fields) |
431 |
|
|
432 |
0 |
except Exception as e: |
433 |
0 |
self._logger.exception( |
434 |
|
"Error registering package in Repository: {}".format(e) |
435 |
|
) |
436 |
0 |
raise ClientException(e) |
437 |
|
|
438 |
|
finally: |
439 |
0 |
if kind == "directory" and compressed: |
440 |
0 |
remove(path) |
441 |
|
|
442 |
1 |
def indexation(self, destination, path, package_type, fields): |
443 |
|
""" |
444 |
|
Process for index packages |
445 |
|
:param destination: index repository path |
446 |
|
:param path: path of the package |
447 |
|
:param package_type: package type (vnf, ns, nst) |
448 |
|
:param fields: dict with the required values |
449 |
|
""" |
450 |
0 |
self._logger.debug(f"Processing {destination} {path} {package_type} {fields}") |
451 |
|
|
452 |
0 |
data_ind = { |
453 |
|
"name": fields.get("name"), |
454 |
|
"description": fields.get("description"), |
455 |
|
"vendor": fields.get("vendor"), |
456 |
|
"path": fields.get("path"), |
457 |
|
} |
458 |
0 |
self._logger.debug(data_ind) |
459 |
0 |
final_path = join( |
460 |
|
destination, package_type, fields.get("id"), fields.get("version") |
461 |
|
) |
462 |
0 |
if isdir(join(destination, package_type, fields.get("id"))): |
463 |
0 |
if isdir(final_path): |
464 |
0 |
self._logger.warning( |
465 |
|
"{} {} already exists".format(package_type.upper(), str(path)) |
466 |
|
) |
467 |
|
else: |
468 |
0 |
mkdir(final_path) |
469 |
0 |
copyfile( |
470 |
|
path, |
471 |
|
final_path |
472 |
|
+ "/" |
473 |
|
+ fields.get("id") |
474 |
|
+ "-" |
475 |
|
+ fields.get("version") |
476 |
|
+ ".tar.gz", |
477 |
|
) |
478 |
0 |
yaml.safe_dump( |
479 |
|
fields, |
480 |
|
open(final_path + "/" + "metadata.yaml", "w"), |
481 |
|
default_flow_style=False, |
482 |
|
width=80, |
483 |
|
indent=4, |
484 |
|
) |
485 |
0 |
index = yaml.safe_load(open(destination + "/index.yaml")) |
486 |
|
|
487 |
0 |
index["{}_packages".format(package_type)][fields.get("id")][ |
488 |
|
fields.get("version") |
489 |
|
] = data_ind |
490 |
0 |
if versioning.parse( |
491 |
|
index["{}_packages".format(package_type)][fields.get("id")][ |
492 |
|
"latest" |
493 |
|
] |
494 |
|
) < versioning.parse(fields.get("version")): |
495 |
0 |
index["{}_packages".format(package_type)][fields.get("id")][ |
496 |
|
"latest" |
497 |
|
] = fields.get("version") |
498 |
0 |
yaml.safe_dump( |
499 |
|
index, |
500 |
|
open(destination + "/index.yaml", "w"), |
501 |
|
default_flow_style=False, |
502 |
|
width=80, |
503 |
|
indent=4, |
504 |
|
) |
505 |
0 |
self._logger.info( |
506 |
|
"{} {} added in the repository".format( |
507 |
|
package_type.upper(), str(path) |
508 |
|
) |
509 |
|
) |
510 |
|
else: |
511 |
0 |
mkdir(destination + "/{}/".format(package_type) + fields.get("id")) |
512 |
0 |
mkdir(final_path) |
513 |
0 |
copyfile( |
514 |
|
path, |
515 |
|
final_path |
516 |
|
+ "/" |
517 |
|
+ fields.get("id") |
518 |
|
+ "-" |
519 |
|
+ fields.get("version") |
520 |
|
+ ".tar.gz", |
521 |
|
) |
522 |
0 |
yaml.safe_dump( |
523 |
|
fields, |
524 |
|
open(join(final_path, "metadata.yaml"), "w"), |
525 |
|
default_flow_style=False, |
526 |
|
width=80, |
527 |
|
indent=4, |
528 |
|
) |
529 |
0 |
index = yaml.safe_load(open(destination + "/index.yaml")) |
530 |
|
|
531 |
0 |
index["{}_packages".format(package_type)][fields.get("id")] = { |
532 |
|
fields.get("version"): data_ind |
533 |
|
} |
534 |
0 |
index["{}_packages".format(package_type)][fields.get("id")]["latest"] = ( |
535 |
|
fields.get("version") |
536 |
|
) |
537 |
0 |
yaml.safe_dump( |
538 |
|
index, |
539 |
|
open(join(destination, "index.yaml"), "w"), |
540 |
|
default_flow_style=False, |
541 |
|
width=80, |
542 |
|
indent=4, |
543 |
|
) |
544 |
0 |
self._logger.info( |
545 |
|
"{} {} added in the repository".format(package_type.upper(), str(path)) |
546 |
|
) |
547 |
|
|
548 |
1 |
def current_datetime(self): |
549 |
|
""" |
550 |
|
Datetime Generator |
551 |
|
:return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z" |
552 |
|
""" |
553 |
1 |
self._logger.debug("") |
554 |
1 |
return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ") |
555 |
|
|
556 |
1 |
def init_directory(self, destination): |
557 |
|
""" |
558 |
|
Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns |
559 |
|
:param destination: |
560 |
|
:return: |
561 |
|
""" |
562 |
1 |
self._logger.debug("") |
563 |
1 |
if not isdir(destination): |
564 |
1 |
mkdir(destination) |
565 |
1 |
if not isfile(join(destination, "index.yaml")): |
566 |
1 |
mkdir(join(destination, "vnf")) |
567 |
1 |
mkdir(join(destination, "ns")) |
568 |
1 |
mkdir(join(destination, "nst")) |
569 |
1 |
index_data = { |
570 |
|
"apiVersion": "v1", |
571 |
|
"generated": self.current_datetime(), |
572 |
|
"vnf_packages": {}, |
573 |
|
"ns_packages": {}, |
574 |
|
"nst_packages": {}, |
575 |
|
} |
576 |
1 |
with open(join(destination, "index.yaml"), "w") as outfile: |
577 |
1 |
yaml.safe_dump( |
578 |
|
index_data, outfile, default_flow_style=False, width=80, indent=4 |
579 |
|
) |