# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tarfile
import yaml
import json
import copy
import os
import shutil
import functools
import re

# import logging
from deepdiff import DeepDiff
from hashlib import md5
from osm_common.dbbase import DbException, deep_update_rfc7396
from http import HTTPStatus
from time import time
from uuid import uuid4
from re import fullmatch
from zipfile import ZipFile
from osm_nbi.validation import (
    ValidationError,
    pdu_new_schema,
    pdu_edit_schema,
    validate_input,
    vnfpkgop_new_schema,
)
from osm_nbi.base_topic import (
    BaseTopic,
    EngineException,
    get_iterable,
    detect_descriptor_usage,
)
from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
from osm_im.nst import nst as nst_im
from pyangbind.lib.serialise import pybindJSONDecoder
import pyangbind.lib.pybindJSON as pybindJSON
from osm_nbi import utils

__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"

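# Helm chart references must be a lowercase alphanumeric/hyphen name, optionally
# preceded by a repository name and "/" (illustrative examples: "mychart",
# "myrepo/mychart"; uppercase letters or a leading "-" will not match).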
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)


class DescriptorTopic(BaseTopic):
    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def _validate_input_new(self, indata, storage_params, force=False):
        return indata

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes file system storage associated with the descriptor
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: To not send message (False) or store content (list) instead
        :return: None if ok or raises EngineException with the problem
        """
        self.fs.file_delete(_id, ignore_non_exist=True)
        self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
        # Remove file revisions
        if "revision" in db_content["_admin"]:
            revision = db_content["_admin"]["revision"]
            while revision > 0:
                self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
                revision = revision - 1

    @staticmethod
    def get_one_by_id(db, session, topic, id):
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the
        content (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may need to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No need to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None

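    # The SOL005 two-step onboarding flow (illustrative; these endpoints match
    # the links exposed by sol005_projection below):
    #   1) POST /vnfpkgm/v1/vnf_packages                       -> new()
    #   2) PUT  /vnfpkgm/v1/vnf_packages/{id}/package_content  -> upload_content()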
    def upload_content(self, session, _id, indata, kwargs, headers):
        """
        Used for receiving content by chunks (with a transaction_id header and/or gzip file). It will store and extract it.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: the nsd/vnfd is already created, this is its id
        :param indata: http body request
        :param kwargs: user query string to override parameters. NOT USED
        :param headers: http request headers
        :return: True if package is completely uploaded or False if partial content has been uploaded.
            Raises an exception on error.
        """
        # Check that _id exists and it is valid
        current_desc = self.show(session, _id)

        content_range_text = headers.get("Content-Range")
        expected_md5 = headers.get("Content-File-MD5")
        compressed = None
        content_type = headers.get("Content-Type")
        if content_type and (
            "application/gzip" in content_type
            or "application/x-gzip" in content_type
        ):
            compressed = "gzip"
        if content_type and "application/zip" in content_type:
            compressed = "zip"
        filename = headers.get("Content-Filename")
        if not filename and compressed:
            filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
        elif not filename:
            filename = "package"

        revision = 1
        if "revision" in current_desc["_admin"]:
            revision = current_desc["_admin"]["revision"] + 1

        # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
        file_pkg = None
        error_text = ""
        fs_rollback = []

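        # Chunked uploads: clients may send the package over several requests,
        # each carrying a "Content-Range: bytes start-end/total" header (RFC 7233).
        # The package is only unpacked and validated once the final chunk arrives
        # (end == total); earlier chunks just append to the partial file.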
        try:
            if content_range_text:
                content_range = (
                    content_range_text.replace("-", " ").replace("/", " ").split()
                )
                if (
                    content_range[0] != "bytes"
                ):  # TODO check x<y not negative < total....
                    raise IndexError()
                start = int(content_range[1])
                end = int(content_range[2]) + 1
                total = int(content_range[3])
            else:
                start = 0
            # Rather than using a temp folder, we will store the package in a folder based on
            # the current revision.
            proposed_revision_path = (
                _id + ":" + str(revision)
            )  # all the content is uploaded here and, if ok, it is renamed to the final folder

            if start:
                if not self.fs.file_exists(proposed_revision_path, "dir"):
                    raise EngineException(
                        "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                    )
            else:
                self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                self.fs.mkdir(proposed_revision_path)
                fs_rollback.append(proposed_revision_path)

            storage = self.fs.get_params()
            storage["folder"] = proposed_revision_path

            file_path = (proposed_revision_path, filename)
            if self.fs.file_exists(file_path, "file"):
                file_size = self.fs.file_size(file_path)
            else:
                file_size = 0
            if file_size != start:
                raise EngineException(
                    "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                        file_size, start
                    ),
                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                )
            file_pkg = self.fs.file_open(file_path, "a+b")
            if isinstance(indata, dict):
                indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                file_pkg.write(indata_text.encode(encoding="utf-8"))
            else:
                indata_len = 0
                while True:
                    indata_text = indata.read(4096)
                    indata_len += len(indata_text)
                    if not indata_text:
                        break
                    file_pkg.write(indata_text)
            if content_range_text:
                if indata_len != end - start:
                    raise EngineException(
                        "Mismatch between Content-Range header {}-{} and body length of {}".format(
                            start, end - 1, indata_len
                        ),
                        HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                    )
                if end != total:
                    # TODO update to UPLOADING
                    return False

            # PACKAGE UPLOADED
            if expected_md5:
                file_pkg.seek(0, 0)
                file_md5 = md5()
                chunk_data = file_pkg.read(1024)
                while chunk_data:
                    file_md5.update(chunk_data)
                    chunk_data = file_pkg.read(1024)
                if expected_md5 != file_md5.hexdigest():
                    raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
            file_pkg.seek(0, 0)
            if compressed == "gzip":
                tar = tarfile.open(mode="r", fileobj=file_pkg)
                descriptor_file_name = None
                for tarinfo in tar:
                    tarname = tarinfo.name
                    tarname_path = tarname.split("/")
                    if (
                        not tarname_path[0] or ".." in tarname_path
                    ):  # starting with "/" means an absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor tar.gz"
                        )
                    if len(tarname_path) == 1 and not tarinfo.isdir():
                        raise EngineException(
                            "All files must be inside a dir for package descriptor tar.gz"
                        )
                    if (
                        tarname.endswith(".yaml")
                        or tarname.endswith(".json")
                        or tarname.endswith(".yml")
                    ):
                        storage["pkg-dir"] = tarname_path[0]
                        if len(tarname_path) == 2:
                            if descriptor_file_name:
                                raise EngineException(
                                    "Found more than one descriptor file at package descriptor tar.gz"
                                )
                            descriptor_file_name = tarname
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor tar.gz"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(tar, proposed_revision_path)
                with self.fs.file_open(
                    (proposed_revision_path, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            elif compressed == "zip":
                zipfile = ZipFile(file_pkg)
                descriptor_file_name = None
                for package_file in zipfile.infolist():
                    zipfilename = package_file.filename
                    file_path = zipfilename.split("/")
                    if (
                        not file_path[0] or ".." in zipfilename
                    ):  # starting with "/" means an absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor zip"
                        )

                    if (
                        zipfilename.endswith(".yaml")
                        or zipfilename.endswith(".json")
                        or zipfilename.endswith(".yml")
                    ) and (
                        zipfilename.find("/") < 0
                        or zipfilename.find("Definitions") >= 0
                    ):
                        storage["pkg-dir"] = ""
                        if descriptor_file_name:
                            raise EngineException(
                                "Found more than one descriptor file at package descriptor zip"
                            )
                        descriptor_file_name = zipfilename
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor zip"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(zipfile, proposed_revision_path)

                with self.fs.file_open(
                    (proposed_revision_path, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            else:
                content = file_pkg.read()
                storage["descriptor"] = descriptor_file_name = filename

            if descriptor_file_name.endswith(".json"):
                error_text = "Invalid json format "
                indata = json.loads(content)
            else:
                error_text = "Invalid yaml format "
                indata = yaml.safe_load(content)

            # Need to close the file package here so it can be copied from the
            # revision to the current, unrevisioned record
            if file_pkg:
                file_pkg.close()
            file_pkg = None

            # Fetch both the incoming, proposed revision and the original revision so we
            # can call a validate method to compare them
            current_revision_path = _id + "/"
            self.fs.sync(from_path=current_revision_path)
            self.fs.sync(from_path=proposed_revision_path)

            if revision > 1:
                try:
                    self._validate_descriptor_changes(
                        _id,
                        descriptor_file_name,
                        current_revision_path,
                        proposed_revision_path,
                    )
                except Exception as e:
                    shutil.rmtree(
                        self.fs.path + current_revision_path, ignore_errors=True
                    )
                    shutil.rmtree(
                        self.fs.path + proposed_revision_path, ignore_errors=True
                    )
                    # Only delete the new revision. We need to keep the original version in place
                    # as it has not been changed.
                    self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                    raise e

            indata = self._remove_envelop(indata)

            # Override descriptor with query string kwargs
            if kwargs:
                self._update_input_with_kwargs(indata, kwargs)

            current_desc["_admin"]["storage"] = storage
            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
            current_desc["_admin"]["operationalState"] = "ENABLED"
            current_desc["_admin"]["modified"] = time()
            current_desc["_admin"]["revision"] = revision

            deep_update_rfc7396(current_desc, indata)
            current_desc = self.check_conflict_on_edit(
                session, current_desc, indata, _id=_id
            )

            # Copy the revision to the active package name by its original id
            shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
            os.rename(
                self.fs.path + proposed_revision_path,
                self.fs.path + current_revision_path,
            )
            self.fs.file_delete(current_revision_path, ignore_non_exist=True)
            self.fs.mkdir(current_revision_path)
            self.fs.reverse_sync(from_path=current_revision_path)

            shutil.rmtree(self.fs.path + _id)

            self.db.replace(self.topic, _id, current_desc)

            # Store a copy of the package as a point in time revision
            revision_desc = dict(current_desc)
            revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
            self.db.create(self.topic + "_revisions", revision_desc)
            fs_rollback = []

            indata["_id"] = _id
            self._send_msg("edited", indata)

            # TODO if descriptor has changed because kwargs update content and remove cached zip
            # TODO if zip is not present creates one
            return True

        except EngineException:
            raise
        except IndexError:
            raise EngineException(
                "invalid Content-Range header format. Expected 'bytes start-end/total'",
                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
            )
        except IOError as e:
            raise EngineException(
                "invalid upload transaction sequence: '{}'".format(e),
                HTTPStatus.BAD_REQUEST,
            )
        except tarfile.ReadError as e:
            raise EngineException(
                "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
            )
        except (ValueError, yaml.YAMLError) as e:
            raise EngineException(error_text + str(e))
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
        finally:
            if file_pkg:
                file_pkg.close()
            for file in fs_rollback:
                self.fs.file_delete(file, ignore_non_exist=True)

    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip and/or text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Package does not contain artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype   accept  ZIP  TEXT  -> result
        # manyfiles         yes  X     -> zip
        #                   no   yes   -> error
        # onefile           yes  no    -> zip
        #                   X    yes   -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there is more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contain several files need to be retrieved with the "
                "'application/zip' Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only the 'text/plain' Accept header is allowed for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )

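    # Illustrative example of the YANG prefix stripping done by the helper below:
    # {"etsi-nfv-vnfd:id": "x", "df": [{"vnfd:id": "d"}]} -> {"id": "x", "df": [{"id": "d"}]}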
    def _remove_yang_prefixes_from_descriptor(self, descriptor):
        new_descriptor = {}
        for k, v in descriptor.items():
            new_v = v
            if isinstance(v, dict):
                new_v = self._remove_yang_prefixes_from_descriptor(v)
            elif isinstance(v, list):
                new_v = list()
                for x in v:
                    if isinstance(x, dict):
                        new_v.append(self._remove_yang_prefixes_from_descriptor(x))
                    else:
                        new_v.append(x)
            new_descriptor[k.split(":")[-1]] = new_v
        return new_descriptor

    def pyangbind_validation(self, item, data, force=False):
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )

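    # Editing semantics of the method below (illustrative): PATCHing
    # {"operationalState": "ENABLED"} on a descriptor that is already ENABLED
    # is rejected with 409 CONFLICT.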
    def _validate_input_edit(self, indata, content, force=False):
        # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata

    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass


class VnfdTopic(DescriptorTopic):
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("etsi-nfv-vnfd:vnfd"):
            if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
                raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
            clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
        elif clean_indata.get("vnfd"):
            if not isinstance(clean_indata["vnfd"], dict):
                raise EngineException("'vnfd' must be a dict")
            clean_indata = clean_indata["vnfd"]

        return clean_indata

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        # set type of vnfd
        contains_pdu = False
        contains_vdu = False
        for vdu in get_iterable(final_content.get("vdu")):
            if vdu.get("pdu-type"):
                contains_pdu = True
            else:
                contains_vdu = True
        if contains_pdu:
            final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
        elif contains_vdu:
            final_content["_admin"]["type"] = "vnfd"
        # if it contains neither vdu nor pdu, do not fill type
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
        that the VNFD can be public and used by NSDs of other projects. Also check that there are no deployments
        (vnfrs) that use this vnfd.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)
        self.validate_healing_group_descriptor(indata)
        self.validate_alarm_group_descriptor(indata)
        self.validate_storage_compute_descriptor(indata)
        self.validate_helm_chart(indata)

        return indata

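    # Illustrative values for the check below: "mychart" and "myrepo/mychart"
    # satisfy valid_helm_chart_re, while "MyChart" or "-chart" do not.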
    @staticmethod
    def validate_helm_chart(indata):
        kdus = indata.get("kdu", [])
        for kdu in kdus:
            helm_chart_value = kdu.get("helm-chart")
            if not helm_chart_value:
                continue
            if not valid_helm_chart_re.match(helm_chart_value):
                raise EngineException(
                    "helm-chart '{}' is not valid".format(helm_chart_value),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )

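    # A minimal passing fragment for the check below (illustrative):
    #   mgmt-cp: vnf-mgmt-ext
    #   ext-cpd:
    #     - id: vnf-mgmt-ext
    # A mgmt-cp naming no existing ext-cpd raises 422 UNPROCESSABLE_ENTITY.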
    @staticmethod
    def validate_mgmt_interface_connection_point(indata):
        if not indata.get("vdu"):
            return
        if not indata.get("mgmt-cp"):
            raise EngineException(
                "'mgmt-cp' is a mandatory field and it is not defined",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

        for cp in get_iterable(indata.get("ext-cpd")):
            if cp["id"] == indata["mgmt-cp"]:
                break
        else:
            raise EngineException(
                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def validate_vdu_internal_connection_points(vdu):
        int_cpds = set()
        for cpd in get_iterable(vdu.get("int-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in int_cpds:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
                        vdu["id"], cpd_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            int_cpds.add(cpd_id)

    @staticmethod
    def validate_external_connection_points(indata):
        all_vdus_int_cpds = set()
        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

        ext_cpds = set()
        for cpd in get_iterable(indata.get("ext-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in ext_cpds:
                raise EngineException(
                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            ext_cpds.add(cpd_id)

            int_cpd = cpd.get("int-cpd")
            if int_cpd:
                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                    raise EngineException(
                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                            cpd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
            # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?

    def _validate_vdu_charms_in_package(self, storage_params, indata):
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )

    def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
        if not vdu.get("cloud-init-file"):
            return
        if not self._validate_package_folders(
            storage_params, "cloud_init", vdu["cloud-init-file"]
        ) and not self._validate_package_folders(
            storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
        ):
            raise EngineException(
                "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
                "package".format(indata["id"], vdu["id"])
            )

    def _validate_vnf_charms_in_package(self, storage_params, indata):
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )

    def _validate_package_folders(self, storage_params, folder, file=None):
        if not storage_params:
            return False
        elif not storage_params.get("pkg-dir"):
            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
                f = "{}_/{}".format(storage_params["folder"], folder)
            else:
                f = "{}/{}".format(storage_params["folder"], folder)
            if file:
                return self.fs.file_exists("{}/{}".format(f, file), "file")
            else:
                if self.fs.file_exists(f, "dir"):
                    if self.fs.dir_ls(f):
                        return True
                return False
        else:
            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
                f = "{}_/{}/{}".format(
                    storage_params["folder"], storage_params["pkg-dir"], folder
                )
            else:
                f = "{}/{}/{}".format(
                    storage_params["folder"], storage_params["pkg-dir"], folder
                )
            if file:
                return self.fs.file_exists("{}/{}".format(f, file), "file")
            else:
                if self.fs.file_exists(f, "dir"):
                    if self.fs.dir_ls(f):
                        return True
                return False

    @staticmethod
    def validate_internal_virtual_links(indata):
        all_ivld_ids = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            ivld_id = ivld.get("id")
            if ivld_id and ivld_id in all_ivld_ids:
                raise EngineException(
                    "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_ivld_ids.add(ivld_id)

        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
                if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                    raise EngineException(
                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                        "int-virtual-link-desc".format(
                            vdu["id"], int_cpd["id"], int_cpd_ivld_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                vlp_ivld_id = vlp.get("id")
                if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
                    raise EngineException(
                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
                        "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    @staticmethod
    def validate_monitoring_params(indata):
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                            ivld["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                            vdu["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "df[id='{}']:monitoring-parameter[id='{}']".format(
                            df["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

    @staticmethod
    def validate_scaling_group_descriptor(indata):
        all_monitoring_params = set()
        all_vdu_ids = set()
        for df in get_iterable(indata.get("df")):
            for il in get_iterable(df.get("instantiation-level")):
                for vl in get_iterable(il.get("vdu-level")):
                    all_vdu_ids.add(vl.get("vdu-id"))

        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for deltas in get_iterable(
                    sa.get("aspect-delta-details").get("deltas")
                ):
                    for vds in get_iterable(deltas.get("vdu-delta")):
                        sa_vdu_id = vds.get("id")
                        if sa_vdu_id and sa_vdu_id not in all_vdu_ids:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:aspect-delta-details"
                                "[delta='{}']: "
                                "vdu-id='{}' not defined in vdu".format(
                                    df["id"],
                                    sa["id"],
                                    deltas["id"],
                                    sa_vdu_id,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

    @staticmethod
    def validate_healing_group_descriptor(indata):
        all_vdu_ids = set()
        for df in get_iterable(indata.get("df")):
            for il in get_iterable(df.get("instantiation-level")):
                for vl in get_iterable(il.get("vdu-level")):
                    all_vdu_ids.add(vl.get("vdu-id"))

        for df in get_iterable(indata.get("df")):
            for ha in get_iterable(df.get("healing-aspect")):
                for hp in get_iterable(ha.get("healing-policy")):
                    hp_monitoring_param = hp.get("vdu-id")
                    if hp_monitoring_param and hp_monitoring_param not in all_vdu_ids:
                        raise EngineException(
                            "df[id='{}']:healing-aspect[id='{}']:healing-policy"
                            "[name='{}']: "
                            "vdu-id='{}' not defined in vdu".format(
                                df["id"],
                                ha["id"],
                                hp["event-name"],
                                hp_monitoring_param,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_alarm_group_descriptor(indata):
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for vdus in get_iterable(indata.get("vdu")):
            for alarms in get_iterable(vdus.get("alarm")):
                alarm_monitoring_param = alarms.get("vnf-monitoring-param-ref")
                if (
                    alarm_monitoring_param
                    and alarm_monitoring_param not in all_monitoring_params
                ):
                    raise EngineException(
                        "vdu[id='{}']:alarm[id='{}']:"
                        "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                            vdus["id"],
                            alarms["alarm-id"],
                            alarm_monitoring_param,
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    @staticmethod
    def validate_storage_compute_descriptor(indata):
        all_vsd_ids = set()
        for vsd in get_iterable(indata.get("virtual-storage-desc")):
            all_vsd_ids.add(vsd.get("id"))

        all_vcd_ids = set()
        for vcd in get_iterable(indata.get("virtual-compute-desc")):
            all_vcd_ids.add(vcd.get("id"))

        for vdus in get_iterable(indata.get("vdu")):
            for vsd_ref in vdus.get("virtual-storage-desc"):
                if vsd_ref and vsd_ref not in all_vsd_ids:
                    raise EngineException(
                        "vdu[virtual-storage-desc='{}'] "
                        "not defined in vnfd".format(
                            vsd_ref,
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

        for vdus in get_iterable(indata.get("vdu")):
            vcd_ref = vdus.get("virtual-compute-desc")
            if vcd_ref and vcd_ref not in all_vcd_ids:
                raise EngineException(
                    "vdu[virtual-compute-desc='{}'] "
                    "not defined in vnfd".format(
                        vdus["virtual-compute-desc"],
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    def sol005_projection(self, data):
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
        links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
        links["packageContent"] = {
            "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)

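    # Illustrative: find_software_version({"software-version": "2.1"}) returns
    # "2.1"; a VNFD without the field falls back to the default "1.0".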
    @staticmethod
    def find_software_version(vnfd: dict) -> str:
        """Find the software version in the VNFD descriptors

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            software-version (str)
        """
        default_sw_version = "1.0"
        if vnfd.get("vnfd"):
            vnfd = vnfd["vnfd"]
        if vnfd.get("software-version"):
            return vnfd["software-version"]
        else:
            return default_sw_version

    @staticmethod
    def extract_policies(vnfd: dict) -> dict:
        """Removes the policies from the VNFD descriptors

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict): VNFD which does not include policies
        """
        for df in vnfd.get("df", {}):
            for policy in ["scaling-aspect", "healing-aspect"]:
                if df.get(policy, {}):
                    df.pop(policy)
        for vdu in vnfd.get("vdu", {}):
            for alarm_policy in ["alarm", "monitoring-parameter"]:
                if vdu.get(alarm_policy, {}):
                    vdu.pop(alarm_policy)
        return vnfd

1 |
@staticmethod |
1379 |
1 |
def extract_day12_primitives(vnfd: dict) -> dict: |
1380 |
|
"""Removes the day12 primitives from the VNFD descriptors |
1381 |
|
|
1382 |
|
Args: |
1383 |
|
vnfd (dict): Descriptor as a dictionary |
1384 |
|
|
1385 |
|
Returns: |
1386 |
|
vnfd (dict) |
1387 |
|
""" |
1388 |
1 |
for df_id, df in enumerate(vnfd.get("df", {})): |
1389 |
1 |
if ( |
1390 |
|
df.get("lcm-operations-configuration", {}) |
1391 |
|
.get("operate-vnf-op-config", {}) |
1392 |
|
.get("day1-2") |
1393 |
|
): |
1394 |
1 |
day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get( |
1395 |
|
"day1-2" |
1396 |
|
) |
1397 |
1 |
for config_id, config in enumerate(day12): |
1398 |
1 |
for key in [ |
1399 |
|
"initial-config-primitive", |
1400 |
|
"config-primitive", |
1401 |
|
"terminate-config-primitive", |
1402 |
|
]: |
1403 |
1 |
config.pop(key, None) |
1404 |
1 |
day12[config_id] = config |
1405 |
1 |
df["lcm-operations-configuration"]["operate-vnf-op-config"][ |
1406 |
|
"day1-2" |
1407 |
|
] = day12 |
1408 |
1 |
vnfd["df"][df_id] = df |
1409 |
1 |
return vnfd |
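
    # Illustrative example (not in the original module): day-1/2 primitives
    # are removed from each DF's operate-vnf-op-config, assuming a minimal
    # descriptor:
    #
    #     vnfd = {"df": [{"id": "df1", "lcm-operations-configuration": {
    #         "operate-vnf-op-config": {"day1-2": [
    #             {"id": "c1", "config-primitive": [{"name": "touch"}]}
    #         ]}}}]}
    #     cls.extract_day12_primitives(vnfd)
    #     # day1-2 entries remain, but without their *-config-primitive keys:
    #     # {"df": [{"id": "df1", "lcm-operations-configuration": {
    #     #     "operate-vnf-op-config": {"day1-2": [{"id": "c1"}]}}}]}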

    def remove_modifiable_items(self, vnfd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from VNFD

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict): Descriptor which does not include modifiable contents
        """
        if vnfd.get("vnfd"):
            vnfd = vnfd["vnfd"]
        vnfd.pop("_admin", None)
        # If further extractions need to be done from the VNFD,
        # the new extract methods can be appended to the list below.
        for extract_function in [self.extract_day12_primitives, self.extract_policies]:
            vnfd_temp = extract_function(vnfd)
            vnfd = vnfd_temp
        return vnfd
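
    # Usage sketch (illustrative, not in the original module): this is the
    # normalization step applied before diffing two descriptor revisions, so
    # that only non-modifiable fields are compared. Since the method mutates
    # its argument, a deep copy keeps the original intact; `topic` is assumed
    # to be an instance of this class:
    #
    #     comparable = topic.remove_modifiable_items(copy.deepcopy(raw_vnfd))
    #     # comparable has no _admin, no day-1/2 primitives and no
    #     # scaling/healing/alarm policies.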

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new VNFD descriptors and validates the new descriptor.

        Args:
            old_descriptor_directory (str): Directory of the descriptor which is in use
            new_descriptor_directory (str): Directory of the descriptor which is proposed as the update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case the new descriptor contains disallowed changes
        """
        try:
            # If the VNFD does not exist in the DB or is not in use by any NS,
            # validation is not required.
            vnfd = self.db.get_one(
                "vnfds", {"_id": descriptor_id}, fail_on_empty=False
            )
            if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    # If the software version has changed, the differences do
                    # not need to be validated anymore.
                    if old_content and new_content:
                        if self.find_software_version(
                            old_content
                        ) != self.find_software_version(new_content):
                            return

                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the vnf descriptor.",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "VNF Descriptor could not be processed with error: {}.".format(e)
            )
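
    # Sketch of the diff handling above (illustrative, not in the original
    # module): DeepDiff reports changed leaves under "values_changed" with
    # "root"-prefixed paths, which are stripped and joined for the error text:
    #
    #     from deepdiff import DeepDiff
    #     diff = DeepDiff({"mgmt-cp": "a"}, {"mgmt-cp": "b"})
    #     # diff == {"values_changed": {"root['mgmt-cp']":
    #     #          {"new_value": "b", "old_value": "a"}}}
    #     # -> changed_nodes == "['mgmt-cp']"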


class NsdTopic(DescriptorTopic):
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
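
    # Note on the round trip above (illustrative, not in the original module):
    # the incoming NSD is wrapped in the envelope expected by pyangbind,
    # validated against the etsi-nfv-nsd model, serialized back, and unwrapped:
    #
    #     {"nsd": {"nsd": [data]}}  --load_ietf_json-->  mynsd (pyangbind obj)
    #     mynsd  --dumps(mode="ietf")-->  enveloped dict (possibly with the
    #                                     "etsi-nfv-nsd:nsd" module prefix)
    #     _remove_envelop(...)  -->  plain nsd dict
    #
    # vnf-profile entries are saved beforehand and restored afterwards,
    # presumably to preserve content that the model round trip would alter.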

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata
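
    # Example of the unwrapping above (illustrative, not in the original
    # module); both envelope spellings collapse to the inner descriptor:
    #
    #     NsdTopic._remove_envelop({"nsd": {"nsd": [{"id": "my-ns"}]}})
    #     # -> {"id": "my-ns"}
    #     NsdTopic._remove_envelop(
    #         {"etsi-nfv-nsd:nsd": {"nsd": [{"id": "my-ns"}]}})
    #     # -> {"id": "my-ns"}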

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO: validate that, if the descriptor contains cloud-init-file or charms,
        # the artifacts _admin.storage."pkg-dir" is not None
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data: you cannot set virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
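
    # Example (illustrative, not in the original module): every vnf-profile
    # must reference a vnfd-id declared at the top level of the NSD:
    #
    #     indata = {
    #         "vnfd-id": ["hackfest_basic-vnf"],
    #         "df": [{"id": "default-df", "vnf-profile": [
    #             {"id": "1", "vnfd-id": "hackfest_basic-vnf"}]}],
    #     }
    #     NsdTopic.validate_vnf_profiles_vnfd_id(indata)  # passes silently
    #     # Changing the profile's vnfd-id to an undeclared value raises
    #     # EngineException with HTTP 422.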

    def _validate_input_edit(self, indata, content, force=False):
        # Validation with pyangbind is not needed here because it will be
        # done at check_conflict_on_edit
        """
        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
             '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user-defined data, the data needs to be put in the root
        # of the object to preserve the currently expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks that references to
        vnfd connection points are correct.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edited
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
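
    # Example (illustrative, not in the original module): each
    # constituent-cpd-id in the NSD must name an ext-cpd declared by the
    # referenced VNFD:
    #
    #     df = {"id": "default-df", "vnf-profile": [{
    #         "id": "1", "vnfd-id": "vnf-a",
    #         "virtual-link-connectivity": [{
    #             "virtual-link-profile-id": "mgmt",
    #             "constituent-cpd-id": [{"constituent-cpd-id": "vnf-mgmt-ext"}],
    #         }],
    #     }]}
    #     vnfds_index = {"vnf-a": {"id": "vnf-a",
    #                              "ext-cpd": [{"id": "vnf-mgmt-ext"}]}}
    #     NsdTopic.validate_df_vnf_profiles_constituent_connection_points(
    #         df, vnfds_index)  # passes; an unknown cpd id would raise 422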

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that the NSD can be public and used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super)
        Deletes associated revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the NSD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If more extractions need to be done from the NSD,
        # the new extract methods can be appended to the list below.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            old_descriptor_directory: Directory of the descriptor which is in use
            new_descriptor_directory: Directory of the descriptor which is proposed as the update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case the changes are not allowed
        """
        try:
            # If the NSD does not exist in the DB or is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor.",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)


class NstTopic(DescriptorTopic):
    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edited
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that the NST can be public and used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "There is at least one NetSlice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)


class PduTopic(BaseTopic):
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "ENABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any VNFR that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        _filter = self._get_project_filter(session)
        _filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )


class VnfPkgOpTopic(BaseTopic):
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append the items created in database, in case a rollback needs to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None
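
    # Note on the repo parsing above (illustrative, not in the original
    # module): a KDU reference of the form "<repo>/<chart>" yields the repo
    # name, while a bare chart name yields no repo:
    #
    #     from re import fullmatch
    #     m = fullmatch(r"([^/]*)/([^/]*)", "bitnami/mysql")
    #     m.group(1) if m else None  # -> "bitnami"
    #     m = fullmatch(r"([^/]*)/([^/]*)", "mysql")
    #     m.group(1) if m else None  # -> None (no k8srepo lookup is done)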