Feature 10999: Dual-Stack IP Support for VNFs in SOL003 VNFM Interface
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23 import re
24
25 # import logging
26 from deepdiff import DeepDiff
27 from hashlib import md5
28 from osm_common.dbbase import DbException, deep_update_rfc7396
29 from http import HTTPStatus
30 from time import time
31 from uuid import uuid4
32 from re import fullmatch
33 from zipfile import ZipFile
34 from osm_nbi.validation import (
35 ValidationError,
36 pdu_new_schema,
37 pdu_edit_schema,
38 validate_input,
39 vnfpkgop_new_schema,
40 )
41 from osm_nbi.base_topic import (
42 BaseTopic,
43 EngineException,
44 get_iterable,
45 detect_descriptor_usage,
46 )
47 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
48 from osm_im.nst import nst as nst_im
49 from pyangbind.lib.serialise import pybindJSONDecoder
50 import pyangbind.lib.pybindJSON as pybindJSON
51 from osm_nbi import utils
52
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"

# Pattern used by VnfdTopic.validate_helm_chart to check a KDU "helm-chart"
# value: an optional "repo/" prefix plus a chart name, both restricted to
# lowercase alphanumerics with inner dashes (no leading/trailing dash).
# NOTE(review): the name part is a starred group, so a value ending right
# after the "repo/" prefix (e.g. "ab/") also matches — confirm intended.
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
58
59
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor-based topics (VNFD, NSD, NST, ...).

    Implements the SOL005 two-step on-boarding (create an empty entry, then
    upload the package content), per-revision file-system storage and the
    shared validations. Subclasses define `topic`/`topic_msg` and override
    the validation hooks (`_validate_input_new`, `pyangbind_validation`, ...).
    """

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def _validate_input_new(self, indata, storage_params, force=False):
        # Validation hook for new descriptors; the base implementation
        # accepts the input unchanged and subclasses override it.
        return indata
66
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate the merged descriptor before it is written back.

        Checks that "id"/"name" values are unique inside every list of the
        descriptor, re-validates the whole content through
        `_validate_input_new` (pyangbind in subclasses) and finally verifies
        that the descriptor "id" does not collide with another descriptor of
        the same project.

        :param session: contains "username", "admin", "force", "public",
            "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: content of the edit request itself
        :param _id: internal database id of the descriptor being edited
        :return: the serialized (validated) final content
        :raises EngineException: on duplicated identifiers or id conflict
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursive walk: for each list of dicts, ensure the "id" (or
            # "name", whichever the first item defines) is not repeated.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" is always present in final_content;
        # a KeyError would be raised here otherwise — confirm with callers.
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            # exclude the descriptor being edited from the collision search
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
138
139 @staticmethod
140 def format_on_new(content, project_id=None, make_public=False):
141 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
142 content["_admin"]["onboardingState"] = "CREATED"
143 content["_admin"]["operationalState"] = "DISABLED"
144 content["_admin"]["usageState"] = "NOT_IN_USE"
145
146 def delete_extra(self, session, _id, db_content, not_send_msg=None):
147 """
148 Deletes file system storage associated with the descriptor
149 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
150 :param _id: server internal id
151 :param db_content: The database content of the descriptor
152 :param not_send_msg: To not send message (False) or store content (list) instead
153 :return: None if ok or raises EngineException with the problem
154 """
155 self.fs.file_delete(_id, ignore_non_exist=True)
156 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
157 # Remove file revisions
158 if "revision" in db_content["_admin"]:
159 revision = db_content["_admin"]["revision"]
160 while revision > 0:
161 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
162 revision = revision - 1
163
164 @staticmethod
165 def get_one_by_id(db, session, topic, id):
166 # find owned by this project
167 _filter = BaseTopic._get_project_filter(session)
168 _filter["id"] = id
169 desc_list = db.get_list(topic, _filter)
170 if len(desc_list) == 1:
171 return desc_list[0]
172 elif len(desc_list) > 1:
173 raise DbException(
174 "Found more than one {} with id='{}' belonging to this project".format(
175 topic[:-1], id
176 ),
177 HTTPStatus.CONFLICT,
178 )
179
180 # not found any: try to find public
181 _filter = BaseTopic._get_project_filter(session)
182 _filter["id"] = id
183 desc_list = db.get_list(topic, _filter)
184 if not desc_list:
185 raise DbException(
186 "Not found any {} with id='{}'".format(topic[:-1], id),
187 HTTPStatus.NOT_FOUND,
188 )
189 elif len(desc_list) == 1:
190 return desc_list[0]
191 else:
192 raise DbException(
193 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
194 topic[:-1], id
195 ),
196 HTTPStatus.CONFLICT,
197 )
198
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # Only userDefinedData is stored at creation time; the descriptor
        # itself arrives later via upload_content(). "revision" starts at 0
        # and is bumped on every package upload.
        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None
236
237 def upload_content(self, session, _id, indata, kwargs, headers):
238 """
239 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
240 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
241 :param _id : the nsd,vnfd is already created, this is the id
242 :param indata: http body request
243 :param kwargs: user query string to override parameters. NOT USED
244 :param headers: http request headers
245 :return: True if package is completely uploaded or False if partial content has been uploded
246 Raise exception on error
247 """
248 # Check that _id exists and it is valid
249 current_desc = self.show(session, _id)
250
251 content_range_text = headers.get("Content-Range")
252 expected_md5 = headers.get("Content-File-MD5")
253 compressed = None
254 content_type = headers.get("Content-Type")
255 if (
256 content_type
257 and "application/gzip" in content_type
258 or "application/x-gzip" in content_type
259 ):
260 compressed = "gzip"
261 if content_type and "application/zip" in content_type:
262 compressed = "zip"
263 filename = headers.get("Content-Filename")
264 if not filename and compressed:
265 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
266 elif not filename:
267 filename = "package"
268
269 revision = 1
270 if "revision" in current_desc["_admin"]:
271 revision = current_desc["_admin"]["revision"] + 1
272
273 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
274 file_pkg = None
275 error_text = ""
276 fs_rollback = []
277
278 try:
279 if content_range_text:
280 content_range = (
281 content_range_text.replace("-", " ").replace("/", " ").split()
282 )
283 if (
284 content_range[0] != "bytes"
285 ): # TODO check x<y not negative < total....
286 raise IndexError()
287 start = int(content_range[1])
288 end = int(content_range[2]) + 1
289 total = int(content_range[3])
290 else:
291 start = 0
292 # Rather than using a temp folder, we will store the package in a folder based on
293 # the current revision.
294 proposed_revision_path = (
295 _id + ":" + str(revision)
296 ) # all the content is upload here and if ok, it is rename from id_ to is folder
297
298 if start:
299 if not self.fs.file_exists(proposed_revision_path, "dir"):
300 raise EngineException(
301 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
302 )
303 else:
304 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
305 self.fs.mkdir(proposed_revision_path)
306 fs_rollback.append(proposed_revision_path)
307
308 storage = self.fs.get_params()
309 storage["folder"] = proposed_revision_path
310
311 file_path = (proposed_revision_path, filename)
312 if self.fs.file_exists(file_path, "file"):
313 file_size = self.fs.file_size(file_path)
314 else:
315 file_size = 0
316 if file_size != start:
317 raise EngineException(
318 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
319 file_size, start
320 ),
321 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
322 )
323 file_pkg = self.fs.file_open(file_path, "a+b")
324 if isinstance(indata, dict):
325 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
326 file_pkg.write(indata_text.encode(encoding="utf-8"))
327 else:
328 indata_len = 0
329 while True:
330 indata_text = indata.read(4096)
331 indata_len += len(indata_text)
332 if not indata_text:
333 break
334 file_pkg.write(indata_text)
335 if content_range_text:
336 if indata_len != end - start:
337 raise EngineException(
338 "Mismatch between Content-Range header {}-{} and body length of {}".format(
339 start, end - 1, indata_len
340 ),
341 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
342 )
343 if end != total:
344 # TODO update to UPLOADING
345 return False
346
347 # PACKAGE UPLOADED
348 if expected_md5:
349 file_pkg.seek(0, 0)
350 file_md5 = md5()
351 chunk_data = file_pkg.read(1024)
352 while chunk_data:
353 file_md5.update(chunk_data)
354 chunk_data = file_pkg.read(1024)
355 if expected_md5 != file_md5.hexdigest():
356 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
357 file_pkg.seek(0, 0)
358 if compressed == "gzip":
359 tar = tarfile.open(mode="r", fileobj=file_pkg)
360 descriptor_file_name = None
361 for tarinfo in tar:
362 tarname = tarinfo.name
363 tarname_path = tarname.split("/")
364 if (
365 not tarname_path[0] or ".." in tarname_path
366 ): # if start with "/" means absolute path
367 raise EngineException(
368 "Absolute path or '..' are not allowed for package descriptor tar.gz"
369 )
370 if len(tarname_path) == 1 and not tarinfo.isdir():
371 raise EngineException(
372 "All files must be inside a dir for package descriptor tar.gz"
373 )
374 if (
375 tarname.endswith(".yaml")
376 or tarname.endswith(".json")
377 or tarname.endswith(".yml")
378 ):
379 storage["pkg-dir"] = tarname_path[0]
380 if len(tarname_path) == 2:
381 if descriptor_file_name:
382 raise EngineException(
383 "Found more than one descriptor file at package descriptor tar.gz"
384 )
385 descriptor_file_name = tarname
386 if not descriptor_file_name:
387 raise EngineException(
388 "Not found any descriptor file at package descriptor tar.gz"
389 )
390 storage["descriptor"] = descriptor_file_name
391 storage["zipfile"] = filename
392 self.fs.file_extract(tar, proposed_revision_path)
393 with self.fs.file_open(
394 (proposed_revision_path, descriptor_file_name), "r"
395 ) as descriptor_file:
396 content = descriptor_file.read()
397 elif compressed == "zip":
398 zipfile = ZipFile(file_pkg)
399 descriptor_file_name = None
400 for package_file in zipfile.infolist():
401 zipfilename = package_file.filename
402 file_path = zipfilename.split("/")
403 if (
404 not file_path[0] or ".." in zipfilename
405 ): # if start with "/" means absolute path
406 raise EngineException(
407 "Absolute path or '..' are not allowed for package descriptor zip"
408 )
409
410 if (
411 zipfilename.endswith(".yaml")
412 or zipfilename.endswith(".json")
413 or zipfilename.endswith(".yml")
414 ) and (
415 zipfilename.find("/") < 0
416 or zipfilename.find("Definitions") >= 0
417 ):
418 storage["pkg-dir"] = ""
419 if descriptor_file_name:
420 raise EngineException(
421 "Found more than one descriptor file at package descriptor zip"
422 )
423 descriptor_file_name = zipfilename
424 if not descriptor_file_name:
425 raise EngineException(
426 "Not found any descriptor file at package descriptor zip"
427 )
428 storage["descriptor"] = descriptor_file_name
429 storage["zipfile"] = filename
430 self.fs.file_extract(zipfile, proposed_revision_path)
431
432 with self.fs.file_open(
433 (proposed_revision_path, descriptor_file_name), "r"
434 ) as descriptor_file:
435 content = descriptor_file.read()
436 else:
437 content = file_pkg.read()
438 storage["descriptor"] = descriptor_file_name = filename
439
440 if descriptor_file_name.endswith(".json"):
441 error_text = "Invalid json format "
442 indata = json.load(content)
443 else:
444 error_text = "Invalid yaml format "
445 indata = yaml.safe_load(content)
446
447 # Need to close the file package here so it can be copied from the
448 # revision to the current, unrevisioned record
449 if file_pkg:
450 file_pkg.close()
451 file_pkg = None
452
453 # Fetch both the incoming, proposed revision and the original revision so we
454 # can call a validate method to compare them
455 current_revision_path = _id + "/"
456 self.fs.sync(from_path=current_revision_path)
457 self.fs.sync(from_path=proposed_revision_path)
458
459 if revision > 1:
460 try:
461 self._validate_descriptor_changes(
462 _id,
463 descriptor_file_name,
464 current_revision_path,
465 proposed_revision_path,
466 )
467 except Exception as e:
468 shutil.rmtree(
469 self.fs.path + current_revision_path, ignore_errors=True
470 )
471 shutil.rmtree(
472 self.fs.path + proposed_revision_path, ignore_errors=True
473 )
474 # Only delete the new revision. We need to keep the original version in place
475 # as it has not been changed.
476 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
477 raise e
478
479 indata = self._remove_envelop(indata)
480
481 # Override descriptor with query string kwargs
482 if kwargs:
483 self._update_input_with_kwargs(indata, kwargs)
484
485 current_desc["_admin"]["storage"] = storage
486 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
487 current_desc["_admin"]["operationalState"] = "ENABLED"
488 current_desc["_admin"]["modified"] = time()
489 current_desc["_admin"]["revision"] = revision
490
491 deep_update_rfc7396(current_desc, indata)
492 current_desc = self.check_conflict_on_edit(
493 session, current_desc, indata, _id=_id
494 )
495
496 # Copy the revision to the active package name by its original id
497 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
498 os.rename(
499 self.fs.path + proposed_revision_path,
500 self.fs.path + current_revision_path,
501 )
502 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
503 self.fs.mkdir(current_revision_path)
504 self.fs.reverse_sync(from_path=current_revision_path)
505
506 shutil.rmtree(self.fs.path + _id)
507
508 self.db.replace(self.topic, _id, current_desc)
509
510 # Store a copy of the package as a point in time revision
511 revision_desc = dict(current_desc)
512 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
513 self.db.create(self.topic + "_revisions", revision_desc)
514 fs_rollback = []
515
516 indata["_id"] = _id
517 self._send_msg("edited", indata)
518
519 # TODO if descriptor has changed because kwargs update content and remove cached zip
520 # TODO if zip is not present creates one
521 return True
522
523 except EngineException:
524 raise
525 except IndexError:
526 raise EngineException(
527 "invalid Content-Range header format. Expected 'bytes start-end/total'",
528 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
529 )
530 except IOError as e:
531 raise EngineException(
532 "invalid upload transaction sequence: '{}'".format(e),
533 HTTPStatus.BAD_REQUEST,
534 )
535 except tarfile.ReadError as e:
536 raise EngineException(
537 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
538 )
539 except (ValueError, yaml.YAMLError) as e:
540 raise EngineException(error_text + str(e))
541 except ValidationError as e:
542 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
543 finally:
544 if file_pkg:
545 file_pkg.close()
546 for file in fs_rollback:
547 self.fs.file_delete(file, ignore_non_exist=True)
548
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # content can only be served once the package has been on-boarded
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # a directory is answered with its listing; a file is streamed
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decision table for serving the whole package:
        # pkgtype          accept ZIP  TEXT    -> result
        # manyfiles        yes         X       -> zip
        #                  no          yes     -> error
        # onefile          yes         no      -> zip
        #                  X           yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
639
640 def _remove_yang_prefixes_from_descriptor(self, descriptor):
641 new_descriptor = {}
642 for k, v in descriptor.items():
643 new_v = v
644 if isinstance(v, dict):
645 new_v = self._remove_yang_prefixes_from_descriptor(v)
646 elif isinstance(v, list):
647 new_v = list()
648 for x in v:
649 if isinstance(x, dict):
650 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
651 else:
652 new_v.append(x)
653 new_descriptor[k.split(":")[-1]] = new_v
654 return new_descriptor
655
    def pyangbind_validation(self, item, data, force=False):
        """Model-validation hook; concrete topics must override it.

        Reaching this base implementation means the subclass did not provide
        a validator, which is reported as an internal server error.
        """
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
661
    def _validate_input_edit(self, indata, content, force=False):
        """Normalize and validate an edit request body.

        Moves "operationalState" and "userDefinedData" under "_admin" (where
        they are actually stored) and rejects invalid values or no-op state
        changes.

        :param indata: edit request body (modified in place and returned)
        :param content: current database content of the descriptor
        :param force: present for signature compatibility; not used here
        :return: the normalized indata
        :raises EngineException: on invalid state, wrong userDefinedData type
            or a state change to the value already set
        """
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # changing operationalState to the value it already has is a conflict
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata
707
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook to compare a new package revision against the previous one.

        The base implementation accepts any change; subclasses may raise to
        reject forbidden modifications, for example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        """
        pass
721
722
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptor (VNFD) packages."""

    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        # Use super() instead of naming the parent class explicitly, for
        # consistency with DescriptorTopic.__init__ and to honour the MRO.
        super().__init__(db, fs, msg, auth)
729
    def pyangbind_validation(self, item, data, force=False):
        """Validate a VNFD against the ETSI SOL006 YANG model via pyangbind.

        :param item: topic name, used only in error messages ("vnfds")
        :param data: descriptor content, without envelope
        :param force: if True, unknown fields are skipped instead of rejected
        :return: the input data deep-updated with the serialized model output
        :raises EngineException: if the descriptor uses the pre-SOL006 format
            or does not conform to the model
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            # serialize the bound object back and strip envelope and YANG
            # prefixes so the stored descriptor keeps plain key names
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
755
756 @staticmethod
757 def _descriptor_data_is_in_old_format(data):
758 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
759
760 @staticmethod
761 def _remove_envelop(indata=None):
762 if not indata:
763 return {}
764 clean_indata = indata
765
766 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
767 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
768 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
769 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
770 elif clean_indata.get("vnfd"):
771 if not isinstance(clean_indata["vnfd"], dict):
772 raise EngineException("'vnfd' must be dict")
773 clean_indata = clean_indata["vnfd"]
774
775 return clean_indata
776
777 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
778 final_content = super().check_conflict_on_edit(
779 session, final_content, edit_content, _id
780 )
781
782 # set type of vnfd
783 contains_pdu = False
784 contains_vdu = False
785 for vdu in get_iterable(final_content.get("vdu")):
786 if vdu.get("pdu-type"):
787 contains_pdu = True
788 else:
789 contains_vdu = True
790 if contains_pdu:
791 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
792 elif contains_vdu:
793 final_content["_admin"]["type"] = "vnfd"
794 # if neither vud nor pdu do not fill type
795 return final_content
796
    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
        that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
        that uses this vnfd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd
        # VNF records reference the internal "_id" ...
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD
        # ... while NS packages reference the SOL006 "id", hence the two
        # different filter values
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )
832
    def _validate_input_new(self, indata, storage_params, force=False):
        """Full validation of a new/updated VNFD.

        Runs the pyangbind (SOL006 model) validation, then cross-reference
        checks over connection points, virtual links, monitoring/scaling/
        healing/alarm descriptors and packaged artifacts (charms, cloud-init
        files, helm charts).

        :param indata: descriptor content without envelope
        :param storage_params: "_admin.storage" info locating the package files
        :param force: forwarded to pyangbind validation to skip unknown fields
        :return: the validated (possibly completed) descriptor
        """
        # read-only SOL005 attributes that must not be stored
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): _validate_vdu_charms_in_package does not take the
            # loop's "vdu" — calling it once outside the loop looks
            # sufficient; confirm before changing.
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)
        self.validate_healing_group_descriptor(indata)
        self.validate_alarm_group_descriptor(indata)
        self.validate_storage_compute_descriptor(indata)
        self.validate_helm_chart(indata)

        return indata
861
862 @staticmethod
863 def validate_helm_chart(indata):
864 kdus = indata.get("kdu", [])
865 for kdu in kdus:
866 helm_chart_value = kdu.get("helm-chart")
867 if not helm_chart_value:
868 continue
869 if not valid_helm_chart_re.match(helm_chart_value):
870 raise EngineException(
871 "helm-chart '{}' is not valid".format(helm_chart_value),
872 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
873 )
874
875 @staticmethod
876 def validate_mgmt_interface_connection_point(indata):
877 if not indata.get("vdu"):
878 return
879 if not indata.get("mgmt-cp"):
880 raise EngineException(
881 "'mgmt-cp' is a mandatory field and it is not defined",
882 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
883 )
884
885 for cp in get_iterable(indata.get("ext-cpd")):
886 if cp["id"] == indata["mgmt-cp"]:
887 break
888 else:
889 raise EngineException(
890 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
891 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
892 )
893
894 @staticmethod
895 def validate_vdu_internal_connection_points(vdu):
896 int_cpds = set()
897 for cpd in get_iterable(vdu.get("int-cpd")):
898 cpd_id = cpd.get("id")
899 if cpd_id and cpd_id in int_cpds:
900 raise EngineException(
901 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
902 vdu["id"], cpd_id
903 ),
904 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
905 )
906 int_cpds.add(cpd_id)
907
908 @staticmethod
909 def validate_external_connection_points(indata):
910 all_vdus_int_cpds = set()
911 for vdu in get_iterable(indata.get("vdu")):
912 for int_cpd in get_iterable(vdu.get("int-cpd")):
913 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
914
915 ext_cpds = set()
916 for cpd in get_iterable(indata.get("ext-cpd")):
917 cpd_id = cpd.get("id")
918 if cpd_id and cpd_id in ext_cpds:
919 raise EngineException(
920 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
921 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
922 )
923 ext_cpds.add(cpd_id)
924
925 int_cpd = cpd.get("int-cpd")
926 if int_cpd:
927 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
928 raise EngineException(
929 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
930 cpd_id
931 ),
932 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
933 )
934 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
935
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """Ensure VDU-level juju charms declared in the dfs are packaged.

        For every deployment flavor whose day1-2 configuration (matching a
        vdu-profile id) declares a juju execution environment, the package
        must contain a "charms" or "Scripts/charms" folder.

        :raises EngineException: when a declared charm is not in the package
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # day1-2 entries whose id matches a vdu-profile id are
                        # treated as per-VDU configuration
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
961
962 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
963 if not vdu.get("cloud-init-file"):
964 return
965 if not self._validate_package_folders(
966 storage_params, "cloud_init", vdu["cloud-init-file"]
967 ) and not self._validate_package_folders(
968 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
969 ):
970 raise EngineException(
971 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
972 "package".format(indata["id"], vdu["id"])
973 )
974
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """Check that the VNF-level juju charm declared in the descriptor exists in the package.

        Looks for a day1-2 configuration whose id matches the VNFD id and that
        declares a juju execution environment; in that case the stored package
        must contain a "charms" (or legacy "Scripts/charms") folder.

        :param storage_params: _admin.storage section describing where the package is stored
        :param indata: VNFD descriptor content
        :raises EngineException: if a charm is declared but missing from the package
        """
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            # NOTE(review): the two returns above abort the whole check at the
            # first df without day1-2 configuration, so later dfs are never
            # inspected — looks like "continue" may have been intended; confirm.
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
1002
1003 def _validate_package_folders(self, storage_params, folder, file=None):
1004 if not storage_params:
1005 return False
1006 elif not storage_params.get("pkg-dir"):
1007 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1008 f = "{}_/{}".format(storage_params["folder"], folder)
1009 else:
1010 f = "{}/{}".format(storage_params["folder"], folder)
1011 if file:
1012 return self.fs.file_exists("{}/{}".format(f, file), "file")
1013 else:
1014 if self.fs.file_exists(f, "dir"):
1015 if self.fs.dir_ls(f):
1016 return True
1017 return False
1018 else:
1019 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1020 f = "{}_/{}/{}".format(
1021 storage_params["folder"], storage_params["pkg-dir"], folder
1022 )
1023 else:
1024 f = "{}/{}/{}".format(
1025 storage_params["folder"], storage_params["pkg-dir"], folder
1026 )
1027 if file:
1028 return self.fs.file_exists("{}/{}".format(f, file), "file")
1029 else:
1030 if self.fs.file_exists(f, "dir"):
1031 if self.fs.dir_ls(f):
1032 return True
1033 return False
1034
1035 @staticmethod
1036 def validate_internal_virtual_links(indata):
1037 all_ivld_ids = set()
1038 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1039 ivld_id = ivld.get("id")
1040 if ivld_id and ivld_id in all_ivld_ids:
1041 raise EngineException(
1042 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1043 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1044 )
1045 else:
1046 all_ivld_ids.add(ivld_id)
1047
1048 for vdu in get_iterable(indata.get("vdu")):
1049 for int_cpd in get_iterable(vdu.get("int-cpd")):
1050 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1051 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1052 raise EngineException(
1053 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1054 "int-virtual-link-desc".format(
1055 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1056 ),
1057 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1058 )
1059
1060 for df in get_iterable(indata.get("df")):
1061 for vlp in get_iterable(df.get("virtual-link-profile")):
1062 vlp_ivld_id = vlp.get("id")
1063 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1064 raise EngineException(
1065 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1066 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1067 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1068 )
1069
1070 @staticmethod
1071 def validate_monitoring_params(indata):
1072 all_monitoring_params = set()
1073 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1074 for mp in get_iterable(ivld.get("monitoring-parameters")):
1075 mp_id = mp.get("id")
1076 if mp_id and mp_id in all_monitoring_params:
1077 raise EngineException(
1078 "Duplicated monitoring-parameter id in "
1079 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1080 ivld["id"], mp_id
1081 ),
1082 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1083 )
1084 else:
1085 all_monitoring_params.add(mp_id)
1086
1087 for vdu in get_iterable(indata.get("vdu")):
1088 for mp in get_iterable(vdu.get("monitoring-parameter")):
1089 mp_id = mp.get("id")
1090 if mp_id and mp_id in all_monitoring_params:
1091 raise EngineException(
1092 "Duplicated monitoring-parameter id in "
1093 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1094 vdu["id"], mp_id
1095 ),
1096 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1097 )
1098 else:
1099 all_monitoring_params.add(mp_id)
1100
1101 for df in get_iterable(indata.get("df")):
1102 for mp in get_iterable(df.get("monitoring-parameter")):
1103 mp_id = mp.get("id")
1104 if mp_id and mp_id in all_monitoring_params:
1105 raise EngineException(
1106 "Duplicated monitoring-parameter id in "
1107 "df[id='{}']:monitoring-parameter[id='{}']".format(
1108 df["id"], mp_id
1109 ),
1110 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1111 )
1112 else:
1113 all_monitoring_params.add(mp_id)
1114
1115 @staticmethod
1116 def validate_scaling_group_descriptor(indata):
1117 all_monitoring_params = set()
1118 all_vdu_ids = set()
1119 for df in get_iterable(indata.get("df")):
1120 for il in get_iterable(df.get("instantiation-level")):
1121 for vl in get_iterable(il.get("vdu-level")):
1122 all_vdu_ids.add(vl.get("vdu-id"))
1123
1124 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1125 for mp in get_iterable(ivld.get("monitoring-parameters")):
1126 all_monitoring_params.add(mp.get("id"))
1127
1128 for vdu in get_iterable(indata.get("vdu")):
1129 for mp in get_iterable(vdu.get("monitoring-parameter")):
1130 all_monitoring_params.add(mp.get("id"))
1131
1132 for df in get_iterable(indata.get("df")):
1133 for mp in get_iterable(df.get("monitoring-parameter")):
1134 all_monitoring_params.add(mp.get("id"))
1135
1136 for df in get_iterable(indata.get("df")):
1137 for sa in get_iterable(df.get("scaling-aspect")):
1138 for deltas in get_iterable(
1139 sa.get("aspect-delta-details").get("deltas")
1140 ):
1141 for vds in get_iterable(deltas.get("vdu-delta")):
1142 sa_vdu_id = vds.get("id")
1143 if sa_vdu_id and sa_vdu_id not in all_vdu_ids:
1144 raise EngineException(
1145 "df[id='{}']:scaling-aspect[id='{}']:aspect-delta-details"
1146 "[delta='{}']: "
1147 "vdu-id='{}' not defined in vdu".format(
1148 df["id"],
1149 sa["id"],
1150 deltas["id"],
1151 sa_vdu_id,
1152 ),
1153 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1154 )
1155
1156 for df in get_iterable(indata.get("df")):
1157 for sa in get_iterable(df.get("scaling-aspect")):
1158 for sp in get_iterable(sa.get("scaling-policy")):
1159 for sc in get_iterable(sp.get("scaling-criteria")):
1160 sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
1161 if (
1162 sc_monitoring_param
1163 and sc_monitoring_param not in all_monitoring_params
1164 ):
1165 raise EngineException(
1166 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1167 "[name='{}']:scaling-criteria[name='{}']: "
1168 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1169 df["id"],
1170 sa["id"],
1171 sp["name"],
1172 sc["name"],
1173 sc_monitoring_param,
1174 ),
1175 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1176 )
1177
1178 for sca in get_iterable(sa.get("scaling-config-action")):
1179 if (
1180 "lcm-operations-configuration" not in df
1181 or "operate-vnf-op-config"
1182 not in df["lcm-operations-configuration"]
1183 or not utils.find_in_list(
1184 df["lcm-operations-configuration"][
1185 "operate-vnf-op-config"
1186 ].get("day1-2", []),
1187 lambda config: config["id"] == indata["id"],
1188 )
1189 ):
1190 raise EngineException(
1191 "'day1-2 configuration' not defined in the descriptor but it is "
1192 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1193 df["id"], sa["id"]
1194 ),
1195 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1196 )
1197 for configuration in get_iterable(
1198 df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1199 "day1-2", []
1200 )
1201 ):
1202 for primitive in get_iterable(
1203 configuration.get("config-primitive")
1204 ):
1205 if (
1206 primitive["name"]
1207 == sca["vnf-config-primitive-name-ref"]
1208 ):
1209 break
1210 else:
1211 raise EngineException(
1212 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1213 "config-primitive-name-ref='{}' does not match any "
1214 "day1-2 configuration:config-primitive:name".format(
1215 df["id"],
1216 sa["id"],
1217 sca["vnf-config-primitive-name-ref"],
1218 ),
1219 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1220 )
1221
1222 @staticmethod
1223 def validate_healing_group_descriptor(indata):
1224 all_vdu_ids = set()
1225 for df in get_iterable(indata.get("df")):
1226 for il in get_iterable(df.get("instantiation-level")):
1227 for vl in get_iterable(il.get("vdu-level")):
1228 all_vdu_ids.add(vl.get("vdu-id"))
1229
1230 for df in get_iterable(indata.get("df")):
1231 for ha in get_iterable(df.get("healing-aspect")):
1232 for hp in get_iterable(ha.get("healing-policy")):
1233 hp_monitoring_param = hp.get("vdu-id")
1234 if hp_monitoring_param and hp_monitoring_param not in all_vdu_ids:
1235 raise EngineException(
1236 "df[id='{}']:healing-aspect[id='{}']:healing-policy"
1237 "[name='{}']: "
1238 "vdu-id='{}' not defined in vdu".format(
1239 df["id"],
1240 ha["id"],
1241 hp["event-name"],
1242 hp_monitoring_param,
1243 ),
1244 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1245 )
1246
1247 @staticmethod
1248 def validate_alarm_group_descriptor(indata):
1249 all_monitoring_params = set()
1250 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1251 for mp in get_iterable(ivld.get("monitoring-parameters")):
1252 all_monitoring_params.add(mp.get("id"))
1253
1254 for vdu in get_iterable(indata.get("vdu")):
1255 for mp in get_iterable(vdu.get("monitoring-parameter")):
1256 all_monitoring_params.add(mp.get("id"))
1257
1258 for df in get_iterable(indata.get("df")):
1259 for mp in get_iterable(df.get("monitoring-parameter")):
1260 all_monitoring_params.add(mp.get("id"))
1261
1262 for vdus in get_iterable(indata.get("vdu")):
1263 for alarms in get_iterable(vdus.get("alarm")):
1264 alarm_monitoring_param = alarms.get("vnf-monitoring-param-ref")
1265 if (
1266 alarm_monitoring_param
1267 and alarm_monitoring_param not in all_monitoring_params
1268 ):
1269 raise EngineException(
1270 "vdu[id='{}']:alarm[id='{}']:"
1271 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1272 vdus["id"],
1273 alarms["alarm-id"],
1274 alarm_monitoring_param,
1275 ),
1276 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1277 )
1278
1279 @staticmethod
1280 def validate_storage_compute_descriptor(indata):
1281 all_vsd_ids = set()
1282 for vsd in get_iterable(indata.get("virtual-storage-desc")):
1283 all_vsd_ids.add(vsd.get("id"))
1284
1285 all_vcd_ids = set()
1286 for vcd in get_iterable(indata.get("virtual-compute-desc")):
1287 all_vcd_ids.add(vcd.get("id"))
1288
1289 for vdus in get_iterable(indata.get("vdu")):
1290 for vsd_ref in vdus.get("virtual-storage-desc"):
1291 if vsd_ref and vsd_ref not in all_vsd_ids:
1292 raise EngineException(
1293 "vdu[virtual-storage-desc='{}']"
1294 "not defined in vnfd".format(
1295 vsd_ref,
1296 ),
1297 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1298 )
1299
1300 for vdus in get_iterable(indata.get("vdu")):
1301 vcd_ref = vdus.get("virtual-compute-desc")
1302 if vcd_ref and vcd_ref not in all_vcd_ids:
1303 raise EngineException(
1304 "vdu[virtual-compute-desc='{}']"
1305 "not defined in vnfd".format(
1306 vdus["virtual-compute-desc"],
1307 ),
1308 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1309 )
1310
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove any package operations that reference this package
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # Remove all stored revisions of this descriptor (revision _ids embed _id)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1324
1325 def sol005_projection(self, data):
1326 data["onboardingState"] = data["_admin"]["onboardingState"]
1327 data["operationalState"] = data["_admin"]["operationalState"]
1328 data["usageState"] = data["_admin"]["usageState"]
1329
1330 links = {}
1331 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1332 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1333 links["packageContent"] = {
1334 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1335 }
1336 data["_links"] = links
1337
1338 return super().sol005_projection(data)
1339
1340 @staticmethod
1341 def find_software_version(vnfd: dict) -> str:
1342 """Find the sotware version in the VNFD descriptors
1343
1344 Args:
1345 vnfd (dict): Descriptor as a dictionary
1346
1347 Returns:
1348 software-version (str)
1349 """
1350 default_sw_version = "1.0"
1351 if vnfd.get("vnfd"):
1352 vnfd = vnfd["vnfd"]
1353 if vnfd.get("software-version"):
1354 return vnfd["software-version"]
1355 else:
1356 return default_sw_version
1357
1358 @staticmethod
1359 def extract_policies(vnfd: dict) -> dict:
1360 """Removes the policies from the VNFD descriptors
1361
1362 Args:
1363 vnfd (dict): Descriptor as a dictionary
1364
1365 Returns:
1366 vnfd (dict): VNFD which does not include policies
1367 """
1368 for df in vnfd.get("df", {}):
1369 for policy in ["scaling-aspect", "healing-aspect"]:
1370 if df.get(policy, {}):
1371 df.pop(policy)
1372 for vdu in vnfd.get("vdu", {}):
1373 for alarm_policy in ["alarm", "monitoring-parameter"]:
1374 if vdu.get(alarm_policy, {}):
1375 vdu.pop(alarm_policy)
1376 return vnfd
1377
1378 @staticmethod
1379 def extract_day12_primitives(vnfd: dict) -> dict:
1380 """Removes the day12 primitives from the VNFD descriptors
1381
1382 Args:
1383 vnfd (dict): Descriptor as a dictionary
1384
1385 Returns:
1386 vnfd (dict)
1387 """
1388 for df_id, df in enumerate(vnfd.get("df", {})):
1389 if (
1390 df.get("lcm-operations-configuration", {})
1391 .get("operate-vnf-op-config", {})
1392 .get("day1-2")
1393 ):
1394 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1395 "day1-2"
1396 )
1397 for config_id, config in enumerate(day12):
1398 for key in [
1399 "initial-config-primitive",
1400 "config-primitive",
1401 "terminate-config-primitive",
1402 ]:
1403 config.pop(key, None)
1404 day12[config_id] = config
1405 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1406 "day1-2"
1407 ] = day12
1408 vnfd["df"][df_id] = df
1409 return vnfd
1410
1411 def remove_modifiable_items(self, vnfd: dict) -> dict:
1412 """Removes the modifiable parts from the VNFD descriptors
1413
1414 It calls different extract functions according to different update types
1415 to clear all the modifiable items from VNFD
1416
1417 Args:
1418 vnfd (dict): Descriptor as a dictionary
1419
1420 Returns:
1421 vnfd (dict): Descriptor which does not include modifiable contents
1422 """
1423 if vnfd.get("vnfd"):
1424 vnfd = vnfd["vnfd"]
1425 vnfd.pop("_admin", None)
1426 # If the other extractions need to be done from VNFD,
1427 # the new extract methods could be appended to below list.
1428 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1429 vnfd_temp = extract_function(vnfd)
1430 vnfd = vnfd_temp
1431 return vnfd
1432
1433 def _validate_descriptor_changes(
1434 self,
1435 descriptor_id: str,
1436 descriptor_file_name: str,
1437 old_descriptor_directory: str,
1438 new_descriptor_directory: str,
1439 ):
1440 """Compares the old and new VNFD descriptors and validates the new descriptor.
1441
1442 Args:
1443 old_descriptor_directory (str): Directory of descriptor which is in-use
1444 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1445
1446 Returns:
1447 None
1448
1449 Raises:
1450 EngineException: In case of error when there are unallowed changes
1451 """
1452 try:
1453 # If VNFD does not exist in DB or it is not in use by any NS,
1454 # validation is not required.
1455 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1456 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1457 return
1458
1459 # Get the old and new descriptor contents in order to compare them.
1460 with self.fs.file_open(
1461 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1462 ) as old_descriptor_file:
1463 with self.fs.file_open(
1464 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1465 ) as new_descriptor_file:
1466 old_content = yaml.safe_load(old_descriptor_file.read())
1467 new_content = yaml.safe_load(new_descriptor_file.read())
1468
1469 # If software version has changed, we do not need to validate
1470 # the differences anymore.
1471 if old_content and new_content:
1472 if self.find_software_version(
1473 old_content
1474 ) != self.find_software_version(new_content):
1475 return
1476
1477 disallowed_change = DeepDiff(
1478 self.remove_modifiable_items(old_content),
1479 self.remove_modifiable_items(new_content),
1480 )
1481
1482 if disallowed_change:
1483 changed_nodes = functools.reduce(
1484 lambda a, b: a + " , " + b,
1485 [
1486 node.lstrip("root")
1487 for node in disallowed_change.get(
1488 "values_changed"
1489 ).keys()
1490 ],
1491 )
1492
1493 raise EngineException(
1494 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1495 "there are disallowed changes in the vnf descriptor.",
1496 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1497 )
1498 except (
1499 DbException,
1500 AttributeError,
1501 IndexError,
1502 KeyError,
1503 ValueError,
1504 ) as e:
1505 raise type(e)(
1506 "VNF Descriptor could not be processed with error: {}.".format(e)
1507 )
1508
1509
class NsdTopic(DescriptorTopic):
    """Topic handler for Network Service Descriptors (SOL005 "nsds" collection)."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 model using pyangbind.

        :param item: topic name (kept for interface uniformity with other topics)
        :param data: NSD content as a dictionary
        :param force: when True, unknown fields are skipped instead of failing
        :return: the descriptor after the pyangbind round-trip (yang-normalized)
        :raises EngineException: 422 for old-format descriptors or model errors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile content is saved before the pyangbind round-trip and
            # restored afterwards on the resulting descriptor
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 OSM descriptors used the "nsd-catalog" envelope
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nsd"/"etsi-nfv-nsd:nsd" envelope and return the single
        NSD contained in it.

        :raises EngineException: if the inner "nsd" is not a one-element list
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: drop read-only projection fields, run the
        pyangbind model validation and the cross-reference checks.

        :return: the validated (and possibly normalized) descriptor
        """
        # Read-only SOL005 projection fields must not be stored
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt-network VLD.

        :raises EngineException: 422 when the combination is present
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df vnf-profile references a vnfd-id declared in the NSD.

        :raises EngineException: 422 when a reference does not match
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the operational state to its current value is reported as a conflict
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a {vnfd-id: vnfd} index for every constituent VNFD of the NSD.

        :raises EngineException: 409 when a referenced vnfd does not exist in
            the project scope
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                # Keep the first match of the project-scoped query
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check that constituent-cpd-id references of every vnf-profile exist
        as ext-cpd ids inside the corresponding VNFD.

        :param df: deployment flavor of the NSD
        :param vnfds_index: {vnfd-id: vnfd} index built from the database
        :raises EngineException: 422 when a reference does not match
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit conflict checks, then verify that all
        descriptors referenced by the edited NSD still exist."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove all stored revisions of this descriptor (revision _ids embed _id)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        # Unwrap nested "nsd" envelopes (possibly list-wrapped) down to the
        # actual descriptor
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
            if isinstance(nsd, list):
                nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # NOTE(review): a diff without "values_changed" (only
                            # added/removed items) makes .keys() fail on None; the
                            # AttributeError is converted below — confirm intended.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Add the SOL005 read-only state fields and _links to an NSD view."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1909
1910
class NstTopic(DescriptorTopic):
    """Topic handler for Network Slice Templates (NST)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST against the YANG model via pyangbind.

        :param item: topic name (kept for interface compatibility)
        :param data: NST content to validate
        :param force: when True, unknown fields are skipped instead of rejected
        :return: validated descriptor content with the envelope removed
        :raises EngineException: if the content does not match the model
        """
        try:
            nst_model = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=nst_model,
                path_helper=True,
                skip_unknown=force,
            )
            serialized = pybindJSON.dumps(nst_model, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nst' / 'nst:nst' wrapper and return the bare descriptor."""
        if not indata:
            return {}
        # The envelope, when present, must be a single-element list.
        for envelope in ("nst", "nst:nst"):
            wrapped = indata.get(envelope)
            if wrapped:
                if not isinstance(wrapped, list) or len(wrapped) != 1:
                    raise EngineException(
                        "'{}' must be a list only one element".format(envelope)
                    )
                return wrapped[0]
        return indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Drop read-only state fields, then validate the NST content."""
        for state_field in ("onboardingState", "operationalState", "usageState"):
            indata.pop(state_field, None)
        validated = self.pyangbind_validation("nsts", indata, force)
        return validated.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        subnets = descriptor.get("netslice-subnet")
        if not subnets:
            return
        for subnet in subnets:
            referenced_nsd = subnet["nsd-ref"]
            query = self._get_project_filter(session)
            query["id"] = referenced_nsd
            if not self.db.get_list("nsds", query):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(referenced_nsd),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then verify referenced NSDs exist."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Look for Netslice Instances of this project that reference the NST.
        usage_filter = self._get_project_filter(session)
        usage_filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", usage_filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose SOL005 state fields and HATEOAS links on an NST record."""
        admin = data["_admin"]
        data["onboardingState"] = admin["onboardingState"]
        data["operationalState"] = admin["operationalState"]
        data["usageState"] = admin["usageState"]

        nst_id = data["_id"]
        data["_links"] = {
            "self": {"href": "/nst/v1/netslice_templates/{}".format(nst_id)},
            "nst": {"href": "/nst/v1/netslice_templates/{}/nst".format(nst_id)},
        }

        return super().sol005_projection(data)
2028
2029
class PduTopic(BaseTopic):
    """Topic handler for Physical Deployment Units (PDU)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the common admin fields and set the initial PDU lifecycle states."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # A PDU is in use when some VNF record has a VDU bound to it.
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
2065
2066
class VnfPkgOpTopic(BaseTopic):
    """Topic handler for VNF package operations (SOL005 vnfpkg_op_occs)."""

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Package operations are immutable; editing is rejected."""
        raise EngineException(
            f"Method 'edit' not allowed for topic '{self.topic}'",
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Package operations cannot be deleted individually."""
        raise EngineException(
            f"Method 'delete' not allowed for topic '{self.topic}'",
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of package operations is rejected."""
        raise EngineException(
            f"Method 'delete_list' not allowed for topic '{self.topic}'",
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)

        vnfpkg_id = indata["vnfPkgId"]
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)

        # Locate the requested KDU inside the VNFD (first match by name).
        target_kdu = next(
            (kdu for kdu in vnfd.get("kdu", []) if kdu["name"] == kdu_name), None
        )
        if target_kdu is None:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )

        # Record the deployment artifact reference on the operation params.
        helm_chart = target_kdu.get("helm-chart")
        juju_bundle = target_kdu.get("juju-bundle")
        if helm_chart:
            indata["helm-chart"] = helm_chart
            artifact = helm_chart
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            artifact = juju_bundle
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )

        # An artifact of the form "<repo>/<name>" references a registered repo.
        match = fullmatch(r"([^/]*)/([^/]*)", artifact)
        repo_name = match.group(1) if match else None
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            indata["k8srepoId"] = repo.get("_id")
            indata["k8srepo_url"] = repo.get("url")
        else:
            indata["k8srepoId"] = None
            indata["k8srepo_url"] = None

        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime

        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None