Coverage for osm_nbi/descriptor_topics.py: 66%
1119 statements
« prev ^ index » next coverage.py v7.6.12, created at 2025-04-12 20:04 +0000
1# -*- coding: utf-8 -*-
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
16import tarfile
17import yaml
18import json
19import copy
20import os
21import shutil
22import functools
23import re
25# import logging
26from deepdiff import DeepDiff
27from hashlib import md5
28from osm_common.dbbase import DbException, deep_update_rfc7396
29from http import HTTPStatus
30from time import time
31from uuid import uuid4
32from re import fullmatch
33from zipfile import ZipFile
34from urllib.parse import urlparse
35from osm_nbi.validation import (
36 ValidationError,
37 pdu_new_schema,
38 pdu_edit_schema,
39 validate_input,
40 vnfpkgop_new_schema,
41 ns_config_template,
42 vnf_schema,
43 vld_schema,
44 additional_params_for_vnf,
45)
46from osm_nbi.base_topic import (
47 BaseTopic,
48 EngineException,
49 get_iterable,
50 detect_descriptor_usage,
51)
52from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
53from osm_im.nst import nst as nst_im
54from pyangbind.lib.serialise import pybindJSONDecoder
55import pyangbind.lib.pybindJSON as pybindJSON
56from osm_nbi import utils
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"

# Pre-compiled pattern used by VnfdTopic.validate_helm_chart to accept non-URL
# helm chart references (lowercase alphanumerics and dashes, with an optional
# "repo/" prefix). URL-style references are validated separately via urlparse.
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor collections (e.g. VNFDs).

    Extends BaseTopic with the SOL005 two-step onboarding flow (create an
    almost empty entry, then upload the package content), package file-system
    storage handling and point-in-time revision management.
    """

    def __init__(self, db, fs, msg, auth):
        # db: database driver, fs: file-system storage driver,
        # msg: kafka message bus, auth: authentication/authorization backend
        super().__init__(db, fs, msg, auth)
    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new descriptor before it is stored.

        Base implementation is a no-op passthrough; subclasses (e.g.
        VnfdTopic) override it with real pyangbind/cross-reference validation.
        :param indata: descriptor content (envelope already removed)
        :param storage_params: "_admin.storage" metadata, used by overrides to
            check files shipped inside the package
        :param force: when True overrides may relax validation
        :return: the (possibly normalized) descriptor
        """
        return indata
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate an edited descriptor before it is written to the database.

        Performs three checks: (1) every list of dicts inside the descriptor
        uses a unique "id"/"name" identifier among siblings, (2) the full
        content re-validates via _validate_input_new, and (3) the descriptor
        "id" is not already used by another entry of this project.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content merged with the edit
        :param edit_content: the edition content as provided by the client
        :param _id: internal id of the edited entry
        :return: the serialized final_content
        :raise: EngineException (UNPROCESSABLE_ENTITY or CONFLICT)
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk every non-empty list of dicts. The first item
            # of a list decides which key ("id" preferred over "name") acts as
            # the identifier; later duplicates of that identifier raise.
            # "position" accumulates the dotted path for the error message.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" is always present in final_content; a
        # missing key would raise KeyError here — confirm upstream guarantee.
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            # forced operations skip the project-unique id check below
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            # exclude the entry being edited from the duplicate search
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
144 @staticmethod
145 def format_on_new(content, project_id=None, make_public=False):
146 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
147 content["_admin"]["onboardingState"] = "CREATED"
148 content["_admin"]["operationalState"] = "DISABLED"
149 content["_admin"]["usageState"] = "NOT_IN_USE"
151 def delete_extra(self, session, _id, db_content, not_send_msg=None):
152 """
153 Deletes file system storage associated with the descriptor
154 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
155 :param _id: server internal id
156 :param db_content: The database content of the descriptor
157 :param not_send_msg: To not send message (False) or store content (list) instead
158 :return: None if ok or raises EngineException with the problem
159 """
160 self.fs.file_delete(_id, ignore_non_exist=True)
161 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
162 # Remove file revisions
163 if "revision" in db_content["_admin"]:
164 revision = db_content["_admin"]["revision"]
165 while revision > 0:
166 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
167 revision = revision - 1
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Return the single descriptor of `topic` whose descriptor id is `id`.

        First searches among entries visible to this project; if none match, a
        second lookup is attempted before failing.
        :param db: database driver
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name, e.g. "vnfds"
        :param id: descriptor "id" field (not the internal "_id")
        :return: the matching database entry
        :raise: DbException NOT_FOUND, or CONFLICT when the result is ambiguous
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second query builds exactly the same filter as the
        # first one — no public-only restriction is added — so it presumably
        # relies on _get_project_filter already including public entries;
        # confirm against BaseTopic._get_project_filter.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """
        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop: only the userDefinedData is stored at this step
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # minimal skeleton: descriptor content arrives later via upload_content
        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        # register for rollback in case a later step of the request fails
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None
242 def upload_content(self, session, _id, indata, kwargs, headers):
243 """
244 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
245 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
246 :param _id : the nsd,vnfd is already created, this is the id
247 :param indata: http body request
248 :param kwargs: user query string to override parameters. NOT USED
249 :param headers: http request headers
250 :return: True if package is completely uploaded or False if partial content has been uploded
251 Raise exception on error
252 """
253 # Check that _id exists and it is valid
254 current_desc = self.show(session, _id)
256 content_range_text = headers.get("Content-Range")
257 expected_md5 = headers.get("Content-File-MD5")
258 compressed = None
259 content_type = headers.get("Content-Type")
260 if (
261 content_type
262 and "application/gzip" in content_type
263 or "application/x-gzip" in content_type
264 ):
265 compressed = "gzip"
266 if content_type and "application/zip" in content_type:
267 compressed = "zip"
268 filename = headers.get("Content-Filename")
269 if not filename and compressed:
270 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
271 elif not filename:
272 filename = "package"
274 revision = 1
275 if "revision" in current_desc["_admin"]:
276 revision = current_desc["_admin"]["revision"] + 1
278 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
279 file_pkg = None
280 error_text = ""
281 fs_rollback = []
283 try:
284 if content_range_text:
285 content_range = (
286 content_range_text.replace("-", " ").replace("/", " ").split()
287 )
288 if (
289 content_range[0] != "bytes"
290 ): # TODO check x<y not negative < total....
291 raise IndexError()
292 start = int(content_range[1])
293 end = int(content_range[2]) + 1
294 total = int(content_range[3])
295 else:
296 start = 0
297 # Rather than using a temp folder, we will store the package in a folder based on
298 # the current revision.
299 proposed_revision_path = (
300 _id + ":" + str(revision)
301 ) # all the content is upload here and if ok, it is rename from id_ to is folder
303 if start:
304 if not self.fs.file_exists(proposed_revision_path, "dir"):
305 raise EngineException(
306 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
307 )
308 else:
309 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
310 self.fs.mkdir(proposed_revision_path)
311 fs_rollback.append(proposed_revision_path)
313 storage = self.fs.get_params()
314 storage["folder"] = proposed_revision_path
316 file_path = (proposed_revision_path, filename)
317 if self.fs.file_exists(file_path, "file"):
318 file_size = self.fs.file_size(file_path)
319 else:
320 file_size = 0
321 if file_size != start:
322 raise EngineException(
323 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
324 file_size, start
325 ),
326 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
327 )
328 file_pkg = self.fs.file_open(file_path, "a+b")
330 if isinstance(indata, dict):
331 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
332 file_pkg.write(indata_text.encode(encoding="utf-8"))
333 else:
334 indata_len = 0
335 while True:
336 indata_text = indata.read(4096)
337 indata_len += len(indata_text)
338 if not indata_text:
339 break
340 file_pkg.write(indata_text)
341 if content_range_text:
342 if indata_len != end - start:
343 raise EngineException(
344 "Mismatch between Content-Range header {}-{} and body length of {}".format(
345 start, end - 1, indata_len
346 ),
347 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
348 )
349 if end != total:
350 # TODO update to UPLOADING
351 return False
353 # PACKAGE UPLOADED
354 if expected_md5:
355 file_pkg.seek(0, 0)
356 file_md5 = md5()
357 chunk_data = file_pkg.read(1024)
358 while chunk_data:
359 file_md5.update(chunk_data)
360 chunk_data = file_pkg.read(1024)
361 if expected_md5 != file_md5.hexdigest():
362 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
363 file_pkg.seek(0, 0)
364 if compressed == "gzip":
365 tar = tarfile.open(mode="r", fileobj=file_pkg)
366 descriptor_file_name = None
367 for tarinfo in tar:
368 tarname = tarinfo.name
369 tarname_path = tarname.split("/")
370 if (
371 not tarname_path[0] or ".." in tarname_path
372 ): # if start with "/" means absolute path
373 raise EngineException(
374 "Absolute path or '..' are not allowed for package descriptor tar.gz"
375 )
376 if len(tarname_path) == 1 and not tarinfo.isdir():
377 raise EngineException(
378 "All files must be inside a dir for package descriptor tar.gz"
379 )
380 if (
381 tarname.endswith(".yaml")
382 or tarname.endswith(".json")
383 or tarname.endswith(".yml")
384 ):
385 storage["pkg-dir"] = tarname_path[0]
386 if len(tarname_path) == 2:
387 if descriptor_file_name:
388 raise EngineException(
389 "Found more than one descriptor file at package descriptor tar.gz"
390 )
391 descriptor_file_name = tarname
392 if not descriptor_file_name:
393 raise EngineException(
394 "Not found any descriptor file at package descriptor tar.gz"
395 )
396 storage["descriptor"] = descriptor_file_name
397 storage["zipfile"] = filename
398 self.fs.file_extract(tar, proposed_revision_path)
399 with self.fs.file_open(
400 (proposed_revision_path, descriptor_file_name), "r"
401 ) as descriptor_file:
402 content = descriptor_file.read()
403 elif compressed == "zip":
404 zipfile = ZipFile(file_pkg)
405 descriptor_file_name = None
406 for package_file in zipfile.infolist():
407 zipfilename = package_file.filename
408 file_path = zipfilename.split("/")
409 if (
410 not file_path[0] or ".." in zipfilename
411 ): # if start with "/" means absolute path
412 raise EngineException(
413 "Absolute path or '..' are not allowed for package descriptor zip"
414 )
416 if (
417 zipfilename.endswith(".yaml")
418 or zipfilename.endswith(".json")
419 or zipfilename.endswith(".yml")
420 ) and (
421 zipfilename.find("/") < 0
422 or zipfilename.find("Definitions") >= 0
423 ):
424 storage["pkg-dir"] = ""
425 if descriptor_file_name:
426 raise EngineException(
427 "Found more than one descriptor file at package descriptor zip"
428 )
429 descriptor_file_name = zipfilename
430 if not descriptor_file_name:
431 raise EngineException(
432 "Not found any descriptor file at package descriptor zip"
433 )
434 storage["descriptor"] = descriptor_file_name
435 storage["zipfile"] = filename
436 self.fs.file_extract(zipfile, proposed_revision_path)
438 with self.fs.file_open(
439 (proposed_revision_path, descriptor_file_name), "r"
440 ) as descriptor_file:
441 content = descriptor_file.read()
442 else:
443 content = file_pkg.read()
444 storage["descriptor"] = descriptor_file_name = filename
446 if descriptor_file_name.endswith(".json"):
447 error_text = "Invalid json format "
448 indata = json.load(content)
449 else:
450 error_text = "Invalid yaml format "
451 indata = yaml.safe_load(content)
453 # Need to close the file package here so it can be copied from the
454 # revision to the current, unrevisioned record
455 if file_pkg:
456 file_pkg.close()
457 file_pkg = None
459 # Fetch both the incoming, proposed revision and the original revision so we
460 # can call a validate method to compare them
461 current_revision_path = _id + "/"
462 self.fs.sync(from_path=current_revision_path)
463 self.fs.sync(from_path=proposed_revision_path)
465 if revision > 1:
466 try:
467 self._validate_descriptor_changes(
468 _id,
469 descriptor_file_name,
470 current_revision_path,
471 proposed_revision_path,
472 )
473 except Exception as e:
474 shutil.rmtree(
475 self.fs.path + current_revision_path, ignore_errors=True
476 )
477 shutil.rmtree(
478 self.fs.path + proposed_revision_path, ignore_errors=True
479 )
480 # Only delete the new revision. We need to keep the original version in place
481 # as it has not been changed.
482 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
483 raise e
485 indata = self._remove_envelop(indata)
487 # Override descriptor with query string kwargs
488 if kwargs:
489 self._update_input_with_kwargs(indata, kwargs)
491 current_desc["_admin"]["storage"] = storage
492 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
493 current_desc["_admin"]["operationalState"] = "ENABLED"
494 current_desc["_admin"]["modified"] = time()
495 current_desc["_admin"]["revision"] = revision
497 deep_update_rfc7396(current_desc, indata)
498 current_desc = self.check_conflict_on_edit(
499 session, current_desc, indata, _id=_id
500 )
502 # Copy the revision to the active package name by its original id
503 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
504 os.rename(
505 self.fs.path + proposed_revision_path,
506 self.fs.path + current_revision_path,
507 )
508 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
509 self.fs.mkdir(current_revision_path)
510 self.fs.reverse_sync(from_path=current_revision_path)
512 shutil.rmtree(self.fs.path + _id)
514 self.db.replace(self.topic, _id, current_desc)
516 # Store a copy of the package as a point in time revision
517 revision_desc = dict(current_desc)
518 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
519 self.db.create(self.topic + "_revisions", revision_desc)
520 fs_rollback = []
522 indata["_id"] = _id
523 self._send_msg("edited", indata)
525 # TODO if descriptor has changed because kwargs update content and remove cached zip
526 # TODO if zip is not present creates one
527 return True
529 except EngineException:
530 raise
531 except IndexError:
532 raise EngineException(
533 "invalid Content-Range header format. Expected 'bytes start-end/total'",
534 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
535 )
536 except IOError as e:
537 raise EngineException(
538 "invalid upload transaction sequence: '{}'".format(e),
539 HTTPStatus.BAD_REQUEST,
540 )
541 except tarfile.ReadError as e:
542 raise EngineException(
543 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
544 )
545 except (ValueError, yaml.YAMLError) as e:
546 raise EngineException(error_text + str(e))
547 except ValidationError as e:
548 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
549 finally:
550 if file_pkg:
551 file_pkg.close()
552 for file in fs_rollback:
553 self.fs.file_delete(file, ignore_non_exist=True)
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # only fully uploaded packages can be downloaded
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # a directory returns its listing; a file returns its bytes
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype        accept ZIP  TEXT    -> result
        # manyfiles      yes         X       -> zip
        #                no          yes     -> error
        # onefile        yes         no      -> zip
        #                X           yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
646 def _remove_yang_prefixes_from_descriptor(self, descriptor):
647 new_descriptor = {}
648 for k, v in descriptor.items():
649 new_v = v
650 if isinstance(v, dict):
651 new_v = self._remove_yang_prefixes_from_descriptor(v)
652 elif isinstance(v, list):
653 new_v = list()
654 for x in v:
655 if isinstance(x, dict):
656 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
657 else:
658 new_v.append(x)
659 new_descriptor[k.split(":")[-1]] = new_v
660 return new_descriptor
662 def pyangbind_validation(self, item, data, force=False):
663 raise EngineException(
664 "Not possible to validate '{}' item".format(item),
665 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
666 )
    def _validate_input_edit(self, indata, content, force=False):
        """Normalize and validate an edit payload before merging.

        Moves "operationalState" and "userDefinedData" under "_admin", and
        rejects invalid states or no-op state changes.
        :param indata: edit payload from the client (modified in place)
        :param content: current database content of the entry
        :param force: unused here; kept for interface compatibility
        :return: the normalized indata
        :raise: EngineException BAD_REQUEST or CONFLICT
        """
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # setting the state it already has is reported as a conflict
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook called by upload_content when a new package revision arrives.

        Base implementation accepts any change; subclasses may compare the old
        and proposed revisions and raise to reject forbidden modifications.
        """
        # Example:
        #    raise EngineException(
        #           "Error in validating new descriptor: <NODE> cannot be modified",
        #           http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        #    )
        pass
class VnfdTopic(DescriptorTopic):
    """Descriptor topic for VNF packages (VNFDs), validated against the ETSI
    SOL006 information model via pyangbind."""

    # database collection and kafka message topic names
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)
    def pyangbind_validation(self, item, data, force=False):
        """Validate a VNFD against the ETSI SOL006 pyangbind model.

        :param item: topic name (e.g. "vnfds"); unused except for interface
        :param data: descriptor content, envelope already removed
        :param force: when True unknown fields are skipped instead of failing
        :return: data deep-updated with the pyangbind-normalized descriptor
        :raise: EngineException UNPROCESSABLE_ENTITY for pre-SOL006 formats or
            any validation failure
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            # load into the yang model; raises on schema violations
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            # round-trip the serialized model, then strip envelope and yang prefixes
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
762 @staticmethod
763 def _descriptor_data_is_in_old_format(data):
764 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
766 @staticmethod
767 def _remove_envelop(indata=None):
768 if not indata:
769 return {}
770 clean_indata = indata
772 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
773 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
774 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
775 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
776 elif clean_indata.get("vnfd"):
777 if not isinstance(clean_indata["vnfd"], dict):
778 raise EngineException("'vnfd' must be dict")
779 clean_indata = clean_indata["vnfd"]
781 return clean_indata
783 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
784 final_content = super().check_conflict_on_edit(
785 session, final_content, edit_content, _id
786 )
788 # set type of vnfd
789 contains_pdu = False
790 contains_vdu = False
791 for vdu in get_iterable(final_content.get("vdu")):
792 if vdu.get("pdu-type"):
793 contains_pdu = True
794 else:
795 contains_vdu = True
796 if contains_pdu:
797 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
798 elif contains_vdu:
799 final_content["_admin"]["type"] = "vnfd"
800 # if neither vud nor pdu do not fill type
801 return final_content
    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
        that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
        that uses this vnfd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            # forced deletion skips every usage check
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd (vnfrs reference the internal "_id")
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD (nsds reference the descriptor "id")
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )
    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new/edited VNFD: pyangbind model validation followed by
        every cross-reference and package-content check.

        :param indata: VNFD content without envelope (modified in place)
        :param storage_params: "_admin.storage" metadata of the package
        :param force: when True pyangbind skips unknown fields
        :return: the validated/normalized VNFD
        :raise: EngineException on any validation failure
        """
        # read-only/SOL005 output attributes must not be stored
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): this call does not depend on `vdu`, so running it
            # once per VDU repeats the same whole-descriptor check — confirm
            # whether it was meant to take the vdu.
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)
        self.validate_healing_group_descriptor(indata)
        self.validate_alarm_group_descriptor(indata)
        self.validate_storage_compute_descriptor(indata)
        self.validate_helm_chart(indata)

        return indata
869 @staticmethod
870 def validate_helm_chart(indata):
871 def is_url(url):
872 result = urlparse(url)
873 return all([result.scheme, result.netloc])
875 kdus = indata.get("kdu", [])
876 for kdu in kdus:
877 helm_chart_value = kdu.get("helm-chart")
878 if not helm_chart_value:
879 continue
880 if not (
881 valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
882 ):
883 raise EngineException(
884 "helm-chart '{}' is not valid".format(helm_chart_value),
885 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
886 )
888 @staticmethod
889 def validate_mgmt_interface_connection_point(indata):
890 if not indata.get("vdu"):
891 return
892 if not indata.get("mgmt-cp"):
893 raise EngineException(
894 "'mgmt-cp' is a mandatory field and it is not defined",
895 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
896 )
898 for cp in get_iterable(indata.get("ext-cpd")):
899 if cp["id"] == indata["mgmt-cp"]:
900 break
901 else:
902 raise EngineException(
903 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
904 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
905 )
907 @staticmethod
908 def validate_vdu_internal_connection_points(vdu):
909 int_cpds = set()
910 for cpd in get_iterable(vdu.get("int-cpd")):
911 cpd_id = cpd.get("id")
912 if cpd_id and cpd_id in int_cpds:
913 raise EngineException(
914 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
915 vdu["id"], cpd_id
916 ),
917 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
918 )
919 int_cpds.add(cpd_id)
921 @staticmethod
922 def validate_external_connection_points(indata):
923 all_vdus_int_cpds = set()
924 for vdu in get_iterable(indata.get("vdu")):
925 for int_cpd in get_iterable(vdu.get("int-cpd")):
926 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
928 ext_cpds = set()
929 for cpd in get_iterable(indata.get("ext-cpd")):
930 cpd_id = cpd.get("id")
931 if cpd_id and cpd_id in ext_cpds:
932 raise EngineException(
933 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
934 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
935 )
936 ext_cpds.add(cpd_id)
938 int_cpd = cpd.get("int-cpd")
939 if int_cpd:
940 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
941 raise EngineException(
942 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
943 cpd_id
944 ),
945 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
946 )
947 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """For every DF, if a day1-2 config matching a vdu-profile id declares
        a juju execution environment, require a charms folder inside the
        package (either "charms" or "Scripts/charms" layout).
        :raise: EngineException when the charm folder is missing
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # a config applies to a vdu when their ids match
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
975 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
976 if not vdu.get("cloud-init-file"):
977 return
978 if not self._validate_package_folders(
979 storage_params, "cloud_init", vdu["cloud-init-file"]
980 ) and not self._validate_package_folders(
981 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
982 ):
983 raise EngineException(
984 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
985 "package".format(indata["id"], vdu["id"])
986 )
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """Check that a charm folder is present in the package when the
        VNF-level day1-2 configuration declares a juju execution environment.

        :param storage_params: _admin.storage section of the descriptor
        :param indata: VNFD content (dict)
        :raises EngineException: when the charm folder is missing
        """
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these early returns abort the check for ALL
            # remaining deployment flavors as soon as one df lacks the
            # configuration container — "continue" may have been intended;
            # confirm before changing.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # Only the day1-2 entry whose id matches the VNF id applies here
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        # Charms may live in the legacy "charms" folder or the
                        # SOL004 "Scripts/charms" layout
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
1016 def _validate_package_folders(self, storage_params, folder, file=None):
1017 if not storage_params:
1018 return False
1019 elif not storage_params.get("pkg-dir"):
1020 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1021 f = "{}_/{}".format(storage_params["folder"], folder)
1022 else:
1023 f = "{}/{}".format(storage_params["folder"], folder)
1024 if file:
1025 return self.fs.file_exists("{}/{}".format(f, file), "file")
1026 else:
1027 if self.fs.file_exists(f, "dir"):
1028 if self.fs.dir_ls(f):
1029 return True
1030 return False
1031 else:
1032 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1033 f = "{}_/{}/{}".format(
1034 storage_params["folder"], storage_params["pkg-dir"], folder
1035 )
1036 else:
1037 f = "{}/{}/{}".format(
1038 storage_params["folder"], storage_params["pkg-dir"], folder
1039 )
1040 if file:
1041 return self.fs.file_exists("{}/{}".format(f, file), "file")
1042 else:
1043 if self.fs.file_exists(f, "dir"):
1044 if self.fs.dir_ls(f):
1045 return True
1046 return False
1048 @staticmethod
1049 def validate_internal_virtual_links(indata):
1050 all_ivld_ids = set()
1051 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1052 ivld_id = ivld.get("id")
1053 if ivld_id and ivld_id in all_ivld_ids:
1054 raise EngineException(
1055 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1056 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1057 )
1058 else:
1059 all_ivld_ids.add(ivld_id)
1061 for vdu in get_iterable(indata.get("vdu")):
1062 for int_cpd in get_iterable(vdu.get("int-cpd")):
1063 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1064 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1065 raise EngineException(
1066 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1067 "int-virtual-link-desc".format(
1068 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1069 ),
1070 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1071 )
1073 for df in get_iterable(indata.get("df")):
1074 for vlp in get_iterable(df.get("virtual-link-profile")):
1075 vlp_ivld_id = vlp.get("id")
1076 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1077 raise EngineException(
1078 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1079 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1080 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1081 )
1083 @staticmethod
1084 def validate_monitoring_params(indata):
1085 all_monitoring_params = set()
1086 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1087 for mp in get_iterable(ivld.get("monitoring-parameters")):
1088 mp_id = mp.get("id")
1089 if mp_id and mp_id in all_monitoring_params:
1090 raise EngineException(
1091 "Duplicated monitoring-parameter id in "
1092 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1093 ivld["id"], mp_id
1094 ),
1095 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1096 )
1097 else:
1098 all_monitoring_params.add(mp_id)
1100 for vdu in get_iterable(indata.get("vdu")):
1101 for mp in get_iterable(vdu.get("monitoring-parameter")):
1102 mp_id = mp.get("id")
1103 if mp_id and mp_id in all_monitoring_params:
1104 raise EngineException(
1105 "Duplicated monitoring-parameter id in "
1106 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1107 vdu["id"], mp_id
1108 ),
1109 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1110 )
1111 else:
1112 all_monitoring_params.add(mp_id)
1114 for df in get_iterable(indata.get("df")):
1115 for mp in get_iterable(df.get("monitoring-parameter")):
1116 mp_id = mp.get("id")
1117 if mp_id and mp_id in all_monitoring_params:
1118 raise EngineException(
1119 "Duplicated monitoring-parameter id in "
1120 "df[id='{}']:monitoring-parameter[id='{}']".format(
1121 df["id"], mp_id
1122 ),
1123 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1124 )
1125 else:
1126 all_monitoring_params.add(mp_id)
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling-aspect sections of every deployment flavor.

        Checks that:
        - every vdu-delta references a vdu-id declared in some
          instantiation-level vdu-level entry,
        - every scaling-criteria references a declared monitoring parameter,
        - every scaling-config-action has a matching day1-2 configuration and
          its vnf-config-primitive-name-ref matches a declared
          config-primitive.

        :param indata: VNFD content (dict)
        :raises EngineException: 422 on any dangling reference
        """
        all_monitoring_params = set()
        all_vdu_ids = set()
        # Valid vdu ids are the ones used in some instantiation-level
        for df in get_iterable(indata.get("df")):
            for il in get_iterable(df.get("instantiation-level")):
                for vl in get_iterable(il.get("vdu-level")):
                    all_vdu_ids.add(vl.get("vdu-id"))

        # Monitoring parameters may be declared at ivld, vdu or df level
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # NOTE(review): assumes "aspect-delta-details" is present in
                # every scaling-aspect; a missing key would raise
                # AttributeError here — confirm the model guarantees it.
                for deltas in get_iterable(
                    sa.get("aspect-delta-details").get("deltas")
                ):
                    for vds in get_iterable(deltas.get("vdu-delta")):
                        sa_vdu_id = vds.get("id")
                        if sa_vdu_id and sa_vdu_id not in all_vdu_ids:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:aspect-delta-details"
                                "[delta='{}']: "
                                "vdu-id='{}' not defined in vdu".format(
                                    df["id"],
                                    sa["id"],
                                    deltas["id"],
                                    sa_vdu_id,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration
                    # entry whose id equals the VNF id
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # The referenced primitive must exist in some day1-2 entry
                    # (for/else: raise only when no primitive name matched)
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1235 @staticmethod
1236 def validate_healing_group_descriptor(indata):
1237 all_vdu_ids = set()
1238 for df in get_iterable(indata.get("df")):
1239 for il in get_iterable(df.get("instantiation-level")):
1240 for vl in get_iterable(il.get("vdu-level")):
1241 all_vdu_ids.add(vl.get("vdu-id"))
1243 for df in get_iterable(indata.get("df")):
1244 for ha in get_iterable(df.get("healing-aspect")):
1245 for hp in get_iterable(ha.get("healing-policy")):
1246 hp_monitoring_param = hp.get("vdu-id")
1247 if hp_monitoring_param and hp_monitoring_param not in all_vdu_ids:
1248 raise EngineException(
1249 "df[id='{}']:healing-aspect[id='{}']:healing-policy"
1250 "[name='{}']: "
1251 "vdu-id='{}' not defined in vdu".format(
1252 df["id"],
1253 ha["id"],
1254 hp["event-name"],
1255 hp_monitoring_param,
1256 ),
1257 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1258 )
1260 @staticmethod
1261 def validate_alarm_group_descriptor(indata):
1262 all_monitoring_params = set()
1263 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1264 for mp in get_iterable(ivld.get("monitoring-parameters")):
1265 all_monitoring_params.add(mp.get("id"))
1267 for vdu in get_iterable(indata.get("vdu")):
1268 for mp in get_iterable(vdu.get("monitoring-parameter")):
1269 all_monitoring_params.add(mp.get("id"))
1271 for df in get_iterable(indata.get("df")):
1272 for mp in get_iterable(df.get("monitoring-parameter")):
1273 all_monitoring_params.add(mp.get("id"))
1275 for vdus in get_iterable(indata.get("vdu")):
1276 for alarms in get_iterable(vdus.get("alarm")):
1277 alarm_monitoring_param = alarms.get("vnf-monitoring-param-ref")
1278 if (
1279 alarm_monitoring_param
1280 and alarm_monitoring_param not in all_monitoring_params
1281 ):
1282 raise EngineException(
1283 "vdu[id='{}']:alarm[id='{}']:"
1284 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1285 vdus["id"],
1286 alarms["alarm-id"],
1287 alarm_monitoring_param,
1288 ),
1289 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1290 )
1292 @staticmethod
1293 def validate_storage_compute_descriptor(indata):
1294 all_vsd_ids = set()
1295 for vsd in get_iterable(indata.get("virtual-storage-desc")):
1296 all_vsd_ids.add(vsd.get("id"))
1298 all_vcd_ids = set()
1299 for vcd in get_iterable(indata.get("virtual-compute-desc")):
1300 all_vcd_ids.add(vcd.get("id"))
1302 for vdus in get_iterable(indata.get("vdu")):
1303 for vsd_ref in vdus.get("virtual-storage-desc"):
1304 if vsd_ref and vsd_ref not in all_vsd_ids:
1305 raise EngineException(
1306 "vdu[virtual-storage-desc='{}']"
1307 "not defined in vnfd".format(
1308 vsd_ref,
1309 ),
1310 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1311 )
1313 for vdus in get_iterable(indata.get("vdu")):
1314 vcd_ref = vdus.get("virtual-compute-desc")
1315 if vcd_ref and vcd_ref not in all_vcd_ids:
1316 raise EngineException(
1317 "vdu[virtual-compute-desc='{}']"
1318 "not defined in vnfd".format(
1319 vdus["virtual-compute-desc"],
1320 ),
1321 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1322 )
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove any package operation that referenced this VNF package
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # Revision documents embed the package _id in their own _id,
        # hence the regex match instead of an exact lookup
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1338 def sol005_projection(self, data):
1339 data["onboardingState"] = data["_admin"]["onboardingState"]
1340 data["operationalState"] = data["_admin"]["operationalState"]
1341 data["usageState"] = data["_admin"]["usageState"]
1343 links = {}
1344 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1345 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1346 links["packageContent"] = {
1347 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1348 }
1349 data["_links"] = links
1351 return super().sol005_projection(data)
1353 @staticmethod
1354 def find_software_version(vnfd: dict) -> str:
1355 """Find the sotware version in the VNFD descriptors
1357 Args:
1358 vnfd (dict): Descriptor as a dictionary
1360 Returns:
1361 software-version (str)
1362 """
1363 default_sw_version = "1.0"
1364 if vnfd.get("vnfd"):
1365 vnfd = vnfd["vnfd"]
1366 if vnfd.get("software-version"):
1367 return vnfd["software-version"]
1368 else:
1369 return default_sw_version
1371 @staticmethod
1372 def extract_policies(vnfd: dict) -> dict:
1373 """Removes the policies from the VNFD descriptors
1375 Args:
1376 vnfd (dict): Descriptor as a dictionary
1378 Returns:
1379 vnfd (dict): VNFD which does not include policies
1380 """
1381 for df in vnfd.get("df", {}):
1382 for policy in ["scaling-aspect", "healing-aspect"]:
1383 if df.get(policy, {}):
1384 df.pop(policy)
1385 for vdu in vnfd.get("vdu", {}):
1386 for alarm_policy in ["alarm", "monitoring-parameter"]:
1387 if vdu.get(alarm_policy, {}):
1388 vdu.pop(alarm_policy)
1389 return vnfd
1391 @staticmethod
1392 def extract_day12_primitives(vnfd: dict) -> dict:
1393 """Removes the day12 primitives from the VNFD descriptors
1395 Args:
1396 vnfd (dict): Descriptor as a dictionary
1398 Returns:
1399 vnfd (dict)
1400 """
1401 for df_id, df in enumerate(vnfd.get("df", {})):
1402 if (
1403 df.get("lcm-operations-configuration", {})
1404 .get("operate-vnf-op-config", {})
1405 .get("day1-2")
1406 ):
1407 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1408 "day1-2"
1409 )
1410 for config_id, config in enumerate(day12):
1411 for key in [
1412 "initial-config-primitive",
1413 "config-primitive",
1414 "terminate-config-primitive",
1415 ]:
1416 config.pop(key, None)
1417 day12[config_id] = config
1418 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1419 "day1-2"
1420 ] = day12
1421 vnfd["df"][df_id] = df
1422 return vnfd
1424 def remove_modifiable_items(self, vnfd: dict) -> dict:
1425 """Removes the modifiable parts from the VNFD descriptors
1427 It calls different extract functions according to different update types
1428 to clear all the modifiable items from VNFD
1430 Args:
1431 vnfd (dict): Descriptor as a dictionary
1433 Returns:
1434 vnfd (dict): Descriptor which does not include modifiable contents
1435 """
1436 if vnfd.get("vnfd"):
1437 vnfd = vnfd["vnfd"]
1438 vnfd.pop("_admin", None)
1439 # If the other extractions need to be done from VNFD,
1440 # the new extract methods could be appended to below list.
1441 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1442 vnfd_temp = extract_function(vnfd)
1443 vnfd = vnfd_temp
1444 return vnfd
1446 def _validate_descriptor_changes(
1447 self,
1448 descriptor_id: str,
1449 descriptor_file_name: str,
1450 old_descriptor_directory: str,
1451 new_descriptor_directory: str,
1452 ):
1453 """Compares the old and new VNFD descriptors and validates the new descriptor.
1455 Args:
1456 old_descriptor_directory (str): Directory of descriptor which is in-use
1457 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1459 Returns:
1460 None
1462 Raises:
1463 EngineException: In case of error when there are unallowed changes
1464 """
1465 try:
1466 # If VNFD does not exist in DB or it is not in use by any NS,
1467 # validation is not required.
1468 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1469 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1470 return
1472 # Get the old and new descriptor contents in order to compare them.
1473 with self.fs.file_open(
1474 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1475 ) as old_descriptor_file:
1476 with self.fs.file_open(
1477 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1478 ) as new_descriptor_file:
1479 old_content = yaml.safe_load(old_descriptor_file.read())
1480 new_content = yaml.safe_load(new_descriptor_file.read())
1482 # If software version has changed, we do not need to validate
1483 # the differences anymore.
1484 if old_content and new_content:
1485 if self.find_software_version(
1486 old_content
1487 ) != self.find_software_version(new_content):
1488 return
1490 disallowed_change = DeepDiff(
1491 self.remove_modifiable_items(old_content),
1492 self.remove_modifiable_items(new_content),
1493 )
1495 if disallowed_change:
1496 changed_nodes = functools.reduce(
1497 lambda a, b: a + " , " + b,
1498 [
1499 node.lstrip("root")
1500 for node in disallowed_change.get(
1501 "values_changed"
1502 ).keys()
1503 ],
1504 )
1506 raise EngineException(
1507 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1508 "there are disallowed changes in the vnf descriptor.",
1509 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1510 )
1511 except (
1512 DbException,
1513 AttributeError,
1514 IndexError,
1515 KeyError,
1516 ValueError,
1517 ) as e:
1518 raise type(e)(
1519 "VNF Descriptor could not be processed with error: {}.".format(e)
1520 )
class NsdTopic(DescriptorTopic):
    """Topic handling NS descriptors (NSD): validation, storage and SOL005 projection."""

    # Database collection and kafka message topic used for NSDs
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)
    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 pyangbind model.

        :param item: topic name (kept for interface symmetry; not used here)
        :param data: NSD content to validate
        :param force: when True, unknown fields are skipped instead of failing
        :return: the descriptor as re-serialized by pyangbind (normalized)
        :raises EngineException: 422 for old-format or invalid descriptors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile of the first df is saved apart and restored after the
            # round-trip (presumably the pyangbind serialization does not
            # preserve it faithfully — confirm before removing this workaround)
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize back and strip envelopes/yang prefixes to obtain the
            # normalized descriptor content
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
1559 @staticmethod
1560 def _descriptor_data_is_in_old_format(data):
1561 return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
1563 @staticmethod
1564 def _remove_envelop(indata=None):
1565 if not indata:
1566 return {}
1567 clean_indata = indata
1569 if clean_indata.get("nsd"):
1570 clean_indata = clean_indata["nsd"]
1571 elif clean_indata.get("etsi-nfv-nsd:nsd"):
1572 clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
1573 if clean_indata.get("nsd"):
1574 if (
1575 not isinstance(clean_indata["nsd"], list)
1576 or len(clean_indata["nsd"]) != 1
1577 ):
1578 raise EngineException("'nsd' must be a list of only one element")
1579 clean_indata = clean_indata["nsd"][0]
1580 return clean_indata
1582 def _validate_input_new(self, indata, storage_params, force=False):
1583 indata.pop("nsdOnboardingState", None)
1584 indata.pop("nsdOperationalState", None)
1585 indata.pop("nsdUsageState", None)
1587 indata.pop("links", None)
1589 indata = self.pyangbind_validation("nsds", indata, force)
1590 # Cross references validation in the descriptor
1591 # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
1592 for vld in get_iterable(indata.get("virtual-link-desc")):
1593 self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
1594 for fg in get_iterable(indata.get("vnffgd")):
1595 self.validate_vnffgd_data(fg, indata)
1597 self.validate_vnf_profiles_vnfd_id(indata)
1599 return indata
1601 @staticmethod
1602 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
1603 if not vld.get("mgmt-network"):
1604 return
1605 vld_id = vld.get("id")
1606 for df in get_iterable(indata.get("df")):
1607 for vlp in get_iterable(df.get("virtual-link-profile")):
1608 if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
1609 if vlp.get("virtual-link-protocol-data"):
1610 raise EngineException(
1611 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1612 "protocol-data You cannot set a virtual-link-protocol-data "
1613 "when mgmt-network is True".format(df["id"], vlp["id"]),
1614 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1615 )
1617 @staticmethod
1618 def validate_vnffgd_data(fg, indata):
1619 position_list = []
1620 all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id")))
1621 for fgposition in get_iterable(fg.get("nfp-position-element")):
1622 position_list.append(fgposition["id"])
1624 for nfpd in get_iterable(fg.get("nfpd")):
1625 nfp_position = []
1626 for position in get_iterable(nfpd.get("position-desc-id")):
1627 nfp_position = position.get("nfp-position-element-id")
1628 if position == "nfp-position-element-id":
1629 nfp_position = position.get("nfp-position-element-id")
1630 if nfp_position[0] not in position_list:
1631 raise EngineException(
1632 "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
1633 "does not match any nfp-position-element".format(
1634 nfpd["id"], nfp_position[0]
1635 ),
1636 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1637 )
1639 for cp in get_iterable(position.get("cp-profile-id")):
1640 for cpe in get_iterable(cp.get("constituent-profile-elements")):
1641 constituent_base_element_id = cpe.get(
1642 "constituent-base-element-id"
1643 )
1644 if (
1645 constituent_base_element_id
1646 and constituent_base_element_id not in all_vnf_ids
1647 ):
1648 raise EngineException(
1649 "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
1650 "does not match any constituent-base-element-id".format(
1651 cpe["id"], constituent_base_element_id
1652 ),
1653 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1654 )
1656 @staticmethod
1657 def validate_vnf_profiles_vnfd_id(indata):
1658 all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
1659 for df in get_iterable(indata.get("df")):
1660 for vnf_profile in get_iterable(df.get("vnf-profile")):
1661 vnfd_id = vnf_profile.get("vnfd-id")
1662 if vnfd_id and vnfd_id not in all_vnfd_ids:
1663 raise EngineException(
1664 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1665 "does not match any vnfd-id".format(
1666 df["id"], vnf_profile["id"], vnfd_id
1667 ),
1668 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1669 )
    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        Sanitize an NSD edit payload, moving SOL005 attributes to _admin.

        :param indata: edit payload; looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        :param content: current database content of the descriptor
        :param force: unused here, kept for interface symmetry
        :return: the sanitized indata
        :raises EngineException: 400 on invalid values, 409 on no-op state change
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            # Only the two SOL005 operational states are accepted
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the operational state to its current value is reported
        # as a conflict rather than silently accepted
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata
    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        # "force" skips all dependency validation
        if session["force"]:
            return
        # Raises 409 if any constituent vnfd-id does not exist in this project
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
1737 def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
1738 vnfds_index = {}
1739 if descriptor.get("vnfd-id") and not session["force"]:
1740 for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
1741 query_filter = self._get_project_filter(session)
1742 query_filter["id"] = vnfd_id
1743 vnf_list = self.db.get_list("vnfds", query_filter)
1744 if not vnf_list:
1745 raise EngineException(
1746 "Descriptor error at 'vnfd-id'='{}' references a non "
1747 "existing vnfd".format(vnfd_id),
1748 http_code=HTTPStatus.CONFLICT,
1749 )
1750 vnfds_index[vnfd_id] = vnf_list[0]
1751 return vnfds_index
1753 @staticmethod
1754 def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
1755 for vnf_profile in get_iterable(df.get("vnf-profile")):
1756 vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
1757 all_vnfd_ext_cpds = set()
1758 for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
1759 if ext_cpd.get("id"):
1760 all_vnfd_ext_cpds.add(ext_cpd.get("id"))
1762 for virtual_link in get_iterable(
1763 vnf_profile.get("virtual-link-connectivity")
1764 ):
1765 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
1766 vl_cpd_id = vl_cpd.get("constituent-cpd-id")
1767 if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
1768 raise EngineException(
1769 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1770 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1771 "non existing ext-cpd:id inside vnfd '{}'".format(
1772 df["id"],
1773 vnf_profile["id"],
1774 virtual_link["virtual-link-profile-id"],
1775 vl_cpd_id,
1776 vnfd["id"],
1777 ),
1778 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1779 )
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit conflict checks, then verify that every
        constituent VNFD (and its connection points) referenced by the
        edited NSD exists in this project.

        :return: the (possibly amended) final content
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content
1790 def check_conflict_on_del(self, session, _id, db_content):
1791 """
1792 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1793 that NSD can be public and be used by other projects.
1794 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1795 :param _id: nsd internal id
1796 :param db_content: The database content of the _id
1797 :return: None or raises EngineException with the conflict
1798 """
1799 if session["force"]:
1800 return
1801 descriptor = db_content
1802 descriptor_id = descriptor.get("id")
1803 if not descriptor_id: # empty nsd not uploaded
1804 return
1806 # check NSD used by NS
1807 _filter = self._get_project_filter(session)
1808 _filter["nsd-id"] = _id
1809 if self.db.get_list("nsrs", _filter):
1810 raise EngineException(
1811 "There is at least one NS instance using this descriptor",
1812 http_code=HTTPStatus.CONFLICT,
1813 )
1815 # check NSD referenced by NST
1816 del _filter["nsd-id"]
1817 _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1818 if self.db.get_list("nsts", _filter):
1819 raise EngineException(
1820 "There is at least one NetSlice Template referencing this descriptor",
1821 http_code=HTTPStatus.CONFLICT,
1822 )
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super).
        Deletes associated NSD revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Revision documents embed the descriptor _id in their own _id,
        # hence the regex match instead of an exact lookup
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1837 @staticmethod
1838 def extract_day12_primitives(nsd: dict) -> dict:
1839 """Removes the day12 primitives from the NSD descriptors
1841 Args:
1842 nsd (dict): Descriptor as a dictionary
1844 Returns:
1845 nsd (dict): Cleared NSD
1846 """
1847 if nsd.get("ns-configuration"):
1848 for key in [
1849 "config-primitive",
1850 "initial-config-primitive",
1851 "terminate-config-primitive",
1852 ]:
1853 nsd["ns-configuration"].pop(key, None)
1854 return nsd
1856 def remove_modifiable_items(self, nsd: dict) -> dict:
1857 """Removes the modifiable parts from the VNFD descriptors
1859 It calls different extract functions according to different update types
1860 to clear all the modifiable items from NSD
1862 Args:
1863 nsd (dict): Descriptor as a dictionary
1865 Returns:
1866 nsd (dict): Descriptor which does not include modifiable contents
1867 """
1868 while isinstance(nsd, dict) and nsd.get("nsd"):
1869 nsd = nsd["nsd"]
1870 if isinstance(nsd, list):
1871 nsd = nsd[0]
1872 nsd.pop("_admin", None)
1873 # If the more extractions need to be done from NSD,
1874 # the new extract methods could be appended to below list.
1875 for extract_function in [self.extract_day12_primitives]:
1876 nsd_temp = extract_function(nsd)
1877 nsd = nsd_temp
1878 return nsd
    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_id: internal _id of the NSD being updated
            descriptor_file_name: file name of the descriptor yaml
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    # Any difference outside the modifiable sections is
                    # rejected (unlike the VNFD case, a software-version
                    # change does not skip the check here)
                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # NOTE(review): assumes the diff always contains
                            # "values_changed"; other change kinds would raise
                            # AttributeError, converted to a processing error
                            # by the except clause below.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )
1950 def sol005_projection(self, data):
1951 data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
1952 data["nsdOperationalState"] = data["_admin"]["operationalState"]
1953 data["nsdUsageState"] = data["_admin"]["usageState"]
1955 links = {}
1956 links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
1957 links["nsd_content"] = {
1958 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
1959 }
1960 data["_links"] = links
1962 return super().sol005_projection(data)
class NstTopic(DescriptorTopic):
    """Topic handler for Network Slice Templates (NST)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the NST yang model using pyangbind.

        :param item: topic name (unused, kept for interface compatibility)
        :param data: descriptor content to validate
        :param force: when True, unknown yang fields are skipped
        :return: validated descriptor content without its envelope
        :raises EngineException: on any validation failure (422)
        """
        try:
            nst_obj = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=nst_obj,
                path_helper=True,
                skip_unknown=force,
            )
            serialized = pybindJSON.dumps(nst_obj, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Return the NST content without its 'nst' or 'nst:nst' envelope.

        Each envelope, when present, must be a single-element list.
        """
        if not indata:
            return {}
        for envelope in ("nst", "nst:nst"):
            wrapped = indata.get(envelope)
            if wrapped:
                if not isinstance(wrapped, list) or len(wrapped) != 1:
                    raise EngineException(
                        "'{}' must be a list only one element".format(envelope)
                    )
                return wrapped[0]
        return indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Strip read-only state fields and run yang validation on a new NST."""
        for state_field in ("onboardingState", "operationalState", "usageState"):
            indata.pop(state_field, None)
        validated = self.pyangbind_validation("nsts", indata, force)
        return validated.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        subnets = descriptor.get("netslice-subnet")
        if not subnets:
            return
        for subnet in subnets:
            referenced_nsd = subnet["nsd-ref"]
            query = self._get_project_filter(session)
            query["id"] = referenced_nsd
            if not self.db.get_list("nsds", query):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(referenced_nsd),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base conflict checks, then verify all referenced NSDs exist."""
        merged = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        self._check_descriptor_dependencies(session, merged)
        return merged

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        # Look for Netslice Instances of this project that reference this NST.
        usage_filter = self._get_project_filter(session)
        usage_filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", usage_filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose SOL005 state fields and hypermedia links on *data*."""
        admin = data["_admin"]
        data["onboardingState"] = admin["onboardingState"]
        data["operationalState"] = admin["operationalState"]
        data["usageState"] = admin["usageState"]

        template_url = "/nst/v1/netslice_templates/{}".format(data["_id"])
        data["_links"] = {
            "self": {"href": template_url},
            "nst": {"href": template_url + "/nst"},
        }

        return super().sol005_projection(data)
class PduTopic(BaseTopic):
    """Topic handler for Physical Deployment Units (PDU)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply base formatting, then set the initial PDU admin states."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # Look for VNF records of this project whose VDUs reference this PDU.
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
class VnfPkgOpTopic(BaseTopic):
    """Topic handler for VNF package operations (KDU upgrades/rollbacks).

    Entries are create-only: edit and delete are explicitly disallowed.
    """

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing a VNF package operation is not supported."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting a VNF package operation is not supported."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of VNF package operations is not supported."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)

        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # Locate the KDU the operation targets.
        matching_kdu = next(
            (kdu for kdu in vnfd.get("kdu", []) if kdu["name"] == kdu_name), None
        )
        if matching_kdu is None:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )

        helm_chart = matching_kdu.get("helm-chart")
        juju_bundle = matching_kdu.get("juju-bundle")
        if helm_chart:
            indata["helm-chart"] = helm_chart
            artifact_ref = helm_chart
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            artifact_ref = juju_bundle
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )

        # A "<repo>/<name>" reference implies an external k8s repository.
        match = fullmatch(r"([^/]*)/([^/]*)", artifact_ref)
        repo_name = match.group(1) if match else None
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url

        # Build and persist the operation record, then notify via the bus.
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None
class NsConfigTemplateTopic(DescriptorTopic):
    """Topic handler for NS configuration templates ("ns_config_template").

    A template stores per-NS instantiation parameters ("vnf", "vld",
    "additionalParamsForVnf") that NS instances can reference at
    instantiation time via "nsConfigTemplateId".
    """

    topic = "ns_config_template"
    topic_msg = "nsd"
    schema_new = ns_config_template
    # Maps each editable "config" section to the schema used to validate it.
    instantiation_params = {
        "vnf": vnf_schema,
        "vld": vld_schema,
        "additionalParamsForVnf": additional_params_for_vnf,
    }

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NS CONFIG TEMPLATE. Only NSRs belonging to this project are considered.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: ns config template internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("nsdId")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NS CONFIG TEMPLATE used by NS
        ns_config_template_id = _id

        if self.db.get_list(
            "nsrs", {"instantiate_params.nsConfigTemplateId": ns_config_template_id}
        ):
            raise EngineException(
                "There is at least one NS instance using this template",
                http_code=HTTPStatus.CONFLICT,
            )

    def check_unique_template_name(self, edit_content, _id, session):
        """
        Check whether the name of the template is unique or not

        Raises EngineException when a template with the requested name
        already exists under a different _id; returns None otherwise.
        """
        if edit_content.get("name"):
            name = edit_content.get("name")
            db_content = self.db.get_one(
                "ns_config_template", {"name": name}, fail_on_empty=False
            )
            if db_content is not None:
                # NOTE(review): the matched document either has this _id (the
                # template keeps its own name: allowed) or a different one
                # (name taken: conflict); the elif condition is the exact
                # complement of the if above.
                if db_content.get("_id") == _id:
                    if db_content.get("name") == name:
                        return
                elif db_content.get("_id") != _id:
                    raise EngineException(
                        "{} of the template already exist".format(name)
                    )
            else:
                return

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Check the input data format
        And the edit content data too.

        Validates the edited name (uniqueness), nsdId (immutable once set)
        and config sections (per-section schemas), then merges the edited
        config into final_content.
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        db_content_id = self.db.get_one(
            "ns_config_template", {"_id": _id}, fail_on_empty=False
        )
        # Only a partially filled template (missing name/nsdId/config) is
        # re-validated against the full creation schema.
        if not (
            db_content_id.get("name")
            and db_content_id.get("nsdId")
            and db_content_id.get("config")
        ):
            validate_input(edit_content, self.schema_new)

        try:
            for key, value in edit_content.items():
                if key == "name":
                    self.check_unique_template_name(edit_content, _id, session)
                elif key == "nsdId":
                    # nsdId may be set once; afterwards it is read-only.
                    ns_config_template = self.db.get_one(
                        "ns_config_template", {"_id": _id}, fail_on_empty=False
                    )
                    if not ns_config_template.get("nsdId"):
                        pass
                    else:
                        raise EngineException("Nsd id cannot be edited")
                elif key == "config":
                    edit_content_param = edit_content.get("config")
                    # NOTE(review): this inner loop shadows the outer
                    # key/value variables; harmless today because the outer
                    # loop rebinds them each iteration, but fragile.
                    for key, value in edit_content_param.items():
                        param = key
                        param_content = value
                        if param == "vnf":
                            for content in param_content:
                                for vdu in content.get("vdu"):
                                    if vdu.get("vim-flavor-name") and vdu.get(
                                        "vim-flavor-id"
                                    ):
                                        raise EngineException(
                                            "Instantiation parameters vim-flavor-name and vim-flavor-id are mutually exclusive"
                                        )
                        validate_input(param_content, self.instantiation_params[param])
                    final_content.update({"config": edit_content_param})
            return final_content
        except Exception as e:
            # NOTE(review): this broad except also re-wraps EngineExceptions
            # raised above, forcing their http code to 422.
            raise EngineException(
                "Error in instantiation parameters validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )