Fix Bug 2307 - Invalid references of vdu-id, monitoring-param, storage and compute...
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23 import re
24
25 # import logging
26 from deepdiff import DeepDiff
27 from hashlib import md5
28 from osm_common.dbbase import DbException, deep_update_rfc7396
29 from http import HTTPStatus
30 from time import time
31 from uuid import uuid4
32 from re import fullmatch
33 from zipfile import ZipFile
34 from urllib.parse import urlparse
35 from osm_nbi.validation import (
36 ValidationError,
37 pdu_new_schema,
38 pdu_edit_schema,
39 validate_input,
40 vnfpkgop_new_schema,
41 )
42 from osm_nbi.base_topic import (
43 BaseTopic,
44 EngineException,
45 get_iterable,
46 detect_descriptor_usage,
47 )
48 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
49 from osm_im.nst import nst as nst_im
50 from pyangbind.lib.serialise import pybindJSONDecoder
51 import pyangbind.lib.pybindJSON as pybindJSON
52 from osm_nbi import utils
53
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"

# Accepts lowercase-alphanumeric helm chart names with internal dashes, with an
# optional "repo/" prefix (used by VnfdTopic.validate_helm_chart; URLs are
# accepted there through a separate urlparse check).
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
59
60
61 class DescriptorTopic(BaseTopic):
    def __init__(self, db, fs, msg, auth):
        # Plain pass-through to BaseTopic; present so subclasses have a single
        # common ancestor constructor to call.
        super().__init__(db, fs, msg, auth)
64
    def _validate_input_new(self, indata, storage_params, force=False):
        # Default validation hook: subclasses (VnfdTopic, NsdTopic, ...)
        # override it with pyangbind and cross-reference checks. The base
        # implementation accepts the input unchanged.
        return indata
67
68 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
69 final_content = super().check_conflict_on_edit(
70 session, final_content, edit_content, _id
71 )
72
73 def _check_unique_id_name(descriptor, position=""):
74 for desc_key, desc_item in descriptor.items():
75 if isinstance(desc_item, list) and desc_item:
76 used_ids = []
77 desc_item_id = None
78 for index, list_item in enumerate(desc_item):
79 if isinstance(list_item, dict):
80 _check_unique_id_name(
81 list_item, "{}.{}[{}]".format(position, desc_key, index)
82 )
83 # Base case
84 if index == 0 and (
85 list_item.get("id") or list_item.get("name")
86 ):
87 desc_item_id = "id" if list_item.get("id") else "name"
88 if desc_item_id and list_item.get(desc_item_id):
89 if list_item[desc_item_id] in used_ids:
90 position = "{}.{}[{}]".format(
91 position, desc_key, index
92 )
93 raise EngineException(
94 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
95 desc_item_id,
96 list_item[desc_item_id],
97 position,
98 ),
99 HTTPStatus.UNPROCESSABLE_ENTITY,
100 )
101 used_ids.append(list_item[desc_item_id])
102
103 _check_unique_id_name(final_content)
104 # 1. validate again with pyangbind
105 # 1.1. remove internal keys
106 internal_keys = {}
107 for k in ("_id", "_admin"):
108 if k in final_content:
109 internal_keys[k] = final_content.pop(k)
110 storage_params = internal_keys["_admin"].get("storage")
111 serialized = self._validate_input_new(
112 final_content, storage_params, session["force"]
113 )
114
115 # 1.2. modify final_content with a serialized version
116 final_content = copy.deepcopy(serialized)
117 # 1.3. restore internal keys
118 for k, v in internal_keys.items():
119 final_content[k] = v
120 if session["force"]:
121 return final_content
122
123 # 2. check that this id is not present
124 if "id" in edit_content:
125 _filter = self._get_project_filter(session)
126
127 _filter["id"] = final_content["id"]
128 _filter["_id.neq"] = _id
129
130 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
131 raise EngineException(
132 "{} with id '{}' already exists for this project".format(
133 (str(self.topic))[:-1], final_content["id"]
134 ),
135 HTTPStatus.CONFLICT,
136 )
137
138 return final_content
139
140 @staticmethod
141 def format_on_new(content, project_id=None, make_public=False):
142 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
143 content["_admin"]["onboardingState"] = "CREATED"
144 content["_admin"]["operationalState"] = "DISABLED"
145 content["_admin"]["usageState"] = "NOT_IN_USE"
146
147 def delete_extra(self, session, _id, db_content, not_send_msg=None):
148 """
149 Deletes file system storage associated with the descriptor
150 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
151 :param _id: server internal id
152 :param db_content: The database content of the descriptor
153 :param not_send_msg: To not send message (False) or store content (list) instead
154 :return: None if ok or raises EngineException with the problem
155 """
156 self.fs.file_delete(_id, ignore_non_exist=True)
157 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
158 # Remove file revisions
159 if "revision" in db_content["_admin"]:
160 revision = db_content["_admin"]["revision"]
161 while revision > 0:
162 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
163 revision = revision - 1
164
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Obtain the single descriptor of `topic` whose SOL006 "id" equals `id`.

        :param db: database driver
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name, e.g. "vnfds" or "nsds"
        :param id: descriptor "id" field (not the internal database "_id")
        :return: the descriptor content
        :raises DbException: NOT_FOUND when nothing matches, CONFLICT when
            several descriptors match
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this retry builds the same project filter as the first
        # query above; presumably it was meant to target public descriptors
        # specifically — confirm against _get_project_filter semantics.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
199
200 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
201 """
202 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
203 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
204 (self.upload_content)
205 :param rollback: list to append created items at database in case a rollback may to be done
206 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
207 :param indata: data to be inserted
208 :param kwargs: used to override the indata descriptor
209 :param headers: http request headers
210 :return: _id, None: identity of the inserted data; and None as there is not any operation
211 """
212
213 # No needed to capture exceptions
214 # Check Quota
215 self.check_quota(session)
216
217 # _remove_envelop
218 if indata:
219 if "userDefinedData" in indata:
220 indata = indata["userDefinedData"]
221
222 # Override descriptor with query string kwargs
223 self._update_input_with_kwargs(indata, kwargs)
224 # uncomment when this method is implemented.
225 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
226 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
227
228 content = {"_admin": {"userDefinedData": indata, "revision": 0}}
229
230 self.format_on_new(
231 content, session["project_id"], make_public=session["public"]
232 )
233 _id = self.db.create(self.topic, content)
234 rollback.append({"topic": self.topic, "_id": _id})
235 self._send_msg("created", {"_id": _id})
236 return _id, None
237
238 def upload_content(self, session, _id, indata, kwargs, headers):
239 """
240 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
241 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
242 :param _id : the nsd,vnfd is already created, this is the id
243 :param indata: http body request
244 :param kwargs: user query string to override parameters. NOT USED
245 :param headers: http request headers
246 :return: True if package is completely uploaded or False if partial content has been uploded
247 Raise exception on error
248 """
249 # Check that _id exists and it is valid
250 current_desc = self.show(session, _id)
251
252 content_range_text = headers.get("Content-Range")
253 expected_md5 = headers.get("Content-File-MD5")
254 compressed = None
255 content_type = headers.get("Content-Type")
256 if (
257 content_type
258 and "application/gzip" in content_type
259 or "application/x-gzip" in content_type
260 ):
261 compressed = "gzip"
262 if content_type and "application/zip" in content_type:
263 compressed = "zip"
264 filename = headers.get("Content-Filename")
265 if not filename and compressed:
266 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
267 elif not filename:
268 filename = "package"
269
270 revision = 1
271 if "revision" in current_desc["_admin"]:
272 revision = current_desc["_admin"]["revision"] + 1
273
274 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
275 file_pkg = None
276 error_text = ""
277 fs_rollback = []
278
279 try:
280 if content_range_text:
281 content_range = (
282 content_range_text.replace("-", " ").replace("/", " ").split()
283 )
284 if (
285 content_range[0] != "bytes"
286 ): # TODO check x<y not negative < total....
287 raise IndexError()
288 start = int(content_range[1])
289 end = int(content_range[2]) + 1
290 total = int(content_range[3])
291 else:
292 start = 0
293 # Rather than using a temp folder, we will store the package in a folder based on
294 # the current revision.
295 proposed_revision_path = (
296 _id + ":" + str(revision)
297 ) # all the content is upload here and if ok, it is rename from id_ to is folder
298
299 if start:
300 if not self.fs.file_exists(proposed_revision_path, "dir"):
301 raise EngineException(
302 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
303 )
304 else:
305 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
306 self.fs.mkdir(proposed_revision_path)
307 fs_rollback.append(proposed_revision_path)
308
309 storage = self.fs.get_params()
310 storage["folder"] = proposed_revision_path
311
312 file_path = (proposed_revision_path, filename)
313 if self.fs.file_exists(file_path, "file"):
314 file_size = self.fs.file_size(file_path)
315 else:
316 file_size = 0
317 if file_size != start:
318 raise EngineException(
319 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
320 file_size, start
321 ),
322 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
323 )
324 file_pkg = self.fs.file_open(file_path, "a+b")
325 if isinstance(indata, dict):
326 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
327 file_pkg.write(indata_text.encode(encoding="utf-8"))
328 else:
329 indata_len = 0
330 while True:
331 indata_text = indata.read(4096)
332 indata_len += len(indata_text)
333 if not indata_text:
334 break
335 file_pkg.write(indata_text)
336 if content_range_text:
337 if indata_len != end - start:
338 raise EngineException(
339 "Mismatch between Content-Range header {}-{} and body length of {}".format(
340 start, end - 1, indata_len
341 ),
342 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
343 )
344 if end != total:
345 # TODO update to UPLOADING
346 return False
347
348 # PACKAGE UPLOADED
349 if expected_md5:
350 file_pkg.seek(0, 0)
351 file_md5 = md5()
352 chunk_data = file_pkg.read(1024)
353 while chunk_data:
354 file_md5.update(chunk_data)
355 chunk_data = file_pkg.read(1024)
356 if expected_md5 != file_md5.hexdigest():
357 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
358 file_pkg.seek(0, 0)
359 if compressed == "gzip":
360 tar = tarfile.open(mode="r", fileobj=file_pkg)
361 descriptor_file_name = None
362 for tarinfo in tar:
363 tarname = tarinfo.name
364 tarname_path = tarname.split("/")
365 if (
366 not tarname_path[0] or ".." in tarname_path
367 ): # if start with "/" means absolute path
368 raise EngineException(
369 "Absolute path or '..' are not allowed for package descriptor tar.gz"
370 )
371 if len(tarname_path) == 1 and not tarinfo.isdir():
372 raise EngineException(
373 "All files must be inside a dir for package descriptor tar.gz"
374 )
375 if (
376 tarname.endswith(".yaml")
377 or tarname.endswith(".json")
378 or tarname.endswith(".yml")
379 ):
380 storage["pkg-dir"] = tarname_path[0]
381 if len(tarname_path) == 2:
382 if descriptor_file_name:
383 raise EngineException(
384 "Found more than one descriptor file at package descriptor tar.gz"
385 )
386 descriptor_file_name = tarname
387 if not descriptor_file_name:
388 raise EngineException(
389 "Not found any descriptor file at package descriptor tar.gz"
390 )
391 storage["descriptor"] = descriptor_file_name
392 storage["zipfile"] = filename
393 self.fs.file_extract(tar, proposed_revision_path)
394 with self.fs.file_open(
395 (proposed_revision_path, descriptor_file_name), "r"
396 ) as descriptor_file:
397 content = descriptor_file.read()
398 elif compressed == "zip":
399 zipfile = ZipFile(file_pkg)
400 descriptor_file_name = None
401 for package_file in zipfile.infolist():
402 zipfilename = package_file.filename
403 file_path = zipfilename.split("/")
404 if (
405 not file_path[0] or ".." in zipfilename
406 ): # if start with "/" means absolute path
407 raise EngineException(
408 "Absolute path or '..' are not allowed for package descriptor zip"
409 )
410
411 if (
412 zipfilename.endswith(".yaml")
413 or zipfilename.endswith(".json")
414 or zipfilename.endswith(".yml")
415 ) and (
416 zipfilename.find("/") < 0
417 or zipfilename.find("Definitions") >= 0
418 ):
419 storage["pkg-dir"] = ""
420 if descriptor_file_name:
421 raise EngineException(
422 "Found more than one descriptor file at package descriptor zip"
423 )
424 descriptor_file_name = zipfilename
425 if not descriptor_file_name:
426 raise EngineException(
427 "Not found any descriptor file at package descriptor zip"
428 )
429 storage["descriptor"] = descriptor_file_name
430 storage["zipfile"] = filename
431 self.fs.file_extract(zipfile, proposed_revision_path)
432
433 with self.fs.file_open(
434 (proposed_revision_path, descriptor_file_name), "r"
435 ) as descriptor_file:
436 content = descriptor_file.read()
437 else:
438 content = file_pkg.read()
439 storage["descriptor"] = descriptor_file_name = filename
440
441 if descriptor_file_name.endswith(".json"):
442 error_text = "Invalid json format "
443 indata = json.load(content)
444 else:
445 error_text = "Invalid yaml format "
446 indata = yaml.safe_load(content)
447
448 # Need to close the file package here so it can be copied from the
449 # revision to the current, unrevisioned record
450 if file_pkg:
451 file_pkg.close()
452 file_pkg = None
453
454 # Fetch both the incoming, proposed revision and the original revision so we
455 # can call a validate method to compare them
456 current_revision_path = _id + "/"
457 self.fs.sync(from_path=current_revision_path)
458 self.fs.sync(from_path=proposed_revision_path)
459
460 if revision > 1:
461 try:
462 self._validate_descriptor_changes(
463 _id,
464 descriptor_file_name,
465 current_revision_path,
466 proposed_revision_path,
467 )
468 except Exception as e:
469 shutil.rmtree(
470 self.fs.path + current_revision_path, ignore_errors=True
471 )
472 shutil.rmtree(
473 self.fs.path + proposed_revision_path, ignore_errors=True
474 )
475 # Only delete the new revision. We need to keep the original version in place
476 # as it has not been changed.
477 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
478 raise e
479
480 indata = self._remove_envelop(indata)
481
482 # Override descriptor with query string kwargs
483 if kwargs:
484 self._update_input_with_kwargs(indata, kwargs)
485
486 current_desc["_admin"]["storage"] = storage
487 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
488 current_desc["_admin"]["operationalState"] = "ENABLED"
489 current_desc["_admin"]["modified"] = time()
490 current_desc["_admin"]["revision"] = revision
491
492 deep_update_rfc7396(current_desc, indata)
493 current_desc = self.check_conflict_on_edit(
494 session, current_desc, indata, _id=_id
495 )
496
497 # Copy the revision to the active package name by its original id
498 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
499 os.rename(
500 self.fs.path + proposed_revision_path,
501 self.fs.path + current_revision_path,
502 )
503 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
504 self.fs.mkdir(current_revision_path)
505 self.fs.reverse_sync(from_path=current_revision_path)
506
507 shutil.rmtree(self.fs.path + _id)
508
509 self.db.replace(self.topic, _id, current_desc)
510
511 # Store a copy of the package as a point in time revision
512 revision_desc = dict(current_desc)
513 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
514 self.db.create(self.topic + "_revisions", revision_desc)
515 fs_rollback = []
516
517 indata["_id"] = _id
518 self._send_msg("edited", indata)
519
520 # TODO if descriptor has changed because kwargs update content and remove cached zip
521 # TODO if zip is not present creates one
522 return True
523
524 except EngineException:
525 raise
526 except IndexError:
527 raise EngineException(
528 "invalid Content-Range header format. Expected 'bytes start-end/total'",
529 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
530 )
531 except IOError as e:
532 raise EngineException(
533 "invalid upload transaction sequence: '{}'".format(e),
534 HTTPStatus.BAD_REQUEST,
535 )
536 except tarfile.ReadError as e:
537 raise EngineException(
538 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
539 )
540 except (ValueError, yaml.YAMLError) as e:
541 raise EngineException(error_text + str(e))
542 except ValidationError as e:
543 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
544 finally:
545 if file_pkg:
546 file_pkg.close()
547 for file in fs_rollback:
548 self.fs.file_delete(file, ignore_non_exist=True)
549
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Decide which representations the client accepts; zip takes its exact
        # mime type so it can be echoed back in the response.
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # a directory path is answered with its listing; a file with its bytes
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype  accept  ZIP  TEXT  -> result
        # manyfiles         yes  X    -> zip
        #                   no   yes  -> error
        # onefile           yes  no   -> zip
        #                   X    yes  -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
640
641 def _remove_yang_prefixes_from_descriptor(self, descriptor):
642 new_descriptor = {}
643 for k, v in descriptor.items():
644 new_v = v
645 if isinstance(v, dict):
646 new_v = self._remove_yang_prefixes_from_descriptor(v)
647 elif isinstance(v, list):
648 new_v = list()
649 for x in v:
650 if isinstance(x, dict):
651 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
652 else:
653 new_v.append(x)
654 new_descriptor[k.split(":")[-1]] = new_v
655 return new_descriptor
656
    def pyangbind_validation(self, item, data, force=False):
        """
        Model validation hook; subclasses must override it with the actual
        pyangbind-based check. The base implementation always fails.

        :param item: item name, only used in the error message
        :param data: descriptor content (unused here)
        :param force: skip-unknown flag for subclasses (unused here)
        :raises EngineException: INTERNAL_SERVER_ERROR, always
        """
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
662
663 def _validate_input_edit(self, indata, content, force=False):
664 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
665 if "_id" in indata:
666 indata.pop("_id")
667 if "_admin" not in indata:
668 indata["_admin"] = {}
669
670 if "operationalState" in indata:
671 if indata["operationalState"] in ("ENABLED", "DISABLED"):
672 indata["_admin"]["operationalState"] = indata.pop("operationalState")
673 else:
674 raise EngineException(
675 "State '{}' is not a valid operational state".format(
676 indata["operationalState"]
677 ),
678 http_code=HTTPStatus.BAD_REQUEST,
679 )
680
681 # In the case of user defined data, we need to put the data in the root of the object
682 # to preserve current expected behaviour
683 if "userDefinedData" in indata:
684 data = indata.pop("userDefinedData")
685 if isinstance(data, dict):
686 indata["_admin"]["userDefinedData"] = data
687 else:
688 raise EngineException(
689 "userDefinedData should be an object, but is '{}' instead".format(
690 type(data)
691 ),
692 http_code=HTTPStatus.BAD_REQUEST,
693 )
694
695 if (
696 "operationalState" in indata["_admin"]
697 and content["_admin"]["operationalState"]
698 == indata["_admin"]["operationalState"]
699 ):
700 raise EngineException(
701 "operationalState already {}".format(
702 content["_admin"]["operationalState"]
703 ),
704 http_code=HTTPStatus.CONFLICT,
705 )
706
707 return indata
708
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """
        Hook called by upload_content when a new revision (revision > 1) is
        uploaded, so subclasses can compare the old and new package and reject
        forbidden modifications. The base implementation accepts everything.

        :param descriptor_id: internal database id of the descriptor
        :param descriptor_file_name: name of the descriptor file inside the package
        :param old_descriptor_directory: fs path of the currently active revision
        :param new_descriptor_directory: fs path of the proposed revision
        """
        # Example:
        # raise EngineException(
        #       "Error in validating new descriptor: <NODE> cannot be modified",
        #       http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass
722
723
724 class VnfdTopic(DescriptorTopic):
725 topic = "vnfds"
726 topic_msg = "vnfd"
727
728 def __init__(self, db, fs, msg, auth):
729 DescriptorTopic.__init__(self, db, fs, msg, auth)
730
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate a vnfd against the ETSI SOL006 YANG model using pyangbind.

        :param item: item name, only used in error messages ("vnfds")
        :param data: descriptor content (dict), already without envelope
        :param force: when True, unknown leafs are skipped instead of rejected
        :return: `data` deep-updated with the canonical serialized descriptor
        :raises EngineException: UNPROCESSABLE_ENTITY on pre-SOL006 format or
            any pyangbind validation failure
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            # round-trip through yaml and strip the yang module prefixes so the
            # serialized form can be merged back into the original content
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
756
757 @staticmethod
758 def _descriptor_data_is_in_old_format(data):
759 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
760
761 @staticmethod
762 def _remove_envelop(indata=None):
763 if not indata:
764 return {}
765 clean_indata = indata
766
767 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
768 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
769 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
770 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
771 elif clean_indata.get("vnfd"):
772 if not isinstance(clean_indata["vnfd"], dict):
773 raise EngineException("'vnfd' must be dict")
774 clean_indata = clean_indata["vnfd"]
775
776 return clean_indata
777
778 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
779 final_content = super().check_conflict_on_edit(
780 session, final_content, edit_content, _id
781 )
782
783 # set type of vnfd
784 contains_pdu = False
785 contains_vdu = False
786 for vdu in get_iterable(final_content.get("vdu")):
787 if vdu.get("pdu-type"):
788 contains_pdu = True
789 else:
790 contains_vdu = True
791 if contains_pdu:
792 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
793 elif contains_vdu:
794 final_content["_admin"]["type"] = "vnfd"
795 # if neither vud nor pdu do not fill type
796 return final_content
797
798 def check_conflict_on_del(self, session, _id, db_content):
799 """
800 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
801 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
802 that uses this vnfd
803 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
804 :param _id: vnfd internal id
805 :param db_content: The database content of the _id.
806 :return: None or raises EngineException with the conflict
807 """
808 if session["force"]:
809 return
810 descriptor = db_content
811 descriptor_id = descriptor.get("id")
812 if not descriptor_id: # empty vnfd not uploaded
813 return
814
815 _filter = self._get_project_filter(session)
816
817 # check vnfrs using this vnfd
818 _filter["vnfd-id"] = _id
819 if self.db.get_list("vnfrs", _filter):
820 raise EngineException(
821 "There is at least one VNF instance using this descriptor",
822 http_code=HTTPStatus.CONFLICT,
823 )
824
825 # check NSD referencing this VNFD
826 del _filter["vnfd-id"]
827 _filter["vnfd-id"] = descriptor_id
828 if self.db.get_list("nsds", _filter):
829 raise EngineException(
830 "There is at least one NS package referencing this descriptor",
831 http_code=HTTPStatus.CONFLICT,
832 )
833
    def _validate_input_new(self, indata, storage_params, force=False):
        """
        Full validation of a new vnfd: pyangbind (SOL006) model validation plus
        cross-reference checks inside the descriptor and against the package
        contents (charms, cloud-init files, helm charts).

        :param indata: vnfd content to validate
        :param storage_params: _admin storage data, used to locate package files
        :param force: forwarded to pyangbind to skip unknown leafs
        :return: the validated (serialized) descriptor content
        :raises EngineException: on any validation failure
        """
        # read-only SOL005 response fields must not be stored
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): called once per vdu although it does not use "vdu";
            # looks hoistable out of the loop — confirm before changing.
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)
        self.validate_healing_group_descriptor(indata)
        self.validate_alarm_group_descriptor(indata)
        self.validate_storage_compute_descriptor(indata)
        self.validate_helm_chart(indata)

        return indata
862
863 @staticmethod
864 def validate_helm_chart(indata):
865 def is_url(url):
866 result = urlparse(url)
867 return all([result.scheme, result.netloc])
868
869 kdus = indata.get("kdu", [])
870 for kdu in kdus:
871 helm_chart_value = kdu.get("helm-chart")
872 if not helm_chart_value:
873 continue
874 if not (
875 valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
876 ):
877 raise EngineException(
878 "helm-chart '{}' is not valid".format(helm_chart_value),
879 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
880 )
881
882 @staticmethod
883 def validate_mgmt_interface_connection_point(indata):
884 if not indata.get("vdu"):
885 return
886 if not indata.get("mgmt-cp"):
887 raise EngineException(
888 "'mgmt-cp' is a mandatory field and it is not defined",
889 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
890 )
891
892 for cp in get_iterable(indata.get("ext-cpd")):
893 if cp["id"] == indata["mgmt-cp"]:
894 break
895 else:
896 raise EngineException(
897 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
898 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
899 )
900
901 @staticmethod
902 def validate_vdu_internal_connection_points(vdu):
903 int_cpds = set()
904 for cpd in get_iterable(vdu.get("int-cpd")):
905 cpd_id = cpd.get("id")
906 if cpd_id and cpd_id in int_cpds:
907 raise EngineException(
908 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
909 vdu["id"], cpd_id
910 ),
911 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
912 )
913 int_cpds.add(cpd_id)
914
915 @staticmethod
916 def validate_external_connection_points(indata):
917 all_vdus_int_cpds = set()
918 for vdu in get_iterable(indata.get("vdu")):
919 for int_cpd in get_iterable(vdu.get("int-cpd")):
920 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
921
922 ext_cpds = set()
923 for cpd in get_iterable(indata.get("ext-cpd")):
924 cpd_id = cpd.get("id")
925 if cpd_id and cpd_id in ext_cpds:
926 raise EngineException(
927 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
928 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
929 )
930 ext_cpds.add(cpd_id)
931
932 int_cpd = cpd.get("int-cpd")
933 if int_cpd:
934 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
935 raise EngineException(
936 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
937 cpd_id
938 ),
939 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
940 )
941 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
942
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """
        Check that when a vdu-profile has a day1-2 configuration with a juju
        execution environment, the package actually contains a charms folder
        (either "charms" or "Scripts/charms").

        :param storage_params: _admin storage data, used to locate package folders
        :param indata: full vnfd content
        :raises EngineException: when a charm is declared but not packaged
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # a day1-2 config applies to the vdu whose profile id it matches
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
968
969 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
970 if not vdu.get("cloud-init-file"):
971 return
972 if not self._validate_package_folders(
973 storage_params, "cloud_init", vdu["cloud-init-file"]
974 ) and not self._validate_package_folders(
975 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
976 ):
977 raise EngineException(
978 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
979 "package".format(indata["id"], vdu["id"])
980 )
981
982 def _validate_vnf_charms_in_package(self, storage_params, indata):
983 # Get VNF configuration through new container
984 for deployment_flavor in indata.get("df", []):
985 if "lcm-operations-configuration" not in deployment_flavor:
986 return
987 if (
988 "operate-vnf-op-config"
989 not in deployment_flavor["lcm-operations-configuration"]
990 ):
991 return
992 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
993 "operate-vnf-op-config"
994 ]["day1-2"]:
995 if day_1_2_config["id"] == indata["id"]:
996 if utils.find_in_list(
997 day_1_2_config.get("execution-environment-list", []),
998 lambda ee: "juju" in ee,
999 ):
1000 if not self._validate_package_folders(
1001 storage_params, "charms"
1002 ) and not self._validate_package_folders(
1003 storage_params, "Scripts/charms"
1004 ):
1005 raise EngineException(
1006 "Charm defined in vnf[id={}] but not present in "
1007 "package".format(indata["id"])
1008 )
1009
1010 def _validate_package_folders(self, storage_params, folder, file=None):
1011 if not storage_params:
1012 return False
1013 elif not storage_params.get("pkg-dir"):
1014 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1015 f = "{}_/{}".format(storage_params["folder"], folder)
1016 else:
1017 f = "{}/{}".format(storage_params["folder"], folder)
1018 if file:
1019 return self.fs.file_exists("{}/{}".format(f, file), "file")
1020 else:
1021 if self.fs.file_exists(f, "dir"):
1022 if self.fs.dir_ls(f):
1023 return True
1024 return False
1025 else:
1026 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1027 f = "{}_/{}/{}".format(
1028 storage_params["folder"], storage_params["pkg-dir"], folder
1029 )
1030 else:
1031 f = "{}/{}/{}".format(
1032 storage_params["folder"], storage_params["pkg-dir"], folder
1033 )
1034 if file:
1035 return self.fs.file_exists("{}/{}".format(f, file), "file")
1036 else:
1037 if self.fs.file_exists(f, "dir"):
1038 if self.fs.dir_ls(f):
1039 return True
1040 return False
1041
1042 @staticmethod
1043 def validate_internal_virtual_links(indata):
1044 all_ivld_ids = set()
1045 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1046 ivld_id = ivld.get("id")
1047 if ivld_id and ivld_id in all_ivld_ids:
1048 raise EngineException(
1049 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1050 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1051 )
1052 else:
1053 all_ivld_ids.add(ivld_id)
1054
1055 for vdu in get_iterable(indata.get("vdu")):
1056 for int_cpd in get_iterable(vdu.get("int-cpd")):
1057 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1058 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1059 raise EngineException(
1060 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1061 "int-virtual-link-desc".format(
1062 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1063 ),
1064 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1065 )
1066
1067 for df in get_iterable(indata.get("df")):
1068 for vlp in get_iterable(df.get("virtual-link-profile")):
1069 vlp_ivld_id = vlp.get("id")
1070 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1071 raise EngineException(
1072 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1073 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1074 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1075 )
1076
1077 @staticmethod
1078 def validate_monitoring_params(indata):
1079 all_monitoring_params = set()
1080 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1081 for mp in get_iterable(ivld.get("monitoring-parameters")):
1082 mp_id = mp.get("id")
1083 if mp_id and mp_id in all_monitoring_params:
1084 raise EngineException(
1085 "Duplicated monitoring-parameter id in "
1086 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1087 ivld["id"], mp_id
1088 ),
1089 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1090 )
1091 else:
1092 all_monitoring_params.add(mp_id)
1093
1094 for vdu in get_iterable(indata.get("vdu")):
1095 for mp in get_iterable(vdu.get("monitoring-parameter")):
1096 mp_id = mp.get("id")
1097 if mp_id and mp_id in all_monitoring_params:
1098 raise EngineException(
1099 "Duplicated monitoring-parameter id in "
1100 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1101 vdu["id"], mp_id
1102 ),
1103 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1104 )
1105 else:
1106 all_monitoring_params.add(mp_id)
1107
1108 for df in get_iterable(indata.get("df")):
1109 for mp in get_iterable(df.get("monitoring-parameter")):
1110 mp_id = mp.get("id")
1111 if mp_id and mp_id in all_monitoring_params:
1112 raise EngineException(
1113 "Duplicated monitoring-parameter id in "
1114 "df[id='{}']:monitoring-parameter[id='{}']".format(
1115 df["id"], mp_id
1116 ),
1117 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1118 )
1119 else:
1120 all_monitoring_params.add(mp_id)
1121
1122 @staticmethod
1123 def validate_scaling_group_descriptor(indata):
1124 all_monitoring_params = set()
1125 all_vdu_ids = set()
1126 for df in get_iterable(indata.get("df")):
1127 for il in get_iterable(df.get("instantiation-level")):
1128 for vl in get_iterable(il.get("vdu-level")):
1129 all_vdu_ids.add(vl.get("vdu-id"))
1130
1131 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1132 for mp in get_iterable(ivld.get("monitoring-parameters")):
1133 all_monitoring_params.add(mp.get("id"))
1134
1135 for vdu in get_iterable(indata.get("vdu")):
1136 for mp in get_iterable(vdu.get("monitoring-parameter")):
1137 all_monitoring_params.add(mp.get("id"))
1138
1139 for df in get_iterable(indata.get("df")):
1140 for mp in get_iterable(df.get("monitoring-parameter")):
1141 all_monitoring_params.add(mp.get("id"))
1142
1143 for df in get_iterable(indata.get("df")):
1144 for sa in get_iterable(df.get("scaling-aspect")):
1145 for deltas in get_iterable(
1146 sa.get("aspect-delta-details").get("deltas")
1147 ):
1148 for vds in get_iterable(deltas.get("vdu-delta")):
1149 sa_vdu_id = vds.get("id")
1150 if sa_vdu_id and sa_vdu_id not in all_vdu_ids:
1151 raise EngineException(
1152 "df[id='{}']:scaling-aspect[id='{}']:aspect-delta-details"
1153 "[delta='{}']: "
1154 "vdu-id='{}' not defined in vdu".format(
1155 df["id"],
1156 sa["id"],
1157 deltas["id"],
1158 sa_vdu_id,
1159 ),
1160 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1161 )
1162
1163 for df in get_iterable(indata.get("df")):
1164 for sa in get_iterable(df.get("scaling-aspect")):
1165 for sp in get_iterable(sa.get("scaling-policy")):
1166 for sc in get_iterable(sp.get("scaling-criteria")):
1167 sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
1168 if (
1169 sc_monitoring_param
1170 and sc_monitoring_param not in all_monitoring_params
1171 ):
1172 raise EngineException(
1173 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1174 "[name='{}']:scaling-criteria[name='{}']: "
1175 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1176 df["id"],
1177 sa["id"],
1178 sp["name"],
1179 sc["name"],
1180 sc_monitoring_param,
1181 ),
1182 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1183 )
1184
1185 for sca in get_iterable(sa.get("scaling-config-action")):
1186 if (
1187 "lcm-operations-configuration" not in df
1188 or "operate-vnf-op-config"
1189 not in df["lcm-operations-configuration"]
1190 or not utils.find_in_list(
1191 df["lcm-operations-configuration"][
1192 "operate-vnf-op-config"
1193 ].get("day1-2", []),
1194 lambda config: config["id"] == indata["id"],
1195 )
1196 ):
1197 raise EngineException(
1198 "'day1-2 configuration' not defined in the descriptor but it is "
1199 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1200 df["id"], sa["id"]
1201 ),
1202 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1203 )
1204 for configuration in get_iterable(
1205 df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1206 "day1-2", []
1207 )
1208 ):
1209 for primitive in get_iterable(
1210 configuration.get("config-primitive")
1211 ):
1212 if (
1213 primitive["name"]
1214 == sca["vnf-config-primitive-name-ref"]
1215 ):
1216 break
1217 else:
1218 raise EngineException(
1219 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1220 "config-primitive-name-ref='{}' does not match any "
1221 "day1-2 configuration:config-primitive:name".format(
1222 df["id"],
1223 sa["id"],
1224 sca["vnf-config-primitive-name-ref"],
1225 ),
1226 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1227 )
1228
1229 @staticmethod
1230 def validate_healing_group_descriptor(indata):
1231 all_vdu_ids = set()
1232 for df in get_iterable(indata.get("df")):
1233 for il in get_iterable(df.get("instantiation-level")):
1234 for vl in get_iterable(il.get("vdu-level")):
1235 all_vdu_ids.add(vl.get("vdu-id"))
1236
1237 for df in get_iterable(indata.get("df")):
1238 for ha in get_iterable(df.get("healing-aspect")):
1239 for hp in get_iterable(ha.get("healing-policy")):
1240 hp_monitoring_param = hp.get("vdu-id")
1241 if hp_monitoring_param and hp_monitoring_param not in all_vdu_ids:
1242 raise EngineException(
1243 "df[id='{}']:healing-aspect[id='{}']:healing-policy"
1244 "[name='{}']: "
1245 "vdu-id='{}' not defined in vdu".format(
1246 df["id"],
1247 ha["id"],
1248 hp["event-name"],
1249 hp_monitoring_param,
1250 ),
1251 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1252 )
1253
1254 @staticmethod
1255 def validate_alarm_group_descriptor(indata):
1256 all_monitoring_params = set()
1257 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1258 for mp in get_iterable(ivld.get("monitoring-parameters")):
1259 all_monitoring_params.add(mp.get("id"))
1260
1261 for vdu in get_iterable(indata.get("vdu")):
1262 for mp in get_iterable(vdu.get("monitoring-parameter")):
1263 all_monitoring_params.add(mp.get("id"))
1264
1265 for df in get_iterable(indata.get("df")):
1266 for mp in get_iterable(df.get("monitoring-parameter")):
1267 all_monitoring_params.add(mp.get("id"))
1268
1269 for vdus in get_iterable(indata.get("vdu")):
1270 for alarms in get_iterable(vdus.get("alarm")):
1271 alarm_monitoring_param = alarms.get("vnf-monitoring-param-ref")
1272 if (
1273 alarm_monitoring_param
1274 and alarm_monitoring_param not in all_monitoring_params
1275 ):
1276 raise EngineException(
1277 "vdu[id='{}']:alarm[id='{}']:"
1278 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1279 vdus["id"],
1280 alarms["alarm-id"],
1281 alarm_monitoring_param,
1282 ),
1283 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1284 )
1285
1286 @staticmethod
1287 def validate_storage_compute_descriptor(indata):
1288 all_vsd_ids = set()
1289 for vsd in get_iterable(indata.get("virtual-storage-desc")):
1290 all_vsd_ids.add(vsd.get("id"))
1291
1292 all_vcd_ids = set()
1293 for vcd in get_iterable(indata.get("virtual-compute-desc")):
1294 all_vcd_ids.add(vcd.get("id"))
1295
1296 for vdus in get_iterable(indata.get("vdu")):
1297 for vsd_ref in vdus.get("virtual-storage-desc"):
1298 if vsd_ref and vsd_ref not in all_vsd_ids:
1299 raise EngineException(
1300 "vdu[virtual-storage-desc='{}']"
1301 "not defined in vnfd".format(
1302 vsd_ref,
1303 ),
1304 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1305 )
1306
1307 for vdus in get_iterable(indata.get("vdu")):
1308 vcd_ref = vdus.get("virtual-compute-desc")
1309 if vcd_ref and vcd_ref not in all_vcd_ids:
1310 raise EngineException(
1311 "vdu[virtual-compute-desc='{}']"
1312 "not defined in vnfd".format(
1313 vdus["virtual-compute-desc"],
1314 ),
1315 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1316 )
1317
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        Deletes associated descriptor revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: messages not to send, passed through to super().delete_extra
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove package operations that reference this package
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # Remove every stored revision of this descriptor (matched by _id regex)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1331
1332 def sol005_projection(self, data):
1333 data["onboardingState"] = data["_admin"]["onboardingState"]
1334 data["operationalState"] = data["_admin"]["operationalState"]
1335 data["usageState"] = data["_admin"]["usageState"]
1336
1337 links = {}
1338 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1339 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1340 links["packageContent"] = {
1341 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1342 }
1343 data["_links"] = links
1344
1345 return super().sol005_projection(data)
1346
1347 @staticmethod
1348 def find_software_version(vnfd: dict) -> str:
1349 """Find the sotware version in the VNFD descriptors
1350
1351 Args:
1352 vnfd (dict): Descriptor as a dictionary
1353
1354 Returns:
1355 software-version (str)
1356 """
1357 default_sw_version = "1.0"
1358 if vnfd.get("vnfd"):
1359 vnfd = vnfd["vnfd"]
1360 if vnfd.get("software-version"):
1361 return vnfd["software-version"]
1362 else:
1363 return default_sw_version
1364
1365 @staticmethod
1366 def extract_policies(vnfd: dict) -> dict:
1367 """Removes the policies from the VNFD descriptors
1368
1369 Args:
1370 vnfd (dict): Descriptor as a dictionary
1371
1372 Returns:
1373 vnfd (dict): VNFD which does not include policies
1374 """
1375 for df in vnfd.get("df", {}):
1376 for policy in ["scaling-aspect", "healing-aspect"]:
1377 if df.get(policy, {}):
1378 df.pop(policy)
1379 for vdu in vnfd.get("vdu", {}):
1380 for alarm_policy in ["alarm", "monitoring-parameter"]:
1381 if vdu.get(alarm_policy, {}):
1382 vdu.pop(alarm_policy)
1383 return vnfd
1384
1385 @staticmethod
1386 def extract_day12_primitives(vnfd: dict) -> dict:
1387 """Removes the day12 primitives from the VNFD descriptors
1388
1389 Args:
1390 vnfd (dict): Descriptor as a dictionary
1391
1392 Returns:
1393 vnfd (dict)
1394 """
1395 for df_id, df in enumerate(vnfd.get("df", {})):
1396 if (
1397 df.get("lcm-operations-configuration", {})
1398 .get("operate-vnf-op-config", {})
1399 .get("day1-2")
1400 ):
1401 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1402 "day1-2"
1403 )
1404 for config_id, config in enumerate(day12):
1405 for key in [
1406 "initial-config-primitive",
1407 "config-primitive",
1408 "terminate-config-primitive",
1409 ]:
1410 config.pop(key, None)
1411 day12[config_id] = config
1412 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1413 "day1-2"
1414 ] = day12
1415 vnfd["df"][df_id] = df
1416 return vnfd
1417
1418 def remove_modifiable_items(self, vnfd: dict) -> dict:
1419 """Removes the modifiable parts from the VNFD descriptors
1420
1421 It calls different extract functions according to different update types
1422 to clear all the modifiable items from VNFD
1423
1424 Args:
1425 vnfd (dict): Descriptor as a dictionary
1426
1427 Returns:
1428 vnfd (dict): Descriptor which does not include modifiable contents
1429 """
1430 if vnfd.get("vnfd"):
1431 vnfd = vnfd["vnfd"]
1432 vnfd.pop("_admin", None)
1433 # If the other extractions need to be done from VNFD,
1434 # the new extract methods could be appended to below list.
1435 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1436 vnfd_temp = extract_function(vnfd)
1437 vnfd = vnfd_temp
1438 return vnfd
1439
1440 def _validate_descriptor_changes(
1441 self,
1442 descriptor_id: str,
1443 descriptor_file_name: str,
1444 old_descriptor_directory: str,
1445 new_descriptor_directory: str,
1446 ):
1447 """Compares the old and new VNFD descriptors and validates the new descriptor.
1448
1449 Args:
1450 old_descriptor_directory (str): Directory of descriptor which is in-use
1451 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1452
1453 Returns:
1454 None
1455
1456 Raises:
1457 EngineException: In case of error when there are unallowed changes
1458 """
1459 try:
1460 # If VNFD does not exist in DB or it is not in use by any NS,
1461 # validation is not required.
1462 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1463 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1464 return
1465
1466 # Get the old and new descriptor contents in order to compare them.
1467 with self.fs.file_open(
1468 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1469 ) as old_descriptor_file:
1470 with self.fs.file_open(
1471 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1472 ) as new_descriptor_file:
1473 old_content = yaml.safe_load(old_descriptor_file.read())
1474 new_content = yaml.safe_load(new_descriptor_file.read())
1475
1476 # If software version has changed, we do not need to validate
1477 # the differences anymore.
1478 if old_content and new_content:
1479 if self.find_software_version(
1480 old_content
1481 ) != self.find_software_version(new_content):
1482 return
1483
1484 disallowed_change = DeepDiff(
1485 self.remove_modifiable_items(old_content),
1486 self.remove_modifiable_items(new_content),
1487 )
1488
1489 if disallowed_change:
1490 changed_nodes = functools.reduce(
1491 lambda a, b: a + " , " + b,
1492 [
1493 node.lstrip("root")
1494 for node in disallowed_change.get(
1495 "values_changed"
1496 ).keys()
1497 ],
1498 )
1499
1500 raise EngineException(
1501 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1502 "there are disallowed changes in the vnf descriptor.",
1503 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1504 )
1505 except (
1506 DbException,
1507 AttributeError,
1508 IndexError,
1509 KeyError,
1510 ValueError,
1511 ) as e:
1512 raise type(e)(
1513 "VNF Descriptor could not be processed with error: {}.".format(e)
1514 )
1515
1516
class NsdTopic(DescriptorTopic):
    """Topic class handling NSD (network service descriptor) operations."""

    topic = "nsds"  # database collection for this topic
    topic_msg = "nsd"  # message-bus topic for notifications

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)
1523
    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 pyangbind model.

        :param item: topic name, e.g. "nsds" (kept for interface uniformity)
        :param data: NSD content as a dict
        :param force: when True, unknown fields are skipped instead of failing
        :return: the descriptor serialized back from the validated model
        :raises EngineException: 422 when the descriptor uses the old
            (pre-SOL006) format or fails model validation
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # Save vnf-profile before validation and restore it afterwards;
            # NOTE(review): presumably the pyangbind round-trip alters this
            # section -- confirm before removing.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize the validated model back and strip envelope/prefixes
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
1552
1553 @staticmethod
1554 def _descriptor_data_is_in_old_format(data):
1555 return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
1556
1557 @staticmethod
1558 def _remove_envelop(indata=None):
1559 if not indata:
1560 return {}
1561 clean_indata = indata
1562
1563 if clean_indata.get("nsd"):
1564 clean_indata = clean_indata["nsd"]
1565 elif clean_indata.get("etsi-nfv-nsd:nsd"):
1566 clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
1567 if clean_indata.get("nsd"):
1568 if (
1569 not isinstance(clean_indata["nsd"], list)
1570 or len(clean_indata["nsd"]) != 1
1571 ):
1572 raise EngineException("'nsd' must be a list of only one element")
1573 clean_indata = clean_indata["nsd"][0]
1574 return clean_indata
1575
    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: strip read-only SOL005 fields, run pyangbind
        model validation and check cross references inside the descriptor.

        :param indata: NSD content as a dict
        :param storage_params: _admin.storage of the descriptor (unused here)
        :param force: propagated to pyangbind validation (skip unknown fields)
        :return: the validated and normalized descriptor
        """
        # Read-only SOL005 fields must not be part of the stored descriptor
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
        for fg in get_iterable(indata.get("vnffgd")):
            self.validate_vnffgd_data(fg, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata
1594
1595 @staticmethod
1596 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
1597 if not vld.get("mgmt-network"):
1598 return
1599 vld_id = vld.get("id")
1600 for df in get_iterable(indata.get("df")):
1601 for vlp in get_iterable(df.get("virtual-link-profile")):
1602 if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
1603 if vlp.get("virtual-link-protocol-data"):
1604 raise EngineException(
1605 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1606 "protocol-data You cannot set a virtual-link-protocol-data "
1607 "when mgmt-network is True".format(df["id"], vlp["id"]),
1608 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1609 )
1610
1611 @staticmethod
1612 def validate_vnffgd_data(fg, indata):
1613 position_list = []
1614 all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id")))
1615 for fgposition in get_iterable(fg.get("nfp-position-element")):
1616 position_list.append(fgposition["id"])
1617
1618 for nfpd in get_iterable(fg.get("nfpd")):
1619 nfp_position = []
1620 for position in get_iterable(nfpd.get("position-desc-id")):
1621 nfp_position = position.get("nfp-position-element-id")
1622 if position == "nfp-position-element-id":
1623 nfp_position = position.get("nfp-position-element-id")
1624 if nfp_position[0] not in position_list:
1625 raise EngineException(
1626 "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
1627 "does not match any nfp-position-element".format(
1628 nfpd["id"], nfp_position[0]
1629 ),
1630 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1631 )
1632
1633 for cp in get_iterable(position.get("cp-profile-id")):
1634 for cpe in get_iterable(cp.get("constituent-profile-elements")):
1635 constituent_base_element_id = cpe.get(
1636 "constituent-base-element-id"
1637 )
1638 if (
1639 constituent_base_element_id
1640 and constituent_base_element_id not in all_vnf_ids
1641 ):
1642 raise EngineException(
1643 "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
1644 "does not match any constituent-base-element-id".format(
1645 cpe["id"], constituent_base_element_id
1646 ),
1647 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1648 )
1649
1650 @staticmethod
1651 def validate_vnf_profiles_vnfd_id(indata):
1652 all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
1653 for df in get_iterable(indata.get("df")):
1654 for vnf_profile in get_iterable(df.get("vnf-profile")):
1655 vnfd_id = vnf_profile.get("vnfd-id")
1656 if vnfd_id and vnfd_id not in all_vnfd_ids:
1657 raise EngineException(
1658 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1659 "does not match any vnfd-id".format(
1660 df["id"], vnf_profile["id"], vnfd_id
1661 ),
1662 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1663 )
1664
    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """Normalize and validate an NSD edition payload.

        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}

        :param indata: edition payload (mutated in place into the "_admin" shape)
        :param content: current database content of the descriptor
        :param force: unused here, kept for interface uniformity
        :return: the normalized indata
        :raises EngineException: 400 on invalid values, 409 when the state is unchanged
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            # Only the two SOL005 operational states are accepted
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Reject a no-op state change with a 409 conflict
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata
1714
1715 def _check_descriptor_dependencies(self, session, descriptor):
1716 """
1717 Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
1718 connection points are ok
1719 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1720 :param descriptor: descriptor to be inserted or edit
1721 :return: None or raises exception
1722 """
1723 if session["force"]:
1724 return
1725 vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
1726
1727 # Cross references validation in the descriptor and vnfd connection point validation
1728 for df in get_iterable(descriptor.get("df")):
1729 self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
1730
1731 def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
1732 vnfds_index = {}
1733 if descriptor.get("vnfd-id") and not session["force"]:
1734 for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
1735 query_filter = self._get_project_filter(session)
1736 query_filter["id"] = vnfd_id
1737 vnf_list = self.db.get_list("vnfds", query_filter)
1738 if not vnf_list:
1739 raise EngineException(
1740 "Descriptor error at 'vnfd-id'='{}' references a non "
1741 "existing vnfd".format(vnfd_id),
1742 http_code=HTTPStatus.CONFLICT,
1743 )
1744 vnfds_index[vnfd_id] = vnf_list[0]
1745 return vnfds_index
1746
1747 @staticmethod
1748 def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
1749 for vnf_profile in get_iterable(df.get("vnf-profile")):
1750 vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
1751 all_vnfd_ext_cpds = set()
1752 for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
1753 if ext_cpd.get("id"):
1754 all_vnfd_ext_cpds.add(ext_cpd.get("id"))
1755
1756 for virtual_link in get_iterable(
1757 vnf_profile.get("virtual-link-connectivity")
1758 ):
1759 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
1760 vl_cpd_id = vl_cpd.get("constituent-cpd-id")
1761 if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
1762 raise EngineException(
1763 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1764 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1765 "non existing ext-cpd:id inside vnfd '{}'".format(
1766 df["id"],
1767 vnf_profile["id"],
1768 virtual_link["virtual-link-profile-id"],
1769 vl_cpd_id,
1770 vnfd["id"],
1771 ),
1772 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1773 )
1774
1775 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1776 final_content = super().check_conflict_on_edit(
1777 session, final_content, edit_content, _id
1778 )
1779
1780 self._check_descriptor_dependencies(session, final_content)
1781
1782 return final_content
1783
1784 def check_conflict_on_del(self, session, _id, db_content):
1785 """
1786 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1787 that NSD can be public and be used by other projects.
1788 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1789 :param _id: nsd internal id
1790 :param db_content: The database content of the _id
1791 :return: None or raises EngineException with the conflict
1792 """
1793 if session["force"]:
1794 return
1795 descriptor = db_content
1796 descriptor_id = descriptor.get("id")
1797 if not descriptor_id: # empty nsd not uploaded
1798 return
1799
1800 # check NSD used by NS
1801 _filter = self._get_project_filter(session)
1802 _filter["nsd-id"] = _id
1803 if self.db.get_list("nsrs", _filter):
1804 raise EngineException(
1805 "There is at least one NS instance using this descriptor",
1806 http_code=HTTPStatus.CONFLICT,
1807 )
1808
1809 # check NSD referenced by NST
1810 del _filter["nsd-id"]
1811 _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1812 if self.db.get_list("nsts", _filter):
1813 raise EngineException(
1814 "There is at least one NetSlice Template referencing this descriptor",
1815 http_code=HTTPStatus.CONFLICT,
1816 )
1817
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated descriptor revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: messages not to send, passed through to super().delete_extra
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove every stored revision of this descriptor (matched by _id regex)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1830
1831 @staticmethod
1832 def extract_day12_primitives(nsd: dict) -> dict:
1833 """Removes the day12 primitives from the NSD descriptors
1834
1835 Args:
1836 nsd (dict): Descriptor as a dictionary
1837
1838 Returns:
1839 nsd (dict): Cleared NSD
1840 """
1841 if nsd.get("ns-configuration"):
1842 for key in [
1843 "config-primitive",
1844 "initial-config-primitive",
1845 "terminate-config-primitive",
1846 ]:
1847 nsd["ns-configuration"].pop(key, None)
1848 return nsd
1849
1850 def remove_modifiable_items(self, nsd: dict) -> dict:
1851 """Removes the modifiable parts from the VNFD descriptors
1852
1853 It calls different extract functions according to different update types
1854 to clear all the modifiable items from NSD
1855
1856 Args:
1857 nsd (dict): Descriptor as a dictionary
1858
1859 Returns:
1860 nsd (dict): Descriptor which does not include modifiable contents
1861 """
1862 while isinstance(nsd, dict) and nsd.get("nsd"):
1863 nsd = nsd["nsd"]
1864 if isinstance(nsd, list):
1865 nsd = nsd[0]
1866 nsd.pop("_admin", None)
1867 # If the more extractions need to be done from NSD,
1868 # the new extract methods could be appended to below list.
1869 for extract_function in [self.extract_day12_primitives]:
1870 nsd_temp = extract_function(nsd)
1871 nsd = nsd_temp
1872 return nsd
1873
1874 def _validate_descriptor_changes(
1875 self,
1876 descriptor_id: str,
1877 descriptor_file_name: str,
1878 old_descriptor_directory: str,
1879 new_descriptor_directory: str,
1880 ):
1881 """Compares the old and new NSD descriptors and validates the new descriptor
1882
1883 Args:
1884 old_descriptor_directory: Directory of descriptor which is in-use
1885 new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)
1886
1887 Returns:
1888 None
1889
1890 Raises:
1891 EngineException: In case of error if the changes are not allowed
1892 """
1893
1894 try:
1895 # If NSD does not exist in DB, or it is not in use by any NS,
1896 # validation is not required.
1897 nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
1898 if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
1899 return
1900
1901 # Get the old and new descriptor contents in order to compare them.
1902 with self.fs.file_open(
1903 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1904 ) as old_descriptor_file:
1905 with self.fs.file_open(
1906 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1907 ) as new_descriptor_file:
1908 old_content = yaml.safe_load(old_descriptor_file.read())
1909 new_content = yaml.safe_load(new_descriptor_file.read())
1910
1911 if old_content and new_content:
1912 disallowed_change = DeepDiff(
1913 self.remove_modifiable_items(old_content),
1914 self.remove_modifiable_items(new_content),
1915 )
1916
1917 if disallowed_change:
1918 changed_nodes = functools.reduce(
1919 lambda a, b: a + ", " + b,
1920 [
1921 node.lstrip("root")
1922 for node in disallowed_change.get(
1923 "values_changed"
1924 ).keys()
1925 ],
1926 )
1927
1928 raise EngineException(
1929 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1930 "there are disallowed changes in the ns descriptor. ",
1931 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1932 )
1933 except (
1934 DbException,
1935 AttributeError,
1936 IndexError,
1937 KeyError,
1938 ValueError,
1939 ) as e:
1940 raise type(e)(
1941 "NS Descriptor could not be processed with error: {}.".format(e)
1942 )
1943
1944 def sol005_projection(self, data):
1945 data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
1946 data["nsdOperationalState"] = data["_admin"]["operationalState"]
1947 data["nsdUsageState"] = data["_admin"]["usageState"]
1948
1949 links = {}
1950 links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
1951 links["nsd_content"] = {
1952 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
1953 }
1954 data["_links"] = links
1955
1956 return super().sol005_projection(data)
1957
1958
class NstTopic(DescriptorTopic):
    """Topic handler for Network Slice Templates (NST)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST descriptor against the YANG model via pyangbind.

        :param item: topic name (unused, kept for interface compatibility)
        :param data: descriptor content to validate
        :param force: when True, unknown fields are skipped instead of failing
        :return: validated descriptor with the envelope removed
        :raises EngineException: (422) on any validation error
        """
        try:
            nst_model = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=nst_model,
                path_helper=True,
                skip_unknown=force,
            )
            serialized = pybindJSON.dumps(nst_model, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Unwrap a one-element "nst" or "nst:nst" list envelope, if present."""
        if not indata:
            return {}
        for envelope in ("nst", "nst:nst"):
            if indata.get(envelope):
                content = indata[envelope]
                if not isinstance(content, list) or len(content) != 1:
                    raise EngineException(
                        "'{}' must be a list only one element".format(envelope)
                    )
                return content[0]
        return indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Strip SOL005 state fields and validate the new NST with pyangbind."""
        for state_field in ("onboardingState", "operationalState", "usageState"):
            indata.pop(state_field, None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        subnets = descriptor.get("netslice-subnet")
        if not subnets:
            return
        for subnet in subnets:
            nsd_id = subnet["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate the merged content and its NSD references before edit."""
        merged_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        self._check_descriptor_dependencies(session, merged_content)
        return merged_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Look for Network Slice Instances created from this template.
        usage_filter = self._get_project_filter(session)
        usage_filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", usage_filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Project internal admin state into SOL005 fields and add HATEOAS links."""
        admin_data = data["_admin"]
        data["onboardingState"] = admin_data["onboardingState"]
        data["operationalState"] = admin_data["operationalState"]
        data["usageState"] = admin_data["usageState"]

        data["_links"] = {
            "self": {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])},
            "nst": {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])},
        }

        return super().sol005_projection(data)
2076
2077
class PduTopic(BaseTopic):
    """Topic handler for Physical Deployment Units (PDU)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply base formatting and set the initial admin states of a new PDU."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin_data = content["_admin"]
        admin_data["onboardingState"] = "CREATED"
        admin_data["operationalState"] = "ENABLED"
        admin_data["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # Any VNF record whose VDU references this PDU blocks the deletion.
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
2113
2114
class VnfPkgOpTopic(BaseTopic):
    # Topic handler for VNF package operations (vnfpkg_op_occs).
    # Operation records are create/read only: edit and delete are rejected.
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None  # no edit schema: edit() below always raises

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing a package operation is not supported; always raises 405."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting a package operation is not supported; always raises 405."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of package operations is not supported; always raises 405."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        # Fetch the target VNFD; get_one raises if it does not exist in the project.
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the named KDU inside the VNFD; the for/else raises when no
        # KDU matches the requested name.
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # A KDU is deployed either as a helm chart or a juju bundle; extract
        # the repo name from the "<repo>/<name>" form when present.
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # Resolve the k8s repository record; the project filter is reused with
        # the repo name instead of the VNFD id.
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        # Build the SOL005 operation occurrence record.
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # Mirror the creation timestamp into the SOL005 time fields.
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # Notify the LCM via the message bus so the operation gets executed.
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None