2814653075319bddcec773e78e8ca4ee74a6df95
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23
24 # import logging
25 from deepdiff import DeepDiff
26 from hashlib import md5
27 from osm_common.dbbase import DbException, deep_update_rfc7396
28 from http import HTTPStatus
29 from time import time
30 from uuid import uuid4
31 from re import fullmatch
32 from zipfile import ZipFile
33 from osm_nbi.validation import (
34 ValidationError,
35 pdu_new_schema,
36 pdu_edit_schema,
37 validate_input,
38 vnfpkgop_new_schema,
39 )
40 from osm_nbi.base_topic import (
41 BaseTopic,
42 EngineException,
43 get_iterable,
44 detect_descriptor_usage,
45 )
46 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
47 from osm_im.nst import nst as nst_im
48 from pyangbind.lib.serialise import pybindJSONDecoder
49 import pyangbind.lib.pybindJSON as pybindJSON
50 from osm_nbi import utils
51
52 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
53
54
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor topics (VNFD, NSD, NST, ...).

    Implements the SOL005 two-step onboarding (create an empty DISABLED entry,
    then upload the package content), file-system revision bookkeeping, and
    the generic edit/delete conflict checks shared by every descriptor type.
    """

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def _validate_input_new(self, indata, storage_params, force=False):
        # No-op in the base class; subclasses override it with pyangbind
        # validation of the concrete descriptor model.
        return indata

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate the edited descriptor and check for id conflicts.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after applying the edit
        :param edit_content: partial content sent by the client
        :param _id: internal id of the descriptor being edited
        :return: the serialized (validated) final content
        :raises EngineException: on duplicated identifiers or id conflicts
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively ensure every list inside the descriptor has unique
            # 'id' (or 'name') values among its dict items.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        # On top of the generic formatting, set the SOL005 lifecycle states
        # for a freshly created (not yet onboarded) descriptor.
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes file system storage associated with the descriptor
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: To not send message (False) or store content (list) instead
        :return: None if ok or raises EngineException with the problem
        """
        self.fs.file_delete(_id, ignore_non_exist=True)
        self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
        # Remove file revisions
        if "revision" in db_content["_admin"]:
            revision = db_content["_admin"]["revision"]
            while revision > 0:
                self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
                revision = revision - 1

    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Return the unique descriptor whose SOL006 'id' matches, visible to this session.

        :raises DbException: CONFLICT on ambiguity, NOT_FOUND when absent.
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second lookup builds the same project filter as the
        # first one, so it repeats the previous query rather than restricting to
        # public descriptors — confirm against _get_project_filter semantics.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None

    def upload_content(self, session, _id, indata, kwargs, headers):
        """
        Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id : the nsd,vnfd is already created, this is the id
        :param indata: http body request
        :param kwargs: user query string to override parameters. NOT USED
        :param headers: http request headers
        :return: True if package is completely uploaded or False if partial content has been uploaded
            Raise exception on error
        """
        # Check that _id exists and it is valid
        current_desc = self.show(session, _id)

        content_range_text = headers.get("Content-Range")
        expected_md5 = headers.get("Content-File-MD5")
        compressed = None
        content_type = headers.get("Content-Type")
        # Fixed operator precedence: the former 'a and b or c' form evaluated
        # '"application/x-gzip" in None' (TypeError) when Content-Type was absent.
        if content_type and (
            "application/gzip" in content_type or "application/x-gzip" in content_type
        ):
            compressed = "gzip"
        if content_type and "application/zip" in content_type:
            compressed = "zip"
        filename = headers.get("Content-Filename")
        if not filename and compressed:
            filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
        elif not filename:
            filename = "package"

        revision = 1
        if "revision" in current_desc["_admin"]:
            revision = current_desc["_admin"]["revision"] + 1

        # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
        file_pkg = None
        error_text = ""
        fs_rollback = []

        try:
            if content_range_text:
                content_range = (
                    content_range_text.replace("-", " ").replace("/", " ").split()
                )
                if (
                    content_range[0] != "bytes"
                ):  # TODO check x<y not negative < total....
                    raise IndexError()
                start = int(content_range[1])
                end = int(content_range[2]) + 1
                total = int(content_range[3])
            else:
                start = 0
            # Rather than using a temp folder, we will store the package in a folder based on
            # the current revision.
            proposed_revision_path = (
                _id + ":" + str(revision)
            )  # all the content is upload here and if ok, it is rename from id_ to is folder

            if start:
                if not self.fs.file_exists(proposed_revision_path, "dir"):
                    raise EngineException(
                        "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                    )
            else:
                self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                self.fs.mkdir(proposed_revision_path)
                fs_rollback.append(proposed_revision_path)

            storage = self.fs.get_params()
            storage["folder"] = proposed_revision_path

            file_path = (proposed_revision_path, filename)
            if self.fs.file_exists(file_path, "file"):
                file_size = self.fs.file_size(file_path)
            else:
                file_size = 0
            if file_size != start:
                raise EngineException(
                    "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                        file_size, start
                    ),
                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                )
            file_pkg = self.fs.file_open(file_path, "a+b")
            if isinstance(indata, dict):
                indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                file_pkg.write(indata_text.encode(encoding="utf-8"))
            else:
                indata_len = 0
                while True:
                    indata_text = indata.read(4096)
                    indata_len += len(indata_text)
                    if not indata_text:
                        break
                    file_pkg.write(indata_text)
            if content_range_text:
                if indata_len != end - start:
                    raise EngineException(
                        "Mismatch between Content-Range header {}-{} and body length of {}".format(
                            start, end - 1, indata_len
                        ),
                        HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                    )
                if end != total:
                    # TODO update to UPLOADING
                    return False

            # PACKAGE UPLOADED
            if expected_md5:
                file_pkg.seek(0, 0)
                file_md5 = md5()
                chunk_data = file_pkg.read(1024)
                while chunk_data:
                    file_md5.update(chunk_data)
                    chunk_data = file_pkg.read(1024)
                if expected_md5 != file_md5.hexdigest():
                    raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
            file_pkg.seek(0, 0)
            if compressed == "gzip":
                tar = tarfile.open(mode="r", fileobj=file_pkg)
                descriptor_file_name = None
                for tarinfo in tar:
                    tarname = tarinfo.name
                    tarname_path = tarname.split("/")
                    if (
                        not tarname_path[0] or ".." in tarname_path
                    ):  # if start with "/" means absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor tar.gz"
                        )
                    if len(tarname_path) == 1 and not tarinfo.isdir():
                        raise EngineException(
                            "All files must be inside a dir for package descriptor tar.gz"
                        )
                    if (
                        tarname.endswith(".yaml")
                        or tarname.endswith(".json")
                        or tarname.endswith(".yml")
                    ):
                        storage["pkg-dir"] = tarname_path[0]
                        if len(tarname_path) == 2:
                            if descriptor_file_name:
                                raise EngineException(
                                    "Found more than one descriptor file at package descriptor tar.gz"
                                )
                            descriptor_file_name = tarname
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor tar.gz"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(tar, proposed_revision_path)
                with self.fs.file_open(
                    (proposed_revision_path, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            elif compressed == "zip":
                zipfile = ZipFile(file_pkg)
                descriptor_file_name = None
                for package_file in zipfile.infolist():
                    zipfilename = package_file.filename
                    file_path = zipfilename.split("/")
                    if (
                        not file_path[0] or ".." in zipfilename
                    ):  # if start with "/" means absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor zip"
                        )

                    if (
                        zipfilename.endswith(".yaml")
                        or zipfilename.endswith(".json")
                        or zipfilename.endswith(".yml")
                    ) and (
                        zipfilename.find("/") < 0
                        or zipfilename.find("Definitions") >= 0
                    ):
                        storage["pkg-dir"] = ""
                        if descriptor_file_name:
                            raise EngineException(
                                "Found more than one descriptor file at package descriptor zip"
                            )
                        descriptor_file_name = zipfilename
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor zip"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(zipfile, proposed_revision_path)

                with self.fs.file_open(
                    (proposed_revision_path, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            else:
                content = file_pkg.read()
                storage["descriptor"] = descriptor_file_name = filename

            if descriptor_file_name.endswith(".json"):
                error_text = "Invalid json format "
                # json.loads: 'content' is the file payload (str/bytes), not a
                # file object, so json.load() would fail here.
                indata = json.loads(content)
            else:
                error_text = "Invalid yaml format "
                indata = yaml.load(content, Loader=yaml.SafeLoader)

            # Need to close the file package here so it can be copied from the
            # revision to the current, unrevisioned record
            if file_pkg:
                file_pkg.close()
                file_pkg = None

            # Fetch both the incoming, proposed revision and the original revision so we
            # can call a validate method to compare them
            current_revision_path = _id + "/"
            self.fs.sync(from_path=current_revision_path)
            self.fs.sync(from_path=proposed_revision_path)

            if revision > 1:
                try:
                    self._validate_descriptor_changes(
                        _id,
                        descriptor_file_name,
                        current_revision_path,
                        proposed_revision_path,
                    )
                except Exception as e:
                    shutil.rmtree(
                        self.fs.path + current_revision_path, ignore_errors=True
                    )
                    shutil.rmtree(
                        self.fs.path + proposed_revision_path, ignore_errors=True
                    )
                    # Only delete the new revision. We need to keep the original version in place
                    # as it has not been changed.
                    self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                    raise e

            indata = self._remove_envelop(indata)

            # Override descriptor with query string kwargs
            if kwargs:
                self._update_input_with_kwargs(indata, kwargs)

            current_desc["_admin"]["storage"] = storage
            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
            current_desc["_admin"]["operationalState"] = "ENABLED"
            current_desc["_admin"]["modified"] = time()
            current_desc["_admin"]["revision"] = revision

            deep_update_rfc7396(current_desc, indata)
            current_desc = self.check_conflict_on_edit(
                session, current_desc, indata, _id=_id
            )

            # Copy the revision to the active package name by its original id
            shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
            os.rename(
                self.fs.path + proposed_revision_path,
                self.fs.path + current_revision_path,
            )
            self.fs.file_delete(current_revision_path, ignore_non_exist=True)
            self.fs.mkdir(current_revision_path)
            self.fs.reverse_sync(from_path=current_revision_path)

            shutil.rmtree(self.fs.path + _id)

            self.db.replace(self.topic, _id, current_desc)

            # Store a copy of the package as a point in time revision
            revision_desc = dict(current_desc)
            revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
            self.db.create(self.topic + "_revisions", revision_desc)
            fs_rollback = []

            indata["_id"] = _id
            self._send_msg("edited", indata)

            # TODO if descriptor has changed because kwargs update content and remove cached zip
            # TODO if zip is not present creates one
            return True

        except EngineException:
            raise
        except IndexError:
            raise EngineException(
                "invalid Content-Range header format. Expected 'bytes start-end/total'",
                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
            )
        except IOError as e:
            raise EngineException(
                "invalid upload transaction sequence: '{}'".format(e),
                HTTPStatus.BAD_REQUEST,
            )
        except tarfile.ReadError as e:
            raise EngineException(
                "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
            )
        except (ValueError, yaml.YAMLError) as e:
            raise EngineException(error_text + str(e))
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
        finally:
            if file_pkg:
                file_pkg.close()
            for file in fs_rollback:
                self.fs.file_delete(file, ignore_non_exist=True)

    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype    accept  ZIP  TEXT    -> result
        # manyfiles          yes  X       -> zip
        #                    no   yes     -> error
        # onefile            yes  no      -> zip
        #                    X    yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )

    def _remove_yang_prefixes_from_descriptor(self, descriptor):
        # Strip module prefixes from every key (e.g. "etsi-nfv-vnfd:vdu" -> "vdu"),
        # recursing into nested dicts and lists of dicts.
        new_descriptor = {}
        for k, v in descriptor.items():
            new_v = v
            if isinstance(v, dict):
                new_v = self._remove_yang_prefixes_from_descriptor(v)
            elif isinstance(v, list):
                new_v = list()
                for x in v:
                    if isinstance(x, dict):
                        new_v.append(self._remove_yang_prefixes_from_descriptor(x))
                    else:
                        new_v.append(x)
            new_descriptor[k.split(":")[-1]] = new_v
        return new_descriptor

    def pyangbind_validation(self, item, data, force=False):
        # Abstract in the base class: each concrete topic validates against its
        # own SOL006 model.
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance instead of exact type comparison (accepts dict subclasses)
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata

    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        # Hook for subclasses: compare the previous and the proposed revision
        # and reject disallowed modifications.
        # Example:
        #    raise EngineException(
        #           "Error in validating new descriptor: <NODE> cannot be modified",
        #           http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        #       )
        pass
716
717
718 class VnfdTopic(DescriptorTopic):
719 topic = "vnfds"
720 topic_msg = "vnfd"
721
722 def __init__(self, db, fs, msg, auth):
723 DescriptorTopic.__init__(self, db, fs, msg, auth)
724
725 def pyangbind_validation(self, item, data, force=False):
726 if self._descriptor_data_is_in_old_format(data):
727 raise EngineException(
728 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
729 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
730 )
731 try:
732 myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
733 pybindJSONDecoder.load_ietf_json(
734 {"etsi-nfv-vnfd:vnfd": data},
735 None,
736 None,
737 obj=myvnfd,
738 path_helper=True,
739 skip_unknown=force,
740 )
741 out = pybindJSON.dumps(myvnfd, mode="ietf")
742 desc_out = self._remove_envelop(yaml.safe_load(out))
743 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
744 return utils.deep_update_dict(data, desc_out)
745 except Exception as e:
746 raise EngineException(
747 "Error in pyangbind validation: {}".format(str(e)),
748 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
749 )
750
751 @staticmethod
752 def _descriptor_data_is_in_old_format(data):
753 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
754
755 @staticmethod
756 def _remove_envelop(indata=None):
757 if not indata:
758 return {}
759 clean_indata = indata
760
761 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
762 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
763 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
764 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
765 elif clean_indata.get("vnfd"):
766 if not isinstance(clean_indata["vnfd"], dict):
767 raise EngineException("'vnfd' must be dict")
768 clean_indata = clean_indata["vnfd"]
769
770 return clean_indata
771
772 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
773 final_content = super().check_conflict_on_edit(
774 session, final_content, edit_content, _id
775 )
776
777 # set type of vnfd
778 contains_pdu = False
779 contains_vdu = False
780 for vdu in get_iterable(final_content.get("vdu")):
781 if vdu.get("pdu-type"):
782 contains_pdu = True
783 else:
784 contains_vdu = True
785 if contains_pdu:
786 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
787 elif contains_vdu:
788 final_content["_admin"]["type"] = "vnfd"
789 # if neither vud nor pdu do not fill type
790 return final_content
791
792 def check_conflict_on_del(self, session, _id, db_content):
793 """
794 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
795 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
796 that uses this vnfd
797 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
798 :param _id: vnfd internal id
799 :param db_content: The database content of the _id.
800 :return: None or raises EngineException with the conflict
801 """
802 if session["force"]:
803 return
804 descriptor = db_content
805 descriptor_id = descriptor.get("id")
806 if not descriptor_id: # empty vnfd not uploaded
807 return
808
809 _filter = self._get_project_filter(session)
810
811 # check vnfrs using this vnfd
812 _filter["vnfd-id"] = _id
813 if self.db.get_list("vnfrs", _filter):
814 raise EngineException(
815 "There is at least one VNF instance using this descriptor",
816 http_code=HTTPStatus.CONFLICT,
817 )
818
819 # check NSD referencing this VNFD
820 del _filter["vnfd-id"]
821 _filter["vnfd-id"] = descriptor_id
822 if self.db.get_list("nsds", _filter):
823 raise EngineException(
824 "There is at least one NS package referencing this descriptor",
825 http_code=HTTPStatus.CONFLICT,
826 )
827
828 def _validate_input_new(self, indata, storage_params, force=False):
829 indata.pop("onboardingState", None)
830 indata.pop("operationalState", None)
831 indata.pop("usageState", None)
832 indata.pop("links", None)
833
834 indata = self.pyangbind_validation("vnfds", indata, force)
835 # Cross references validation in the descriptor
836
837 self.validate_mgmt_interface_connection_point(indata)
838
839 for vdu in get_iterable(indata.get("vdu")):
840 self.validate_vdu_internal_connection_points(vdu)
841 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
842 self._validate_vdu_charms_in_package(storage_params, indata)
843
844 self._validate_vnf_charms_in_package(storage_params, indata)
845
846 self.validate_external_connection_points(indata)
847 self.validate_internal_virtual_links(indata)
848 self.validate_monitoring_params(indata)
849 self.validate_scaling_group_descriptor(indata)
850
851 return indata
852
853 @staticmethod
854 def validate_mgmt_interface_connection_point(indata):
855 if not indata.get("vdu"):
856 return
857 if not indata.get("mgmt-cp"):
858 raise EngineException(
859 "'mgmt-cp' is a mandatory field and it is not defined",
860 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
861 )
862
863 for cp in get_iterable(indata.get("ext-cpd")):
864 if cp["id"] == indata["mgmt-cp"]:
865 break
866 else:
867 raise EngineException(
868 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
869 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
870 )
871
872 @staticmethod
873 def validate_vdu_internal_connection_points(vdu):
874 int_cpds = set()
875 for cpd in get_iterable(vdu.get("int-cpd")):
876 cpd_id = cpd.get("id")
877 if cpd_id and cpd_id in int_cpds:
878 raise EngineException(
879 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
880 vdu["id"], cpd_id
881 ),
882 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
883 )
884 int_cpds.add(cpd_id)
885
886 @staticmethod
887 def validate_external_connection_points(indata):
888 all_vdus_int_cpds = set()
889 for vdu in get_iterable(indata.get("vdu")):
890 for int_cpd in get_iterable(vdu.get("int-cpd")):
891 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
892
893 ext_cpds = set()
894 for cpd in get_iterable(indata.get("ext-cpd")):
895 cpd_id = cpd.get("id")
896 if cpd_id and cpd_id in ext_cpds:
897 raise EngineException(
898 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
899 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
900 )
901 ext_cpds.add(cpd_id)
902
903 int_cpd = cpd.get("int-cpd")
904 if int_cpd:
905 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
906 raise EngineException(
907 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
908 cpd_id
909 ),
910 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
911 )
912 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
913
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        # For every deployment flavour whose day1-2 configuration declares a
        # juju execution environment for a VDU, require that the package ships
        # a charms folder (either "charms" or "Scripts/charms").
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # NOTE(review): matches the day1-2 config id against the
                        # vdu-profile id (i.e. a VDU-level charm) — confirm this
                        # pairing is intended for all descriptor layouts.
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
939
940 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
941 if not vdu.get("cloud-init-file"):
942 return
943 if not self._validate_package_folders(
944 storage_params, "cloud_init", vdu["cloud-init-file"]
945 ) and not self._validate_package_folders(
946 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
947 ):
948 raise EngineException(
949 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
950 "package".format(indata["id"], vdu["id"])
951 )
952
953 def _validate_vnf_charms_in_package(self, storage_params, indata):
954 # Get VNF configuration through new container
955 for deployment_flavor in indata.get("df", []):
956 if "lcm-operations-configuration" not in deployment_flavor:
957 return
958 if (
959 "operate-vnf-op-config"
960 not in deployment_flavor["lcm-operations-configuration"]
961 ):
962 return
963 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
964 "operate-vnf-op-config"
965 ]["day1-2"]:
966 if day_1_2_config["id"] == indata["id"]:
967 if utils.find_in_list(
968 day_1_2_config.get("execution-environment-list", []),
969 lambda ee: "juju" in ee,
970 ):
971 if not self._validate_package_folders(
972 storage_params, "charms"
973 ) and not self._validate_package_folders(
974 storage_params, "Scripts/charms"
975 ):
976 raise EngineException(
977 "Charm defined in vnf[id={}] but not present in "
978 "package".format(indata["id"])
979 )
980
981 def _validate_package_folders(self, storage_params, folder, file=None):
982 if not storage_params:
983 return False
984 elif not storage_params.get("pkg-dir"):
985 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
986 f = "{}_/{}".format(storage_params["folder"], folder)
987 else:
988 f = "{}/{}".format(storage_params["folder"], folder)
989 if file:
990 return self.fs.file_exists("{}/{}".format(f, file), "file")
991 else:
992 if self.fs.file_exists(f, "dir"):
993 if self.fs.dir_ls(f):
994 return True
995 return False
996 else:
997 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
998 f = "{}_/{}/{}".format(
999 storage_params["folder"], storage_params["pkg-dir"], folder
1000 )
1001 else:
1002 f = "{}/{}/{}".format(
1003 storage_params["folder"], storage_params["pkg-dir"], folder
1004 )
1005 if file:
1006 return self.fs.file_exists("{}/{}".format(f, file), "file")
1007 else:
1008 if self.fs.file_exists(f, "dir"):
1009 if self.fs.dir_ls(f):
1010 return True
1011 return False
1012
1013 @staticmethod
1014 def validate_internal_virtual_links(indata):
1015 all_ivld_ids = set()
1016 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1017 ivld_id = ivld.get("id")
1018 if ivld_id and ivld_id in all_ivld_ids:
1019 raise EngineException(
1020 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1021 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1022 )
1023 else:
1024 all_ivld_ids.add(ivld_id)
1025
1026 for vdu in get_iterable(indata.get("vdu")):
1027 for int_cpd in get_iterable(vdu.get("int-cpd")):
1028 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1029 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1030 raise EngineException(
1031 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1032 "int-virtual-link-desc".format(
1033 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1034 ),
1035 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1036 )
1037
1038 for df in get_iterable(indata.get("df")):
1039 for vlp in get_iterable(df.get("virtual-link-profile")):
1040 vlp_ivld_id = vlp.get("id")
1041 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1042 raise EngineException(
1043 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1044 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1045 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1046 )
1047
1048 @staticmethod
1049 def validate_monitoring_params(indata):
1050 all_monitoring_params = set()
1051 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1052 for mp in get_iterable(ivld.get("monitoring-parameters")):
1053 mp_id = mp.get("id")
1054 if mp_id and mp_id in all_monitoring_params:
1055 raise EngineException(
1056 "Duplicated monitoring-parameter id in "
1057 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1058 ivld["id"], mp_id
1059 ),
1060 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1061 )
1062 else:
1063 all_monitoring_params.add(mp_id)
1064
1065 for vdu in get_iterable(indata.get("vdu")):
1066 for mp in get_iterable(vdu.get("monitoring-parameter")):
1067 mp_id = mp.get("id")
1068 if mp_id and mp_id in all_monitoring_params:
1069 raise EngineException(
1070 "Duplicated monitoring-parameter id in "
1071 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1072 vdu["id"], mp_id
1073 ),
1074 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1075 )
1076 else:
1077 all_monitoring_params.add(mp_id)
1078
1079 for df in get_iterable(indata.get("df")):
1080 for mp in get_iterable(df.get("monitoring-parameter")):
1081 mp_id = mp.get("id")
1082 if mp_id and mp_id in all_monitoring_params:
1083 raise EngineException(
1084 "Duplicated monitoring-parameter id in "
1085 "df[id='{}']:monitoring-parameter[id='{}']".format(
1086 df["id"], mp_id
1087 ),
1088 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1089 )
1090 else:
1091 all_monitoring_params.add(mp_id)
1092
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling aspects declared in every df of a VNFD.

        Checks that:
        - every scaling-criteria:vnf-monitoring-param-ref points to a declared
          monitoring parameter (ivld, vdu or df level), and
        - every scaling-config-action has a matching VNF-level day1-2
          configuration containing a config-primitive with the referenced name.

        :param indata: VNFD descriptor as a dictionary
        :raises EngineException: (HTTP 422) on the first dangling reference
        """
        # Collect every monitoring-parameter id declared anywhere in the
        # descriptor; scaling-criteria may reference any of them.
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        # Each scaling criterion must reference a declared
                        # monitoring parameter.
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration
                    # whose id equals the vnfd id (the VNF-level configuration).
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # The referenced primitive name must exist in at least one
                    # day1-2 configuration of this df.
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        # for/else: raise only when no primitive matched
                        # (the inner loop completed without break).
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1173
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super).
        Deletes associated vnfpkgops from database.
        Deletes the stored revisions of this package.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # Revision documents have ids derived from _id, hence the regex match
        # (presumably "<_id>:<revision>" — confirm against revision creation).
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1187
1188 def sol005_projection(self, data):
1189 data["onboardingState"] = data["_admin"]["onboardingState"]
1190 data["operationalState"] = data["_admin"]["operationalState"]
1191 data["usageState"] = data["_admin"]["usageState"]
1192
1193 links = {}
1194 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1195 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1196 links["packageContent"] = {
1197 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1198 }
1199 data["_links"] = links
1200
1201 return super().sol005_projection(data)
1202
1203 @staticmethod
1204 def find_software_version(vnfd: dict) -> str:
1205 """Find the sotware version in the VNFD descriptors
1206
1207 Args:
1208 vnfd (dict): Descriptor as a dictionary
1209
1210 Returns:
1211 software-version (str)
1212 """
1213 default_sw_version = "1.0"
1214 if vnfd.get("vnfd"):
1215 vnfd = vnfd["vnfd"]
1216 if vnfd.get("software-version"):
1217 return vnfd["software-version"]
1218 else:
1219 return default_sw_version
1220
1221 @staticmethod
1222 def extract_policies(vnfd: dict) -> dict:
1223 """Removes the policies from the VNFD descriptors
1224
1225 Args:
1226 vnfd (dict): Descriptor as a dictionary
1227
1228 Returns:
1229 vnfd (dict): VNFD which does not include policies
1230 """
1231 for df in vnfd.get("df", {}):
1232 for policy in ["scaling-aspect", "healing-aspect"]:
1233 if df.get(policy, {}):
1234 df.pop(policy)
1235 for vdu in vnfd.get("vdu", {}):
1236 for alarm_policy in ["alarm", "monitoring-parameter"]:
1237 if vdu.get(alarm_policy, {}):
1238 vdu.pop(alarm_policy)
1239 return vnfd
1240
1241 @staticmethod
1242 def extract_day12_primitives(vnfd: dict) -> dict:
1243 """Removes the day12 primitives from the VNFD descriptors
1244
1245 Args:
1246 vnfd (dict): Descriptor as a dictionary
1247
1248 Returns:
1249 vnfd (dict)
1250 """
1251 for df_id, df in enumerate(vnfd.get("df", {})):
1252 if (
1253 df.get("lcm-operations-configuration", {})
1254 .get("operate-vnf-op-config", {})
1255 .get("day1-2")
1256 ):
1257 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1258 "day1-2"
1259 )
1260 for config_id, config in enumerate(day12):
1261 for key in [
1262 "initial-config-primitive",
1263 "config-primitive",
1264 "terminate-config-primitive",
1265 ]:
1266 config.pop(key, None)
1267 day12[config_id] = config
1268 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1269 "day1-2"
1270 ] = day12
1271 vnfd["df"][df_id] = df
1272 return vnfd
1273
1274 def remove_modifiable_items(self, vnfd: dict) -> dict:
1275 """Removes the modifiable parts from the VNFD descriptors
1276
1277 It calls different extract functions according to different update types
1278 to clear all the modifiable items from VNFD
1279
1280 Args:
1281 vnfd (dict): Descriptor as a dictionary
1282
1283 Returns:
1284 vnfd (dict): Descriptor which does not include modifiable contents
1285 """
1286 if vnfd.get("vnfd"):
1287 vnfd = vnfd["vnfd"]
1288 vnfd.pop("_admin", None)
1289 # If the other extractions need to be done from VNFD,
1290 # the new extract methods could be appended to below list.
1291 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1292 vnfd_temp = extract_function(vnfd)
1293 vnfd = vnfd_temp
1294 return vnfd
1295
1296 def _validate_descriptor_changes(
1297 self,
1298 descriptor_id: str,
1299 descriptor_file_name: str,
1300 old_descriptor_directory: str,
1301 new_descriptor_directory: str,
1302 ):
1303 """Compares the old and new VNFD descriptors and validates the new descriptor.
1304
1305 Args:
1306 old_descriptor_directory (str): Directory of descriptor which is in-use
1307 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1308
1309 Returns:
1310 None
1311
1312 Raises:
1313 EngineException: In case of error when there are unallowed changes
1314 """
1315 try:
1316 # If VNFD does not exist in DB or it is not in use by any NS,
1317 # validation is not required.
1318 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1319 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1320 return
1321
1322 # Get the old and new descriptor contents in order to compare them.
1323 with self.fs.file_open(
1324 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1325 ) as old_descriptor_file:
1326
1327 with self.fs.file_open(
1328 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1329 ) as new_descriptor_file:
1330
1331 old_content = yaml.safe_load(old_descriptor_file.read())
1332 new_content = yaml.safe_load(new_descriptor_file.read())
1333
1334 # If software version has changed, we do not need to validate
1335 # the differences anymore.
1336 if old_content and new_content:
1337 if self.find_software_version(
1338 old_content
1339 ) != self.find_software_version(new_content):
1340 return
1341
1342 disallowed_change = DeepDiff(
1343 self.remove_modifiable_items(old_content),
1344 self.remove_modifiable_items(new_content),
1345 )
1346
1347 if disallowed_change:
1348 changed_nodes = functools.reduce(
1349 lambda a, b: a + " , " + b,
1350 [
1351 node.lstrip("root")
1352 for node in disallowed_change.get(
1353 "values_changed"
1354 ).keys()
1355 ],
1356 )
1357
1358 raise EngineException(
1359 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1360 "there are disallowed changes in the vnf descriptor.",
1361 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1362 )
1363 except (
1364 DbException,
1365 AttributeError,
1366 IndexError,
1367 KeyError,
1368 ValueError,
1369 ) as e:
1370 raise type(e)(
1371 "VNF Descriptor could not be processed with error: {}.".format(e)
1372 )
1373
1374
class NsdTopic(DescriptorTopic):
    """SOL005 NS descriptor (NSD) topic: validation, conflict checks and projections."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the ETSI SOL006 NSD pyangbind model and
        return the normalized descriptor.

        :raises EngineException: (HTTP 422) on old-format or invalid descriptors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile content is kept aside and restored after the
            # pyangbind round-trip (it is handled outside the model).
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 OSM descriptors used the "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nsd"/"etsi-nfv-nsd:nsd" envelope and return the single NSD inside."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: pyangbind model check plus cross references."""
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt-network vld."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df:vnf-profile:vnfd-id is listed in the NSD vnfd-id list."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance instead of type() ==: also accepts dict subclasses
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return a {vnfd-id: vnfd} index for every vnfd referenced by *descriptor*.

        :raises EngineException: (HTTP 409) when a referenced vnfd does not exist
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check constituent-cpd-id references resolve to ext-cpd ids of the vnfd."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then validate descriptor dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super).
        Deletes the stored revisions of this descriptor.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the NSD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_id: Internal id of the descriptor
            descriptor_file_name: Name of the descriptor file
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:

                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:

                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # DeepDiff may report only additions/removals, in
                            # which case "values_changed" is absent; fall back
                            # to the whole diff instead of crashing on None
                            # (and join handles the empty case, unlike reduce).
                            values_changed = (
                                disallowed_change.get("values_changed") or {}
                            )
                            changed_nodes = ", ".join(
                                node.lstrip("root") for node in values_changed
                            ) or str(disallowed_change)

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Expose the SOL005 states and _links of an NSD, then delegate to super."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1776
1777
class NstTopic(DescriptorTopic):
    """Network Slice Template (NST) topic: validation, conflict checks and projections."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the NST pyangbind model and return the
        normalized descriptor.

        :raises EngineException: (HTTP 422) on invalid descriptors
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nst"/"nst:nst" envelope and return the single NST inside."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                # fixed message wording ("list of only one element")
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NST through the pyangbind model."""
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then validate descriptor dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose the SOL005 states and _links of an NST, then delegate to super."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1895
1896
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Units ("pdus" collection)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply the generic creation formatting and seed the SOL005
        lifecycle state fields under _admin."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # any vnfr of this project with a vdur bound to this pdu blocks deletion
        query = self._get_project_filter(session)
        query["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", query):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1932
1933
class VnfPkgOpTopic(BaseTopic):
    """Topic for VNF package operation occurrences ("vnfpkgops").

    Entries are create-only: edit, delete and delete_list are explicitly
    rejected with METHOD_NOT_ALLOWED.
    """

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing an operation occurrence is not supported."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting an operation occurrence is not supported."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of operation occurrences is not supported."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    @staticmethod
    def _get_kdu_artifact(vnfd, vnfpkg_id, kdu_name):
        """Locate the named kdu inside *vnfd* and return its deployment artifact.

        :param vnfd: vnfd database record (plain dict)
        :param vnfpkg_id: vnf package id, used only in error messages
        :param kdu_name: name of the kdu to look up
        :return: tuple (artifact key, artifact value) where the key is
            "helm-chart" or "juju-bundle"
        :raises EngineException: if the kdu does not exist or declares neither
            a helm chart nor a juju bundle
        """
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] != kdu_name:
                continue
            # helm charts take precedence over juju bundles (original behavior)
            for artifact_key in ("helm-chart", "juju-bundle"):
                artifact = kdu.get(artifact_key)
                if artifact:
                    return artifact_key, artifact
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        raise EngineException(
            "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
        )

    @staticmethod
    def _extract_repo_name(artifact):
        """Return the repository part of a "<repo>/<name>" artifact reference,
        or None when the reference is not in that two-part form (i.e. the
        chart/bundle is packaged locally)."""
        match = fullmatch(r"([^/]*)/([^/]*)", artifact)
        return match.group(1) if match else None

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # resolve the kdu artifact and record it in the operation parameters
        artifact_key, artifact = self._get_kdu_artifact(vnfd, vnfpkg_id, kdu_name)
        indata[artifact_key] = artifact

        # a "<repo>/<name>" reference implies an external k8s repository that
        # must be registered in this project
        repo_name = self._extract_repo_name(artifact)
        if repo_name:
            # reuse the project filter, swapping the package id for the repo name
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url

        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # statusEnteredTime/startTime mirror the _admin creation timestamp
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None