Bug 1830 fixed: maps completed operations to original operation types
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23
24 # import logging
25 from deepdiff import DeepDiff
26 from hashlib import md5
27 from osm_common.dbbase import DbException, deep_update_rfc7396
28 from http import HTTPStatus
29 from time import time
30 from uuid import uuid4
31 from re import fullmatch
32 from zipfile import ZipFile
33 from osm_nbi.validation import (
34 ValidationError,
35 pdu_new_schema,
36 pdu_edit_schema,
37 validate_input,
38 vnfpkgop_new_schema,
39 )
40 from osm_nbi.base_topic import (
41 BaseTopic,
42 EngineException,
43 get_iterable,
44 detect_descriptor_usage,
45 )
46 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
47 from osm_im.nst import nst as nst_im
48 from pyangbind.lib.serialise import pybindJSONDecoder
49 import pyangbind.lib.pybindJSON as pybindJSON
50 from osm_nbi import utils
51
52 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
53
54
class DescriptorTopic(BaseTopic):
    """Common behaviour shared by descriptor topics (VNFD, NSD, NST)."""

    def __init__(self, db, fs, msg, auth):
        # Delegate all wiring (db, filesystem, message bus, auth) to BaseTopic.
        super().__init__(db, fs, msg, auth)
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate an edited descriptor before it is stored.

        Runs the BaseTopic checks, verifies id/name uniqueness inside every list
        of the descriptor, re-validates the whole document with pyangbind and
        checks that the descriptor 'id' does not collide with another descriptor
        of the same project.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: the edit payload itself
        :param _id: internal database id of the descriptor being edited
        :return: the serialized, validated final content
        :raises EngineException: on duplicated identifiers or id conflicts
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk every list of dicts; within one list, the value of
            # the discriminator key ("id" if the first element has one, else
            # "name") must be unique.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case: the first element decides whether this
                            # list is keyed by "id" or by "name"
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    # position is rebound only on this error path
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" was present in final_content; a content
        # without it would raise KeyError here — confirm callers always set it.
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present (other than in this same _id)
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        self.topic[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
131
132 @staticmethod
133 def format_on_new(content, project_id=None, make_public=False):
134 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
135 content["_admin"]["onboardingState"] = "CREATED"
136 content["_admin"]["operationalState"] = "DISABLED"
137 content["_admin"]["usageState"] = "NOT_IN_USE"
138
139 def delete_extra(self, session, _id, db_content, not_send_msg=None):
140 """
141 Deletes file system storage associated with the descriptor
142 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
143 :param _id: server internal id
144 :param db_content: The database content of the descriptor
145 :param not_send_msg: To not send message (False) or store content (list) instead
146 :return: None if ok or raises EngineException with the problem
147 """
148 self.fs.file_delete(_id, ignore_non_exist=True)
149 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
150 # Remove file revisions
151 if "revision" in db_content["_admin"]:
152 revision = db_content["_admin"]["revision"]
153 while revision > 0:
154 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
155 revision = revision - 1
156
157
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Return the single descriptor of *topic* whose descriptor "id" equals *id*.

        First looks among descriptors owned by the session project; if none is
        found, a second lookup is attempted.
        :param db: database driver
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name, e.g. "vnfds"
        :param id: descriptor id (not the internal _id)
        :return: the descriptor dict
        :raises DbException: NOT_FOUND if none exists, CONFLICT if ambiguous
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second lookup builds the exact same filter as the
        # first one, so it seemingly repeats the same query instead of widening
        # to public descriptors — confirm against _get_project_filter semantics.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
192
193 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
194 """
195 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
196 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
197 (self.upload_content)
198 :param rollback: list to append created items at database in case a rollback may to be done
199 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
200 :param indata: data to be inserted
201 :param kwargs: used to override the indata descriptor
202 :param headers: http request headers
203 :return: _id, None: identity of the inserted data; and None as there is not any operation
204 """
205
206 # No needed to capture exceptions
207 # Check Quota
208 self.check_quota(session)
209
210 # _remove_envelop
211 if indata:
212 if "userDefinedData" in indata:
213 indata = indata["userDefinedData"]
214
215 # Override descriptor with query string kwargs
216 self._update_input_with_kwargs(indata, kwargs)
217 # uncomment when this method is implemented.
218 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
219 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
220
221 content = {"_admin": {
222 "userDefinedData": indata,
223 "revision": 0
224 }}
225
226 self.format_on_new(
227 content, session["project_id"], make_public=session["public"]
228 )
229 _id = self.db.create(self.topic, content)
230 rollback.append({"topic": self.topic, "_id": _id})
231 self._send_msg("created", {"_id": _id})
232 return _id, None
233
234 def upload_content(self, session, _id, indata, kwargs, headers):
235 """
236 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
237 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
238 :param _id : the nsd,vnfd is already created, this is the id
239 :param indata: http body request
240 :param kwargs: user query string to override parameters. NOT USED
241 :param headers: http request headers
242 :return: True if package is completely uploaded or False if partial content has been uploded
243 Raise exception on error
244 """
245 # Check that _id exists and it is valid
246 current_desc = self.show(session, _id)
247
248 content_range_text = headers.get("Content-Range")
249 expected_md5 = headers.get("Content-File-MD5")
250 compressed = None
251 content_type = headers.get("Content-Type")
252 if (
253 content_type
254 and "application/gzip" in content_type
255 or "application/x-gzip" in content_type
256 ):
257 compressed = "gzip"
258 if (
259 content_type
260 and "application/zip" in content_type
261 ):
262 compressed = "zip"
263 filename = headers.get("Content-Filename")
264 if not filename and compressed:
265 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
266 elif not filename:
267 filename = "package"
268
269 revision = 1
270 if "revision" in current_desc["_admin"]:
271 revision = current_desc["_admin"]["revision"] + 1
272
273 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
274 file_pkg = None
275 error_text = ""
276 fs_rollback = []
277
278 try:
279 if content_range_text:
280 content_range = (
281 content_range_text.replace("-", " ").replace("/", " ").split()
282 )
283 if (
284 content_range[0] != "bytes"
285 ): # TODO check x<y not negative < total....
286 raise IndexError()
287 start = int(content_range[1])
288 end = int(content_range[2]) + 1
289 total = int(content_range[3])
290 else:
291 start = 0
292 # Rather than using a temp folder, we will store the package in a folder based on
293 # the current revision.
294 proposed_revision_path = (
295 _id + ":" + str(revision)
296 ) # all the content is upload here and if ok, it is rename from id_ to is folder
297
298 if start:
299 if not self.fs.file_exists(proposed_revision_path, "dir"):
300 raise EngineException(
301 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
302 )
303 else:
304 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
305 self.fs.mkdir(proposed_revision_path)
306 fs_rollback.append(proposed_revision_path)
307
308 storage = self.fs.get_params()
309 storage["folder"] = proposed_revision_path
310
311 file_path = (proposed_revision_path, filename)
312 if self.fs.file_exists(file_path, "file"):
313 file_size = self.fs.file_size(file_path)
314 else:
315 file_size = 0
316 if file_size != start:
317 raise EngineException(
318 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
319 file_size, start
320 ),
321 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
322 )
323 file_pkg = self.fs.file_open(file_path, "a+b")
324 if isinstance(indata, dict):
325 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
326 file_pkg.write(indata_text.encode(encoding="utf-8"))
327 else:
328 indata_len = 0
329 while True:
330 indata_text = indata.read(4096)
331 indata_len += len(indata_text)
332 if not indata_text:
333 break
334 file_pkg.write(indata_text)
335 if content_range_text:
336 if indata_len != end - start:
337 raise EngineException(
338 "Mismatch between Content-Range header {}-{} and body length of {}".format(
339 start, end - 1, indata_len
340 ),
341 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
342 )
343 if end != total:
344 # TODO update to UPLOADING
345 return False
346
347 # PACKAGE UPLOADED
348 if expected_md5:
349 file_pkg.seek(0, 0)
350 file_md5 = md5()
351 chunk_data = file_pkg.read(1024)
352 while chunk_data:
353 file_md5.update(chunk_data)
354 chunk_data = file_pkg.read(1024)
355 if expected_md5 != file_md5.hexdigest():
356 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
357 file_pkg.seek(0, 0)
358 if compressed == "gzip":
359 tar = tarfile.open(mode="r", fileobj=file_pkg)
360 descriptor_file_name = None
361 for tarinfo in tar:
362 tarname = tarinfo.name
363 tarname_path = tarname.split("/")
364 if (
365 not tarname_path[0] or ".." in tarname_path
366 ): # if start with "/" means absolute path
367 raise EngineException(
368 "Absolute path or '..' are not allowed for package descriptor tar.gz"
369 )
370 if len(tarname_path) == 1 and not tarinfo.isdir():
371 raise EngineException(
372 "All files must be inside a dir for package descriptor tar.gz"
373 )
374 if (
375 tarname.endswith(".yaml")
376 or tarname.endswith(".json")
377 or tarname.endswith(".yml")
378 ):
379 storage["pkg-dir"] = tarname_path[0]
380 if len(tarname_path) == 2:
381 if descriptor_file_name:
382 raise EngineException(
383 "Found more than one descriptor file at package descriptor tar.gz"
384 )
385 descriptor_file_name = tarname
386 if not descriptor_file_name:
387 raise EngineException(
388 "Not found any descriptor file at package descriptor tar.gz"
389 )
390 storage["descriptor"] = descriptor_file_name
391 storage["zipfile"] = filename
392 self.fs.file_extract(tar, proposed_revision_path)
393 with self.fs.file_open(
394 (proposed_revision_path, descriptor_file_name), "r"
395 ) as descriptor_file:
396 content = descriptor_file.read()
397 elif compressed == "zip":
398 zipfile = ZipFile(file_pkg)
399 descriptor_file_name = None
400 for package_file in zipfile.infolist():
401 zipfilename = package_file.filename
402 file_path = zipfilename.split("/")
403 if (
404 not file_path[0] or ".." in zipfilename
405 ): # if start with "/" means absolute path
406 raise EngineException(
407 "Absolute path or '..' are not allowed for package descriptor zip"
408 )
409
410 if (
411 (
412 zipfilename.endswith(".yaml")
413 or zipfilename.endswith(".json")
414 or zipfilename.endswith(".yml")
415 ) and (
416 zipfilename.find("/") < 0
417 or zipfilename.find("Definitions") >= 0
418 )
419 ):
420 storage["pkg-dir"] = ""
421 if descriptor_file_name:
422 raise EngineException(
423 "Found more than one descriptor file at package descriptor zip"
424 )
425 descriptor_file_name = zipfilename
426 if not descriptor_file_name:
427 raise EngineException(
428 "Not found any descriptor file at package descriptor zip"
429 )
430 storage["descriptor"] = descriptor_file_name
431 storage["zipfile"] = filename
432 self.fs.file_extract(zipfile, proposed_revision_path)
433
434 with self.fs.file_open(
435 (proposed_revision_path, descriptor_file_name), "r"
436 ) as descriptor_file:
437 content = descriptor_file.read()
438 else:
439 content = file_pkg.read()
440 storage["descriptor"] = descriptor_file_name = filename
441
442 if descriptor_file_name.endswith(".json"):
443 error_text = "Invalid json format "
444 indata = json.load(content)
445 else:
446 error_text = "Invalid yaml format "
447 indata = yaml.load(content, Loader=yaml.SafeLoader)
448
449 # Need to close the file package here so it can be copied from the
450 # revision to the current, unrevisioned record
451 if file_pkg:
452 file_pkg.close()
453 file_pkg = None
454
455 # Fetch both the incoming, proposed revision and the original revision so we
456 # can call a validate method to compare them
457 current_revision_path = _id + "/"
458 self.fs.sync(from_path=current_revision_path)
459 self.fs.sync(from_path=proposed_revision_path)
460
461 if revision > 1:
462 try:
463 self._validate_descriptor_changes(
464 _id,
465 descriptor_file_name,
466 current_revision_path,
467 proposed_revision_path,
468 )
469 except Exception as e:
470 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
471 shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
472 # Only delete the new revision. We need to keep the original version in place
473 # as it has not been changed.
474 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
475 raise e
476
477
478 indata = self._remove_envelop(indata)
479
480 # Override descriptor with query string kwargs
481 if kwargs:
482 self._update_input_with_kwargs(indata, kwargs)
483
484 current_desc["_admin"]["storage"] = storage
485 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
486 current_desc["_admin"]["operationalState"] = "ENABLED"
487 current_desc["_admin"]["modified"] = time()
488 current_desc["_admin"]["revision"] = revision
489
490 deep_update_rfc7396(current_desc, indata)
491 current_desc = self.check_conflict_on_edit(
492 session, current_desc, indata, _id=_id
493 )
494
495 # Copy the revision to the active package name by its original id
496 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
497 os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
498 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
499 self.fs.mkdir(current_revision_path)
500 self.fs.reverse_sync(from_path=current_revision_path)
501
502 shutil.rmtree(self.fs.path + _id)
503
504 self.db.replace(self.topic, _id, current_desc)
505
506 # Store a copy of the package as a point in time revision
507 revision_desc = dict(current_desc)
508 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
509 self.db.create(self.topic + "_revisions", revision_desc)
510 fs_rollback = []
511
512 indata["_id"] = _id
513 self._send_msg("edited", indata)
514
515 # TODO if descriptor has changed because kwargs update content and remove cached zip
516 # TODO if zip is not present creates one
517 return True
518
519 except EngineException:
520 raise
521 except IndexError:
522 raise EngineException(
523 "invalid Content-Range header format. Expected 'bytes start-end/total'",
524 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
525 )
526 except IOError as e:
527 raise EngineException(
528 "invalid upload transaction sequence: '{}'".format(e),
529 HTTPStatus.BAD_REQUEST,
530 )
531 except tarfile.ReadError as e:
532 raise EngineException(
533 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
534 )
535 except (ValueError, yaml.YAMLError) as e:
536 raise EngineException(error_text + str(e))
537 except ValidationError as e:
538 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
539 finally:
540 if file_pkg:
541 file_pkg.close()
542 for file in fs_rollback:
543 self.fs.file_delete(file, ignore_non_exist=True)
544
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Work out which response formats the client accepts
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        # Only fully onboarded descriptors have retrievable content
        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # A directory is served as a plain-text listing, a file as binary
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decision table for whole-package retrieval:
        # pkgtype          accept ZIP  TEXT  -> result
        # manyfiles        yes         X     -> zip
        #                  no          yes   -> error
        # onefile          yes         no    -> zip
        #                  X           yes   -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
635
636 def _remove_yang_prefixes_from_descriptor(self, descriptor):
637 new_descriptor = {}
638 for k, v in descriptor.items():
639 new_v = v
640 if isinstance(v, dict):
641 new_v = self._remove_yang_prefixes_from_descriptor(v)
642 elif isinstance(v, list):
643 new_v = list()
644 for x in v:
645 if isinstance(x, dict):
646 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
647 else:
648 new_v.append(x)
649 new_descriptor[k.split(":")[-1]] = new_v
650 return new_descriptor
651
652 def pyangbind_validation(self, item, data, force=False):
653 raise EngineException(
654 "Not possible to validate '{}' item".format(item),
655 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
656 )
657
658 def _validate_input_edit(self, indata, content, force=False):
659 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
660 if "_id" in indata:
661 indata.pop("_id")
662 if "_admin" not in indata:
663 indata["_admin"] = {}
664
665 if "operationalState" in indata:
666 if indata["operationalState"] in ("ENABLED", "DISABLED"):
667 indata["_admin"]["operationalState"] = indata.pop("operationalState")
668 else:
669 raise EngineException(
670 "State '{}' is not a valid operational state".format(
671 indata["operationalState"]
672 ),
673 http_code=HTTPStatus.BAD_REQUEST,
674 )
675
676 # In the case of user defined data, we need to put the data in the root of the object
677 # to preserve current expected behaviour
678 if "userDefinedData" in indata:
679 data = indata.pop("userDefinedData")
680 if type(data) == dict:
681 indata["_admin"]["userDefinedData"] = data
682 else:
683 raise EngineException(
684 "userDefinedData should be an object, but is '{}' instead".format(
685 type(data)
686 ),
687 http_code=HTTPStatus.BAD_REQUEST,
688 )
689
690 if (
691 "operationalState" in indata["_admin"]
692 and content["_admin"]["operationalState"]
693 == indata["_admin"]["operationalState"]
694 ):
695 raise EngineException(
696 "operationalState already {}".format(
697 content["_admin"]["operationalState"]
698 ),
699 http_code=HTTPStatus.CONFLICT,
700 )
701
702 return indata
703
704 def _validate_descriptor_changes(
705 self,
706 descriptor_id,
707 descriptor_file_name,
708 old_descriptor_directory,
709 new_descriptor_directory
710 ):
711 # Example:
712 # raise EngineException(
713 # "Error in validating new descriptor: <NODE> cannot be modified",
714 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
715 # )
716 pass
717
class VnfdTopic(DescriptorTopic):
    """Descriptor topic handling VNF packages (vnfds collection)."""

    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        # All behaviour is inherited; only the topic identifiers differ.
        super().__init__(db, fs, msg, auth)
724
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate *data* against the ETSI SOL006 VNFD YANG model via pyangbind.
        :param item: topic name (e.g. "vnfds"); not used directly here
        :param data: de-enveloped vnfd descriptor dict
        :param force: when True, unknown descriptor fields are skipped
        :return: *data* deep-updated with the serialized (normalized) model output
        :raises EngineException: UNPROCESSABLE_ENTITY for old-format descriptors
            or any pyangbind load/serialization error
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # Load into the generated SOL006 model (this performs the validation)
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize back, strip the envelope and YANG prefixes, and merge the
            # normalized output over the original data
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
750
751 @staticmethod
752 def _descriptor_data_is_in_old_format(data):
753 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
754
755 @staticmethod
756 def _remove_envelop(indata=None):
757 if not indata:
758 return {}
759 clean_indata = indata
760
761 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
762 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
763 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
764 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
765 elif clean_indata.get("vnfd"):
766 if not isinstance(clean_indata["vnfd"], dict):
767 raise EngineException("'vnfd' must be dict")
768 clean_indata = clean_indata["vnfd"]
769
770 return clean_indata
771
772 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
773 final_content = super().check_conflict_on_edit(
774 session, final_content, edit_content, _id
775 )
776
777 # set type of vnfd
778 contains_pdu = False
779 contains_vdu = False
780 for vdu in get_iterable(final_content.get("vdu")):
781 if vdu.get("pdu-type"):
782 contains_pdu = True
783 else:
784 contains_vdu = True
785 if contains_pdu:
786 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
787 elif contains_vdu:
788 final_content["_admin"]["type"] = "vnfd"
789 # if neither vud nor pdu do not fill type
790 return final_content
791
792 def check_conflict_on_del(self, session, _id, db_content):
793 """
794 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
795 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
796 that uses this vnfd
797 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
798 :param _id: vnfd internal id
799 :param db_content: The database content of the _id.
800 :return: None or raises EngineException with the conflict
801 """
802 if session["force"]:
803 return
804 descriptor = db_content
805 descriptor_id = descriptor.get("id")
806 if not descriptor_id: # empty vnfd not uploaded
807 return
808
809 _filter = self._get_project_filter(session)
810
811 # check vnfrs using this vnfd
812 _filter["vnfd-id"] = _id
813 if self.db.get_list("vnfrs", _filter):
814 raise EngineException(
815 "There is at least one VNF instance using this descriptor",
816 http_code=HTTPStatus.CONFLICT,
817 )
818
819 # check NSD referencing this VNFD
820 del _filter["vnfd-id"]
821 _filter["vnfd-id"] = descriptor_id
822 if self.db.get_list("nsds", _filter):
823 raise EngineException(
824 "There is at least one NS package referencing this descriptor",
825 http_code=HTTPStatus.CONFLICT,
826 )
827
    def _validate_input_new(self, indata, storage_params, force=False):
        """
        Validate a new VNFD: SOL006 model validation plus cross-reference checks
        between the descriptor and the uploaded package content.
        :param indata: de-enveloped vnfd descriptor (modified in place)
        :param storage_params: _admin["storage"] data locating the package files
        :param force: passed to pyangbind validation to skip unknown fields
        :return: the validated/normalized descriptor
        """
        # SOL005 read-only attributes are not part of the stored descriptor
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)

        return indata
852
853 @staticmethod
854 def validate_mgmt_interface_connection_point(indata):
855 if not indata.get("vdu"):
856 return
857 if not indata.get("mgmt-cp"):
858 raise EngineException(
859 "'mgmt-cp' is a mandatory field and it is not defined",
860 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
861 )
862
863 for cp in get_iterable(indata.get("ext-cpd")):
864 if cp["id"] == indata["mgmt-cp"]:
865 break
866 else:
867 raise EngineException(
868 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
869 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
870 )
871
872 @staticmethod
873 def validate_vdu_internal_connection_points(vdu):
874 int_cpds = set()
875 for cpd in get_iterable(vdu.get("int-cpd")):
876 cpd_id = cpd.get("id")
877 if cpd_id and cpd_id in int_cpds:
878 raise EngineException(
879 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
880 vdu["id"], cpd_id
881 ),
882 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
883 )
884 int_cpds.add(cpd_id)
885
886 @staticmethod
887 def validate_external_connection_points(indata):
888 all_vdus_int_cpds = set()
889 for vdu in get_iterable(indata.get("vdu")):
890 for int_cpd in get_iterable(vdu.get("int-cpd")):
891 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
892
893 ext_cpds = set()
894 for cpd in get_iterable(indata.get("ext-cpd")):
895 cpd_id = cpd.get("id")
896 if cpd_id and cpd_id in ext_cpds:
897 raise EngineException(
898 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
899 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
900 )
901 ext_cpds.add(cpd_id)
902
903 int_cpd = cpd.get("int-cpd")
904 if int_cpd:
905 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
906 raise EngineException(
907 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
908 cpd_id
909 ),
910 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
911 )
912 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
913
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """For each deployment flavour, when a day1-2 configuration matching a
        vdu-profile declares a juju execution environment, verify that the charm
        directory is shipped inside the package."""
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # NOTE(review): a day1-2 config is matched to a VDU by
                        # equal "id" — confirm that is the intended association.
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            # Charms may live under "charms" or "Scripts/charms"
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
939
940 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
941 if not vdu.get("cloud-init-file"):
942 return
943 if not self._validate_package_folders(
944 storage_params, "cloud_init", vdu["cloud-init-file"]
945 ) and not self._validate_package_folders(
946 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
947 ):
948 raise EngineException(
949 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
950 "package".format(indata["id"], vdu["id"])
951 )
952
953 def _validate_vnf_charms_in_package(self, storage_params, indata):
954 # Get VNF configuration through new container
955 for deployment_flavor in indata.get("df", []):
956 if "lcm-operations-configuration" not in deployment_flavor:
957 return
958 if (
959 "operate-vnf-op-config"
960 not in deployment_flavor["lcm-operations-configuration"]
961 ):
962 return
963 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
964 "operate-vnf-op-config"
965 ]["day1-2"]:
966 if day_1_2_config["id"] == indata["id"]:
967 if utils.find_in_list(
968 day_1_2_config.get("execution-environment-list", []),
969 lambda ee: "juju" in ee,
970 ):
971 if not self._validate_package_folders(
972 storage_params, "charms"
973 ) and not self._validate_package_folders(
974 storage_params, "Scripts/charms"
975 ):
976 raise EngineException(
977 "Charm defined in vnf[id={}] but not present in "
978 "package".format(indata["id"])
979 )
980
981 def _validate_package_folders(self, storage_params, folder, file=None):
982 if not storage_params:
983 return False
984 elif not storage_params.get("pkg-dir"):
985 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
986 f = "{}_/{}".format(
987 storage_params["folder"], folder
988 )
989 else:
990 f = "{}/{}".format(
991 storage_params["folder"], folder
992 )
993 if file:
994 return self.fs.file_exists("{}/{}".format(f, file), "file")
995 else:
996 if self.fs.file_exists(f, "dir"):
997 if self.fs.dir_ls(f):
998 return True
999 return False
1000 else:
1001 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1002 f = "{}_/{}/{}".format(
1003 storage_params["folder"], storage_params["pkg-dir"], folder
1004 )
1005 else:
1006 f = "{}/{}/{}".format(
1007 storage_params["folder"], storage_params["pkg-dir"], folder
1008 )
1009 if file:
1010 return self.fs.file_exists("{}/{}".format(f, file), "file")
1011 else:
1012 if self.fs.file_exists(f, "dir"):
1013 if self.fs.dir_ls(f):
1014 return True
1015 return False
1016
1017 @staticmethod
1018 def validate_internal_virtual_links(indata):
1019 all_ivld_ids = set()
1020 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1021 ivld_id = ivld.get("id")
1022 if ivld_id and ivld_id in all_ivld_ids:
1023 raise EngineException(
1024 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1025 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1026 )
1027 else:
1028 all_ivld_ids.add(ivld_id)
1029
1030 for vdu in get_iterable(indata.get("vdu")):
1031 for int_cpd in get_iterable(vdu.get("int-cpd")):
1032 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1033 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1034 raise EngineException(
1035 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1036 "int-virtual-link-desc".format(
1037 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1038 ),
1039 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1040 )
1041
1042 for df in get_iterable(indata.get("df")):
1043 for vlp in get_iterable(df.get("virtual-link-profile")):
1044 vlp_ivld_id = vlp.get("id")
1045 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1046 raise EngineException(
1047 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1048 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1049 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1050 )
1051
1052 @staticmethod
1053 def validate_monitoring_params(indata):
1054 all_monitoring_params = set()
1055 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1056 for mp in get_iterable(ivld.get("monitoring-parameters")):
1057 mp_id = mp.get("id")
1058 if mp_id and mp_id in all_monitoring_params:
1059 raise EngineException(
1060 "Duplicated monitoring-parameter id in "
1061 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1062 ivld["id"], mp_id
1063 ),
1064 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1065 )
1066 else:
1067 all_monitoring_params.add(mp_id)
1068
1069 for vdu in get_iterable(indata.get("vdu")):
1070 for mp in get_iterable(vdu.get("monitoring-parameter")):
1071 mp_id = mp.get("id")
1072 if mp_id and mp_id in all_monitoring_params:
1073 raise EngineException(
1074 "Duplicated monitoring-parameter id in "
1075 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1076 vdu["id"], mp_id
1077 ),
1078 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1079 )
1080 else:
1081 all_monitoring_params.add(mp_id)
1082
1083 for df in get_iterable(indata.get("df")):
1084 for mp in get_iterable(df.get("monitoring-parameter")):
1085 mp_id = mp.get("id")
1086 if mp_id and mp_id in all_monitoring_params:
1087 raise EngineException(
1088 "Duplicated monitoring-parameter id in "
1089 "df[id='{}']:monitoring-parameter[id='{}']".format(
1090 df["id"], mp_id
1091 ),
1092 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1093 )
1094 else:
1095 all_monitoring_params.add(mp_id)
1096
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling aspects of a VNFD.

        Checks that every scaling-criteria references a declared
        monitoring-parameter and that every scaling-config-action references a
        config-primitive declared in a day1-2 configuration.

        :param indata: descriptor contents as a dictionary
        :raises EngineException: on any dangling reference (422)
        """
        # Collect every monitoring-parameter id declared anywhere in the
        # descriptor (int-virtual-link-desc, vdu and df sections).
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Each scaling-criteria must reference a known monitoring param.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration
                    # whose id matches the VNF id; otherwise raise.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # For each day1-2 configuration, the referenced primitive
                    # must be among its config-primitives: the for/else below
                    # raises as soon as one configuration lacks a matching
                    # config-primitive name (the else fires when the inner
                    # loop completes without break).
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1177
1178 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1179 """
1180 Deletes associate file system storage (via super)
1181 Deletes associated vnfpkgops from database.
1182 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1183 :param _id: server internal id
1184 :param db_content: The database content of the descriptor
1185 :return: None
1186 :raises: FsException in case of error while deleting associated storage
1187 """
1188 super().delete_extra(session, _id, db_content, not_send_msg)
1189 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1190 self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
1191
1192 def sol005_projection(self, data):
1193 data["onboardingState"] = data["_admin"]["onboardingState"]
1194 data["operationalState"] = data["_admin"]["operationalState"]
1195 data["usageState"] = data["_admin"]["usageState"]
1196
1197 links = {}
1198 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1199 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1200 links["packageContent"] = {
1201 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1202 }
1203 data["_links"] = links
1204
1205 return super().sol005_projection(data)
1206
1207 @staticmethod
1208 def find_software_version(vnfd: dict) -> str:
1209 """Find the sotware version in the VNFD descriptors
1210
1211 Args:
1212 vnfd (dict): Descriptor as a dictionary
1213
1214 Returns:
1215 software-version (str)
1216 """
1217 default_sw_version = "1.0"
1218 if vnfd.get("vnfd"):
1219 vnfd = vnfd["vnfd"]
1220 if vnfd.get("software-version"):
1221 return vnfd["software-version"]
1222 else:
1223 return default_sw_version
1224
1225 @staticmethod
1226 def extract_policies(vnfd: dict) -> dict:
1227 """Removes the policies from the VNFD descriptors
1228
1229 Args:
1230 vnfd (dict): Descriptor as a dictionary
1231
1232 Returns:
1233 vnfd (dict): VNFD which does not include policies
1234 """
1235 for df in vnfd.get("df", {}):
1236 for policy in ["scaling-aspect", "healing-aspect"]:
1237 if (df.get(policy, {})):
1238 df.pop(policy)
1239 for vdu in vnfd.get("vdu", {}):
1240 for alarm_policy in ["alarm", "monitoring-parameter"]:
1241 if (vdu.get(alarm_policy, {})):
1242 vdu.pop(alarm_policy)
1243 return vnfd
1244
1245 @staticmethod
1246 def extract_day12_primitives(vnfd: dict) -> dict:
1247 """Removes the day12 primitives from the VNFD descriptors
1248
1249 Args:
1250 vnfd (dict): Descriptor as a dictionary
1251
1252 Returns:
1253 vnfd (dict)
1254 """
1255 for df_id, df in enumerate(vnfd.get("df", {})):
1256 if (
1257 df.get("lcm-operations-configuration", {})
1258 .get("operate-vnf-op-config", {})
1259 .get("day1-2")
1260 ):
1261 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1262 "day1-2"
1263 )
1264 for config_id, config in enumerate(day12):
1265 for key in [
1266 "initial-config-primitive",
1267 "config-primitive",
1268 "terminate-config-primitive",
1269 ]:
1270 config.pop(key, None)
1271 day12[config_id] = config
1272 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1273 "day1-2"
1274 ] = day12
1275 vnfd["df"][df_id] = df
1276 return vnfd
1277
1278 def remove_modifiable_items(self, vnfd: dict) -> dict:
1279 """Removes the modifiable parts from the VNFD descriptors
1280
1281 It calls different extract functions according to different update types
1282 to clear all the modifiable items from VNFD
1283
1284 Args:
1285 vnfd (dict): Descriptor as a dictionary
1286
1287 Returns:
1288 vnfd (dict): Descriptor which does not include modifiable contents
1289 """
1290 if vnfd.get("vnfd"):
1291 vnfd = vnfd["vnfd"]
1292 vnfd.pop("_admin", None)
1293 # If the other extractions need to be done from VNFD,
1294 # the new extract methods could be appended to below list.
1295 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1296 vnfd_temp = extract_function(vnfd)
1297 vnfd = vnfd_temp
1298 return vnfd
1299
1300 def _validate_descriptor_changes(
1301 self,
1302 descriptor_id: str,
1303 descriptor_file_name: str,
1304 old_descriptor_directory: str,
1305 new_descriptor_directory: str,
1306 ):
1307 """Compares the old and new VNFD descriptors and validates the new descriptor.
1308
1309 Args:
1310 old_descriptor_directory (str): Directory of descriptor which is in-use
1311 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1312
1313 Returns:
1314 None
1315
1316 Raises:
1317 EngineException: In case of error when there are unallowed changes
1318 """
1319 try:
1320 # If VNFD does not exist in DB or it is not in use by any NS,
1321 # validation is not required.
1322 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1323 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1324 return
1325
1326 # Get the old and new descriptor contents in order to compare them.
1327 with self.fs.file_open(
1328 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1329 ) as old_descriptor_file:
1330
1331 with self.fs.file_open(
1332 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1333 ) as new_descriptor_file:
1334
1335 old_content = yaml.safe_load(old_descriptor_file.read())
1336 new_content = yaml.safe_load(new_descriptor_file.read())
1337
1338 # If software version has changed, we do not need to validate
1339 # the differences anymore.
1340 if old_content and new_content:
1341 if self.find_software_version(
1342 old_content
1343 ) != self.find_software_version(new_content):
1344 return
1345
1346 disallowed_change = DeepDiff(
1347 self.remove_modifiable_items(old_content),
1348 self.remove_modifiable_items(new_content),
1349 )
1350
1351 if disallowed_change:
1352 changed_nodes = functools.reduce(
1353 lambda a, b: a + " , " + b,
1354 [
1355 node.lstrip("root")
1356 for node in disallowed_change.get(
1357 "values_changed"
1358 ).keys()
1359 ],
1360 )
1361
1362 raise EngineException(
1363 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1364 "there are disallowed changes in the vnf descriptor.",
1365 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1366 )
1367 except (
1368 DbException,
1369 AttributeError,
1370 IndexError,
1371 KeyError,
1372 ValueError,
1373 ) as e:
1374 raise type(e)(
1375 "VNF Descriptor could not be processed with error: {}.".format(e)
1376 )
1377
1378
class NsdTopic(DescriptorTopic):
    """Topic handling NS descriptors (NSDs): validation, dependencies, deletion and projection."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 model using pyangbind.

        :param item: topic name (kept for interface compatibility)
        :param data: descriptor contents as a dictionary
        :param force: when True, unknown yang nodes are skipped instead of failing
        :return: the normalized descriptor produced by the pyangbind round-trip
        :raises EngineException: for old-format or invalid descriptors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile is restored after the round-trip because pyangbind
            # does not preserve it.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 descriptors wrapped the NSD in an "nsd-catalog" container.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nsd" / "etsi-nfv-nsd:nsd" envelopes from a descriptor."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a newly uploaded NSD: model validation plus cross-reference checks."""
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on VLDs flagged as mgmt-network.

        :param vld: one virtual-link-desc entry
        :param indata: whole descriptor (to scan df virtual-link-profiles)
        :raises EngineException: when the combination is present (422)
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df vnf-profile references one of the NSD's declared vnfd-ids.

        :param indata: descriptor contents as a dictionary
        :raises EngineException: on a dangling vnfd-id reference (422)
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance (rather than a type equality check) also accepts
            # dict subclasses, which behave identically here.
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a {vnfd-id: vnfd} index for every VNFD referenced by the NSD.

        :raises EngineException: when a referenced vnfd does not exist (409)
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Validate that each constituent-cpd-id points to an ext-cpd of its VNFD.

        :param df: one deployment flavour of the NSD
        :param vnfds_index: {vnfd-id: vnfd} index built from the database
        :raises EngineException: on a dangling constituent-cpd-id (422)
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            # NOTE(review): assumes vnf_profile["vnfd-id"] is present in
            # vnfds_index (guaranteed by the caller's index construction);
            # a missing entry would raise AttributeError here.
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then verify descriptor dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated descriptor revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the NSD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        # Unwrap nested "nsd" envelopes (possibly list-wrapped) first.
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_id: internal id ("_id") of the NSD being updated
            descriptor_file_name: descriptor file name inside both directories
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:

                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:

                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # NOTE(review): if the diff holds no
                            # "values_changed" key, .get() returns None and
                            # .keys() raises AttributeError, re-raised below
                            # with a generic message — confirm intended.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Add the SOL005 read-only state attributes and hypermedia links to an NSD."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1780
1781
class NstTopic(DescriptorTopic):
    """Topic handling Network Slice Templates (NSTs)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST against its yang model using pyangbind.

        :param item: topic name (kept for interface compatibility)
        :param data: descriptor contents as a dictionary
        :param force: when True, unknown yang nodes are skipped instead of failing
        :return: the normalized descriptor produced by the pyangbind round-trip
        :raises EngineException: when the descriptor does not validate
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nst" / "nst:nst" envelopes from a descriptor."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                # Message grammar fixed to match the NSD counterpart.
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a newly uploaded NST after dropping read-only state fields."""
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then verify referenced NSDs exist."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Add the SOL005 read-only state attributes and hypermedia links to an NST."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1899
1900
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Unit descriptors (pdus collection)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the generic _admin section and mark the PDU as ready to use."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # A PDU in use appears as "vdur.pdu-id" inside some vnfr of the project
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1936
1937
class VnfPkgOpTopic(BaseTopic):
    """Topic for VNF package operation occurrences (vnfpkgops collection).

    Entries are append-only records of KDU operations on a VNF package:
    edit/delete are explicitly disallowed, only new() creates content.
    """

    topic = "vnfpkgops"
    # Messages are published on the "vnfd" bus topic, not "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing an operation occurrence is not allowed; always raises 405."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting an operation occurrence is not allowed; always raises 405."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion is not allowed; always raises 405."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        # The referenced vnfd must exist and be visible to this project
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the target KDU inside the vnfd to find out how it is deployed
        # (for-else: the else branch runs only when no kdu matched)
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # A chart/bundle of the form "<repo>/<name>" references a named k8s repo;
        # anything else (no single "/") yields repo_name = None
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # Resolve the repo name to its id and url, reusing the project filter
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        # SOL005-style operation occurrence record; indata (with the fields
        # added above) is stored verbatim as the operation parameters
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # Both timestamps start at the _admin creation time set by format_on_new
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # Publish the full record on the message bus keyed by the operation type,
        # so it is processed asynchronously (presumably by LCM — not visible here)
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None