Fix bug 2088 by validating helm-chart value on VNF
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23 import re
24
25 # import logging
26 from deepdiff import DeepDiff
27 from hashlib import md5
28 from osm_common.dbbase import DbException, deep_update_rfc7396
29 from http import HTTPStatus
30 from time import time
31 from uuid import uuid4
32 from re import fullmatch
33 from zipfile import ZipFile
34 from osm_nbi.validation import (
35 ValidationError,
36 pdu_new_schema,
37 pdu_edit_schema,
38 validate_input,
39 vnfpkgop_new_schema,
40 )
41 from osm_nbi.base_topic import (
42 BaseTopic,
43 EngineException,
44 get_iterable,
45 detect_descriptor_usage,
46 )
47 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
48 from osm_im.nst import nst as nst_im
49 from pyangbind.lib.serialise import pybindJSONDecoder
50 import pyangbind.lib.pybindJSON as pybindJSON
51 from osm_nbi import utils
52
53 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
54
# Accepted format for a KDU "helm-chart" value: an optional "<repo>/" prefix followed
# by a chart name, both restricted to lowercase alphanumerics with inner dashes
# (e.g. "stable/openldap" or "mychart").  Used by VnfdTopic.validate_helm_chart().
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
58
59
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor collections (VNFD/NSD/NST): two-step creation
    (new + upload_content), package storage with revisions, and file retrieval."""

    def __init__(self, db, fs, msg, auth):
        # Delegate wiring of database, file storage, message bus and auth to BaseTopic
        super().__init__(db, fs, msg, auth)
63
    def _validate_input_new(self, indata, storage_params, force=False):
        """Hook for subclasses to validate a new descriptor; base implementation is a no-op."""
        return indata
66
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate the merged descriptor before persisting an edit.

        Checks that ids/names inside lists are unique, re-serializes the content through
        _validate_input_new (pyangbind validation in subclasses) and verifies the
        descriptor "id" is not already used by another entry of this project.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: the edit payload itself
        :param _id: internal database id of the descriptor
        :return: the serialized final content
        :raises EngineException: on duplicated identifiers or id conflicts
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk the descriptor; inside every non-empty list of dicts,
            # the value of the "id" (or "name") key must be unique among siblings.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            # The key to check ("id" vs "name") is decided from the
                            # first element of the list only
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" was present in final_content; a missing
        # "_admin" would raise KeyError here — confirm against callers
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
138
139 @staticmethod
140 def format_on_new(content, project_id=None, make_public=False):
141 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
142 content["_admin"]["onboardingState"] = "CREATED"
143 content["_admin"]["operationalState"] = "DISABLED"
144 content["_admin"]["usageState"] = "NOT_IN_USE"
145
146 def delete_extra(self, session, _id, db_content, not_send_msg=None):
147 """
148 Deletes file system storage associated with the descriptor
149 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
150 :param _id: server internal id
151 :param db_content: The database content of the descriptor
152 :param not_send_msg: To not send message (False) or store content (list) instead
153 :return: None if ok or raises EngineException with the problem
154 """
155 self.fs.file_delete(_id, ignore_non_exist=True)
156 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
157 # Remove file revisions
158 if "revision" in db_content["_admin"]:
159 revision = db_content["_admin"]["revision"]
160 while revision > 0:
161 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
162 revision = revision - 1
163
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Return the single descriptor with external id `id` visible to this session's
        project; raise DbException on ambiguity or when nothing is found."""
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second query rebuilds the exact same filter as the first,
        # so it cannot return public-only entries and will always raise NOT_FOUND here —
        # confirm whether a public-visibility filter is missing
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
198
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback has to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop: only the userDefinedData wrapper content is kept at this stage
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # The entry starts at revision 0 with no descriptor content; the actual package
        # arrives later through upload_content()
        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None
236
237 def upload_content(self, session, _id, indata, kwargs, headers):
238 """
239 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
240 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
241 :param _id : the nsd,vnfd is already created, this is the id
242 :param indata: http body request
243 :param kwargs: user query string to override parameters. NOT USED
244 :param headers: http request headers
245 :return: True if package is completely uploaded or False if partial content has been uploded
246 Raise exception on error
247 """
248 # Check that _id exists and it is valid
249 current_desc = self.show(session, _id)
250
251 content_range_text = headers.get("Content-Range")
252 expected_md5 = headers.get("Content-File-MD5")
253 compressed = None
254 content_type = headers.get("Content-Type")
255 if (
256 content_type
257 and "application/gzip" in content_type
258 or "application/x-gzip" in content_type
259 ):
260 compressed = "gzip"
261 if content_type and "application/zip" in content_type:
262 compressed = "zip"
263 filename = headers.get("Content-Filename")
264 if not filename and compressed:
265 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
266 elif not filename:
267 filename = "package"
268
269 revision = 1
270 if "revision" in current_desc["_admin"]:
271 revision = current_desc["_admin"]["revision"] + 1
272
273 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
274 file_pkg = None
275 error_text = ""
276 fs_rollback = []
277
278 try:
279 if content_range_text:
280 content_range = (
281 content_range_text.replace("-", " ").replace("/", " ").split()
282 )
283 if (
284 content_range[0] != "bytes"
285 ): # TODO check x<y not negative < total....
286 raise IndexError()
287 start = int(content_range[1])
288 end = int(content_range[2]) + 1
289 total = int(content_range[3])
290 else:
291 start = 0
292 # Rather than using a temp folder, we will store the package in a folder based on
293 # the current revision.
294 proposed_revision_path = (
295 _id + ":" + str(revision)
296 ) # all the content is upload here and if ok, it is rename from id_ to is folder
297
298 if start:
299 if not self.fs.file_exists(proposed_revision_path, "dir"):
300 raise EngineException(
301 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
302 )
303 else:
304 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
305 self.fs.mkdir(proposed_revision_path)
306 fs_rollback.append(proposed_revision_path)
307
308 storage = self.fs.get_params()
309 storage["folder"] = proposed_revision_path
310
311 file_path = (proposed_revision_path, filename)
312 if self.fs.file_exists(file_path, "file"):
313 file_size = self.fs.file_size(file_path)
314 else:
315 file_size = 0
316 if file_size != start:
317 raise EngineException(
318 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
319 file_size, start
320 ),
321 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
322 )
323 file_pkg = self.fs.file_open(file_path, "a+b")
324 if isinstance(indata, dict):
325 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
326 file_pkg.write(indata_text.encode(encoding="utf-8"))
327 else:
328 indata_len = 0
329 while True:
330 indata_text = indata.read(4096)
331 indata_len += len(indata_text)
332 if not indata_text:
333 break
334 file_pkg.write(indata_text)
335 if content_range_text:
336 if indata_len != end - start:
337 raise EngineException(
338 "Mismatch between Content-Range header {}-{} and body length of {}".format(
339 start, end - 1, indata_len
340 ),
341 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
342 )
343 if end != total:
344 # TODO update to UPLOADING
345 return False
346
347 # PACKAGE UPLOADED
348 if expected_md5:
349 file_pkg.seek(0, 0)
350 file_md5 = md5()
351 chunk_data = file_pkg.read(1024)
352 while chunk_data:
353 file_md5.update(chunk_data)
354 chunk_data = file_pkg.read(1024)
355 if expected_md5 != file_md5.hexdigest():
356 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
357 file_pkg.seek(0, 0)
358 if compressed == "gzip":
359 tar = tarfile.open(mode="r", fileobj=file_pkg)
360 descriptor_file_name = None
361 for tarinfo in tar:
362 tarname = tarinfo.name
363 tarname_path = tarname.split("/")
364 if (
365 not tarname_path[0] or ".." in tarname_path
366 ): # if start with "/" means absolute path
367 raise EngineException(
368 "Absolute path or '..' are not allowed for package descriptor tar.gz"
369 )
370 if len(tarname_path) == 1 and not tarinfo.isdir():
371 raise EngineException(
372 "All files must be inside a dir for package descriptor tar.gz"
373 )
374 if (
375 tarname.endswith(".yaml")
376 or tarname.endswith(".json")
377 or tarname.endswith(".yml")
378 ):
379 storage["pkg-dir"] = tarname_path[0]
380 if len(tarname_path) == 2:
381 if descriptor_file_name:
382 raise EngineException(
383 "Found more than one descriptor file at package descriptor tar.gz"
384 )
385 descriptor_file_name = tarname
386 if not descriptor_file_name:
387 raise EngineException(
388 "Not found any descriptor file at package descriptor tar.gz"
389 )
390 storage["descriptor"] = descriptor_file_name
391 storage["zipfile"] = filename
392 self.fs.file_extract(tar, proposed_revision_path)
393 with self.fs.file_open(
394 (proposed_revision_path, descriptor_file_name), "r"
395 ) as descriptor_file:
396 content = descriptor_file.read()
397 elif compressed == "zip":
398 zipfile = ZipFile(file_pkg)
399 descriptor_file_name = None
400 for package_file in zipfile.infolist():
401 zipfilename = package_file.filename
402 file_path = zipfilename.split("/")
403 if (
404 not file_path[0] or ".." in zipfilename
405 ): # if start with "/" means absolute path
406 raise EngineException(
407 "Absolute path or '..' are not allowed for package descriptor zip"
408 )
409
410 if (
411 zipfilename.endswith(".yaml")
412 or zipfilename.endswith(".json")
413 or zipfilename.endswith(".yml")
414 ) and (
415 zipfilename.find("/") < 0
416 or zipfilename.find("Definitions") >= 0
417 ):
418 storage["pkg-dir"] = ""
419 if descriptor_file_name:
420 raise EngineException(
421 "Found more than one descriptor file at package descriptor zip"
422 )
423 descriptor_file_name = zipfilename
424 if not descriptor_file_name:
425 raise EngineException(
426 "Not found any descriptor file at package descriptor zip"
427 )
428 storage["descriptor"] = descriptor_file_name
429 storage["zipfile"] = filename
430 self.fs.file_extract(zipfile, proposed_revision_path)
431
432 with self.fs.file_open(
433 (proposed_revision_path, descriptor_file_name), "r"
434 ) as descriptor_file:
435 content = descriptor_file.read()
436 else:
437 content = file_pkg.read()
438 storage["descriptor"] = descriptor_file_name = filename
439
440 if descriptor_file_name.endswith(".json"):
441 error_text = "Invalid json format "
442 indata = json.load(content)
443 else:
444 error_text = "Invalid yaml format "
445 indata = yaml.safe_load(content)
446
447 # Need to close the file package here so it can be copied from the
448 # revision to the current, unrevisioned record
449 if file_pkg:
450 file_pkg.close()
451 file_pkg = None
452
453 # Fetch both the incoming, proposed revision and the original revision so we
454 # can call a validate method to compare them
455 current_revision_path = _id + "/"
456 self.fs.sync(from_path=current_revision_path)
457 self.fs.sync(from_path=proposed_revision_path)
458
459 if revision > 1:
460 try:
461 self._validate_descriptor_changes(
462 _id,
463 descriptor_file_name,
464 current_revision_path,
465 proposed_revision_path,
466 )
467 except Exception as e:
468 shutil.rmtree(
469 self.fs.path + current_revision_path, ignore_errors=True
470 )
471 shutil.rmtree(
472 self.fs.path + proposed_revision_path, ignore_errors=True
473 )
474 # Only delete the new revision. We need to keep the original version in place
475 # as it has not been changed.
476 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
477 raise e
478
479 indata = self._remove_envelop(indata)
480
481 # Override descriptor with query string kwargs
482 if kwargs:
483 self._update_input_with_kwargs(indata, kwargs)
484
485 current_desc["_admin"]["storage"] = storage
486 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
487 current_desc["_admin"]["operationalState"] = "ENABLED"
488 current_desc["_admin"]["modified"] = time()
489 current_desc["_admin"]["revision"] = revision
490
491 deep_update_rfc7396(current_desc, indata)
492 current_desc = self.check_conflict_on_edit(
493 session, current_desc, indata, _id=_id
494 )
495
496 # Copy the revision to the active package name by its original id
497 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
498 os.rename(
499 self.fs.path + proposed_revision_path,
500 self.fs.path + current_revision_path,
501 )
502 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
503 self.fs.mkdir(current_revision_path)
504 self.fs.reverse_sync(from_path=current_revision_path)
505
506 shutil.rmtree(self.fs.path + _id)
507
508 self.db.replace(self.topic, _id, current_desc)
509
510 # Store a copy of the package as a point in time revision
511 revision_desc = dict(current_desc)
512 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
513 self.db.create(self.topic + "_revisions", revision_desc)
514 fs_rollback = []
515
516 indata["_id"] = _id
517 self._send_msg("edited", indata)
518
519 # TODO if descriptor has changed because kwargs update content and remove cached zip
520 # TODO if zip is not present creates one
521 return True
522
523 except EngineException:
524 raise
525 except IndexError:
526 raise EngineException(
527 "invalid Content-Range header format. Expected 'bytes start-end/total'",
528 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
529 )
530 except IOError as e:
531 raise EngineException(
532 "invalid upload transaction sequence: '{}'".format(e),
533 HTTPStatus.BAD_REQUEST,
534 )
535 except tarfile.ReadError as e:
536 raise EngineException(
537 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
538 )
539 except (ValueError, yaml.YAMLError) as e:
540 raise EngineException(error_text + str(e))
541 except ValidationError as e:
542 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
543 finally:
544 if file_pkg:
545 file_pkg.close()
546 for file in fs_rollback:
547 self.fs.file_delete(file, ignore_non_exist=True)
548
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # Files are only retrievable once the package has been fully onboarded
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # A directory path returns its listing; a file path returns the raw bytes
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype   accept ZIP  TEXT -> result
        # manyfiles yes         X    -> zip
        #           no          yes  -> error
        # onefile   yes         no   -> zip
        #           X           yes  -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
639
640 def _remove_yang_prefixes_from_descriptor(self, descriptor):
641 new_descriptor = {}
642 for k, v in descriptor.items():
643 new_v = v
644 if isinstance(v, dict):
645 new_v = self._remove_yang_prefixes_from_descriptor(v)
646 elif isinstance(v, list):
647 new_v = list()
648 for x in v:
649 if isinstance(x, dict):
650 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
651 else:
652 new_v.append(x)
653 new_descriptor[k.split(":")[-1]] = new_v
654 return new_descriptor
655
    def pyangbind_validation(self, item, data, force=False):
        """Base implementation: always fails; descriptor-specific subclasses override it."""
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
661
662 def _validate_input_edit(self, indata, content, force=False):
663 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
664 if "_id" in indata:
665 indata.pop("_id")
666 if "_admin" not in indata:
667 indata["_admin"] = {}
668
669 if "operationalState" in indata:
670 if indata["operationalState"] in ("ENABLED", "DISABLED"):
671 indata["_admin"]["operationalState"] = indata.pop("operationalState")
672 else:
673 raise EngineException(
674 "State '{}' is not a valid operational state".format(
675 indata["operationalState"]
676 ),
677 http_code=HTTPStatus.BAD_REQUEST,
678 )
679
680 # In the case of user defined data, we need to put the data in the root of the object
681 # to preserve current expected behaviour
682 if "userDefinedData" in indata:
683 data = indata.pop("userDefinedData")
684 if type(data) == dict:
685 indata["_admin"]["userDefinedData"] = data
686 else:
687 raise EngineException(
688 "userDefinedData should be an object, but is '{}' instead".format(
689 type(data)
690 ),
691 http_code=HTTPStatus.BAD_REQUEST,
692 )
693
694 if (
695 "operationalState" in indata["_admin"]
696 and content["_admin"]["operationalState"]
697 == indata["_admin"]["operationalState"]
698 ):
699 raise EngineException(
700 "operationalState already {}".format(
701 content["_admin"]["operationalState"]
702 ),
703 http_code=HTTPStatus.CONFLICT,
704 )
705
706 return indata
707
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook invoked by upload_content when revision > 1: subclasses compare the old
        and new package contents and raise EngineException on forbidden changes.
        Base implementation accepts any change."""
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass
721
722
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptors (database collection "vnfds")."""

    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)
729
    def pyangbind_validation(self, item, data, force=False):
        """Validate `data` against the ETSI SOL006 VNFD yang model using pyangbind and
        return the descriptor merged with the normalized serialized output.
        :raises EngineException: for pre-SOL006 descriptors or any pyangbind error
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            # pyangbind emits fully prefixed keys; strip the prefixes back off
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
755
756 @staticmethod
757 def _descriptor_data_is_in_old_format(data):
758 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
759
760 @staticmethod
761 def _remove_envelop(indata=None):
762 if not indata:
763 return {}
764 clean_indata = indata
765
766 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
767 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
768 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
769 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
770 elif clean_indata.get("vnfd"):
771 if not isinstance(clean_indata["vnfd"], dict):
772 raise EngineException("'vnfd' must be dict")
773 clean_indata = clean_indata["vnfd"]
774
775 return clean_indata
776
777 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
778 final_content = super().check_conflict_on_edit(
779 session, final_content, edit_content, _id
780 )
781
782 # set type of vnfd
783 contains_pdu = False
784 contains_vdu = False
785 for vdu in get_iterable(final_content.get("vdu")):
786 if vdu.get("pdu-type"):
787 contains_pdu = True
788 else:
789 contains_vdu = True
790 if contains_pdu:
791 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
792 elif contains_vdu:
793 final_content["_admin"]["type"] = "vnfd"
794 # if neither vud nor pdu do not fill type
795 return final_content
796
797 def check_conflict_on_del(self, session, _id, db_content):
798 """
799 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
800 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
801 that uses this vnfd
802 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
803 :param _id: vnfd internal id
804 :param db_content: The database content of the _id.
805 :return: None or raises EngineException with the conflict
806 """
807 if session["force"]:
808 return
809 descriptor = db_content
810 descriptor_id = descriptor.get("id")
811 if not descriptor_id: # empty vnfd not uploaded
812 return
813
814 _filter = self._get_project_filter(session)
815
816 # check vnfrs using this vnfd
817 _filter["vnfd-id"] = _id
818 if self.db.get_list("vnfrs", _filter):
819 raise EngineException(
820 "There is at least one VNF instance using this descriptor",
821 http_code=HTTPStatus.CONFLICT,
822 )
823
824 # check NSD referencing this VNFD
825 del _filter["vnfd-id"]
826 _filter["vnfd-id"] = descriptor_id
827 if self.db.get_list("nsds", _filter):
828 raise EngineException(
829 "There is at least one NS package referencing this descriptor",
830 http_code=HTTPStatus.CONFLICT,
831 )
832
833 def _validate_input_new(self, indata, storage_params, force=False):
834 indata.pop("onboardingState", None)
835 indata.pop("operationalState", None)
836 indata.pop("usageState", None)
837 indata.pop("links", None)
838
839 indata = self.pyangbind_validation("vnfds", indata, force)
840 # Cross references validation in the descriptor
841
842 self.validate_mgmt_interface_connection_point(indata)
843
844 for vdu in get_iterable(indata.get("vdu")):
845 self.validate_vdu_internal_connection_points(vdu)
846 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
847 self._validate_vdu_charms_in_package(storage_params, indata)
848
849 self._validate_vnf_charms_in_package(storage_params, indata)
850
851 self.validate_external_connection_points(indata)
852 self.validate_internal_virtual_links(indata)
853 self.validate_monitoring_params(indata)
854 self.validate_scaling_group_descriptor(indata)
855 self.validate_helm_chart(indata)
856
857 return indata
858
859 @staticmethod
860 def validate_helm_chart(indata):
861 kdus = indata.get("kdu", [])
862 for kdu in kdus:
863 helm_chart_value = kdu.get("helm-chart")
864 if not helm_chart_value:
865 continue
866 if not valid_helm_chart_re.match(helm_chart_value):
867 raise EngineException(
868 "helm-chart '{}' is not valid".format(helm_chart_value),
869 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
870 )
871
872 @staticmethod
873 def validate_mgmt_interface_connection_point(indata):
874 if not indata.get("vdu"):
875 return
876 if not indata.get("mgmt-cp"):
877 raise EngineException(
878 "'mgmt-cp' is a mandatory field and it is not defined",
879 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
880 )
881
882 for cp in get_iterable(indata.get("ext-cpd")):
883 if cp["id"] == indata["mgmt-cp"]:
884 break
885 else:
886 raise EngineException(
887 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
888 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
889 )
890
891 @staticmethod
892 def validate_vdu_internal_connection_points(vdu):
893 int_cpds = set()
894 for cpd in get_iterable(vdu.get("int-cpd")):
895 cpd_id = cpd.get("id")
896 if cpd_id and cpd_id in int_cpds:
897 raise EngineException(
898 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
899 vdu["id"], cpd_id
900 ),
901 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
902 )
903 int_cpds.add(cpd_id)
904
905 @staticmethod
906 def validate_external_connection_points(indata):
907 all_vdus_int_cpds = set()
908 for vdu in get_iterable(indata.get("vdu")):
909 for int_cpd in get_iterable(vdu.get("int-cpd")):
910 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
911
912 ext_cpds = set()
913 for cpd in get_iterable(indata.get("ext-cpd")):
914 cpd_id = cpd.get("id")
915 if cpd_id and cpd_id in ext_cpds:
916 raise EngineException(
917 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
918 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
919 )
920 ext_cpds.add(cpd_id)
921
922 int_cpd = cpd.get("int-cpd")
923 if int_cpd:
924 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
925 raise EngineException(
926 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
927 cpd_id
928 ),
929 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
930 )
931 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
932
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """For every VDU with a day1-2 configuration that declares a juju execution
        environment, check the package actually contains a charms folder."""
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # day1-2 entries are matched to VDUs by equal id
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            # charms may live in "charms/" or "Scripts/charms"
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
958
959 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
960 if not vdu.get("cloud-init-file"):
961 return
962 if not self._validate_package_folders(
963 storage_params, "cloud_init", vdu["cloud-init-file"]
964 ) and not self._validate_package_folders(
965 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
966 ):
967 raise EngineException(
968 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
969 "package".format(indata["id"], vdu["id"])
970 )
971
972 def _validate_vnf_charms_in_package(self, storage_params, indata):
973 # Get VNF configuration through new container
974 for deployment_flavor in indata.get("df", []):
975 if "lcm-operations-configuration" not in deployment_flavor:
976 return
977 if (
978 "operate-vnf-op-config"
979 not in deployment_flavor["lcm-operations-configuration"]
980 ):
981 return
982 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
983 "operate-vnf-op-config"
984 ]["day1-2"]:
985 if day_1_2_config["id"] == indata["id"]:
986 if utils.find_in_list(
987 day_1_2_config.get("execution-environment-list", []),
988 lambda ee: "juju" in ee,
989 ):
990 if not self._validate_package_folders(
991 storage_params, "charms"
992 ) and not self._validate_package_folders(
993 storage_params, "Scripts/charms"
994 ):
995 raise EngineException(
996 "Charm defined in vnf[id={}] but not present in "
997 "package".format(indata["id"])
998 )
999
1000 def _validate_package_folders(self, storage_params, folder, file=None):
1001 if not storage_params:
1002 return False
1003 elif not storage_params.get("pkg-dir"):
1004 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1005 f = "{}_/{}".format(storage_params["folder"], folder)
1006 else:
1007 f = "{}/{}".format(storage_params["folder"], folder)
1008 if file:
1009 return self.fs.file_exists("{}/{}".format(f, file), "file")
1010 else:
1011 if self.fs.file_exists(f, "dir"):
1012 if self.fs.dir_ls(f):
1013 return True
1014 return False
1015 else:
1016 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1017 f = "{}_/{}/{}".format(
1018 storage_params["folder"], storage_params["pkg-dir"], folder
1019 )
1020 else:
1021 f = "{}/{}/{}".format(
1022 storage_params["folder"], storage_params["pkg-dir"], folder
1023 )
1024 if file:
1025 return self.fs.file_exists("{}/{}".format(f, file), "file")
1026 else:
1027 if self.fs.file_exists(f, "dir"):
1028 if self.fs.dir_ls(f):
1029 return True
1030 return False
1031
1032 @staticmethod
1033 def validate_internal_virtual_links(indata):
1034 all_ivld_ids = set()
1035 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1036 ivld_id = ivld.get("id")
1037 if ivld_id and ivld_id in all_ivld_ids:
1038 raise EngineException(
1039 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1040 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1041 )
1042 else:
1043 all_ivld_ids.add(ivld_id)
1044
1045 for vdu in get_iterable(indata.get("vdu")):
1046 for int_cpd in get_iterable(vdu.get("int-cpd")):
1047 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1048 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1049 raise EngineException(
1050 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1051 "int-virtual-link-desc".format(
1052 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1053 ),
1054 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1055 )
1056
1057 for df in get_iterable(indata.get("df")):
1058 for vlp in get_iterable(df.get("virtual-link-profile")):
1059 vlp_ivld_id = vlp.get("id")
1060 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1061 raise EngineException(
1062 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1063 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1064 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1065 )
1066
1067 @staticmethod
1068 def validate_monitoring_params(indata):
1069 all_monitoring_params = set()
1070 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1071 for mp in get_iterable(ivld.get("monitoring-parameters")):
1072 mp_id = mp.get("id")
1073 if mp_id and mp_id in all_monitoring_params:
1074 raise EngineException(
1075 "Duplicated monitoring-parameter id in "
1076 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1077 ivld["id"], mp_id
1078 ),
1079 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1080 )
1081 else:
1082 all_monitoring_params.add(mp_id)
1083
1084 for vdu in get_iterable(indata.get("vdu")):
1085 for mp in get_iterable(vdu.get("monitoring-parameter")):
1086 mp_id = mp.get("id")
1087 if mp_id and mp_id in all_monitoring_params:
1088 raise EngineException(
1089 "Duplicated monitoring-parameter id in "
1090 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1091 vdu["id"], mp_id
1092 ),
1093 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1094 )
1095 else:
1096 all_monitoring_params.add(mp_id)
1097
1098 for df in get_iterable(indata.get("df")):
1099 for mp in get_iterable(df.get("monitoring-parameter")):
1100 mp_id = mp.get("id")
1101 if mp_id and mp_id in all_monitoring_params:
1102 raise EngineException(
1103 "Duplicated monitoring-parameter id in "
1104 "df[id='{}']:monitoring-parameter[id='{}']".format(
1105 df["id"], mp_id
1106 ),
1107 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1108 )
1109 else:
1110 all_monitoring_params.add(mp_id)
1111
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling-aspect sections of every deployment flavour.

        Two checks per scaling-aspect:
        1. each scaling-criteria:vnf-monitoring-param-ref points to a
           monitoring parameter defined somewhere in the descriptor;
        2. each scaling-config-action has a matching day1-2 configuration
           and its vnf-config-primitive-name-ref matches a config-primitive.

        :param indata: descriptor content (dict)
        :raises EngineException: (422) when a reference cannot be resolved
        """
        # Collect every monitoring parameter id visible in the descriptor:
        # VLD-level, VDU-level and df-level parameters are all valid targets.
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Check 1: scaling-criteria must reference a known
                # monitoring parameter.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                # Check 2: scaling-config-action requires a day1-2
                # configuration whose id matches the descriptor id.
                for sca in get_iterable(sa.get("scaling-config-action")):
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # The referenced primitive must exist in at least one
                    # day1-2 configuration; the for/else raises only when no
                    # config-primitive name matched (loop ended without break).
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1192
1193 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1194 """
1195 Deletes associate file system storage (via super)
1196 Deletes associated vnfpkgops from database.
1197 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1198 :param _id: server internal id
1199 :param db_content: The database content of the descriptor
1200 :return: None
1201 :raises: FsException in case of error while deleting associated storage
1202 """
1203 super().delete_extra(session, _id, db_content, not_send_msg)
1204 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1205 self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1206
1207 def sol005_projection(self, data):
1208 data["onboardingState"] = data["_admin"]["onboardingState"]
1209 data["operationalState"] = data["_admin"]["operationalState"]
1210 data["usageState"] = data["_admin"]["usageState"]
1211
1212 links = {}
1213 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1214 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1215 links["packageContent"] = {
1216 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1217 }
1218 data["_links"] = links
1219
1220 return super().sol005_projection(data)
1221
1222 @staticmethod
1223 def find_software_version(vnfd: dict) -> str:
1224 """Find the sotware version in the VNFD descriptors
1225
1226 Args:
1227 vnfd (dict): Descriptor as a dictionary
1228
1229 Returns:
1230 software-version (str)
1231 """
1232 default_sw_version = "1.0"
1233 if vnfd.get("vnfd"):
1234 vnfd = vnfd["vnfd"]
1235 if vnfd.get("software-version"):
1236 return vnfd["software-version"]
1237 else:
1238 return default_sw_version
1239
1240 @staticmethod
1241 def extract_policies(vnfd: dict) -> dict:
1242 """Removes the policies from the VNFD descriptors
1243
1244 Args:
1245 vnfd (dict): Descriptor as a dictionary
1246
1247 Returns:
1248 vnfd (dict): VNFD which does not include policies
1249 """
1250 for df in vnfd.get("df", {}):
1251 for policy in ["scaling-aspect", "healing-aspect"]:
1252 if df.get(policy, {}):
1253 df.pop(policy)
1254 for vdu in vnfd.get("vdu", {}):
1255 for alarm_policy in ["alarm", "monitoring-parameter"]:
1256 if vdu.get(alarm_policy, {}):
1257 vdu.pop(alarm_policy)
1258 return vnfd
1259
1260 @staticmethod
1261 def extract_day12_primitives(vnfd: dict) -> dict:
1262 """Removes the day12 primitives from the VNFD descriptors
1263
1264 Args:
1265 vnfd (dict): Descriptor as a dictionary
1266
1267 Returns:
1268 vnfd (dict)
1269 """
1270 for df_id, df in enumerate(vnfd.get("df", {})):
1271 if (
1272 df.get("lcm-operations-configuration", {})
1273 .get("operate-vnf-op-config", {})
1274 .get("day1-2")
1275 ):
1276 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1277 "day1-2"
1278 )
1279 for config_id, config in enumerate(day12):
1280 for key in [
1281 "initial-config-primitive",
1282 "config-primitive",
1283 "terminate-config-primitive",
1284 ]:
1285 config.pop(key, None)
1286 day12[config_id] = config
1287 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1288 "day1-2"
1289 ] = day12
1290 vnfd["df"][df_id] = df
1291 return vnfd
1292
1293 def remove_modifiable_items(self, vnfd: dict) -> dict:
1294 """Removes the modifiable parts from the VNFD descriptors
1295
1296 It calls different extract functions according to different update types
1297 to clear all the modifiable items from VNFD
1298
1299 Args:
1300 vnfd (dict): Descriptor as a dictionary
1301
1302 Returns:
1303 vnfd (dict): Descriptor which does not include modifiable contents
1304 """
1305 if vnfd.get("vnfd"):
1306 vnfd = vnfd["vnfd"]
1307 vnfd.pop("_admin", None)
1308 # If the other extractions need to be done from VNFD,
1309 # the new extract methods could be appended to below list.
1310 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1311 vnfd_temp = extract_function(vnfd)
1312 vnfd = vnfd_temp
1313 return vnfd
1314
1315 def _validate_descriptor_changes(
1316 self,
1317 descriptor_id: str,
1318 descriptor_file_name: str,
1319 old_descriptor_directory: str,
1320 new_descriptor_directory: str,
1321 ):
1322 """Compares the old and new VNFD descriptors and validates the new descriptor.
1323
1324 Args:
1325 old_descriptor_directory (str): Directory of descriptor which is in-use
1326 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1327
1328 Returns:
1329 None
1330
1331 Raises:
1332 EngineException: In case of error when there are unallowed changes
1333 """
1334 try:
1335 # If VNFD does not exist in DB or it is not in use by any NS,
1336 # validation is not required.
1337 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1338 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1339 return
1340
1341 # Get the old and new descriptor contents in order to compare them.
1342 with self.fs.file_open(
1343 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1344 ) as old_descriptor_file:
1345 with self.fs.file_open(
1346 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1347 ) as new_descriptor_file:
1348 old_content = yaml.safe_load(old_descriptor_file.read())
1349 new_content = yaml.safe_load(new_descriptor_file.read())
1350
1351 # If software version has changed, we do not need to validate
1352 # the differences anymore.
1353 if old_content and new_content:
1354 if self.find_software_version(
1355 old_content
1356 ) != self.find_software_version(new_content):
1357 return
1358
1359 disallowed_change = DeepDiff(
1360 self.remove_modifiable_items(old_content),
1361 self.remove_modifiable_items(new_content),
1362 )
1363
1364 if disallowed_change:
1365 changed_nodes = functools.reduce(
1366 lambda a, b: a + " , " + b,
1367 [
1368 node.lstrip("root")
1369 for node in disallowed_change.get(
1370 "values_changed"
1371 ).keys()
1372 ],
1373 )
1374
1375 raise EngineException(
1376 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1377 "there are disallowed changes in the vnf descriptor.",
1378 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1379 )
1380 except (
1381 DbException,
1382 AttributeError,
1383 IndexError,
1384 KeyError,
1385 ValueError,
1386 ) as e:
1387 raise type(e)(
1388 "VNF Descriptor could not be processed with error: {}.".format(e)
1389 )
1390
1391
class NsdTopic(DescriptorTopic):
    """SOL005 NS descriptor (NSD) topic.

    Handles onboarding, SOL006 validation, edition, revision comparison and
    deletion of NS packages stored in the "nsds" collection.
    """

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NSD against the ETSI SOL006 YANG model via pyangbind.

        :param item: topic name (unused here, kept for the common signature)
        :param data: descriptor content (dict)
        :param force: when True, unknown fields are skipped instead of failing
        :return: normalized descriptor (dict) without YANG prefixes
        :raises EngineException: (422) when the descriptor does not conform
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # Keep the original vnf-profile aside: it is restored verbatim
            # after the pyangbind round-trip below.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 OSM descriptors used the "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nsd"/"etsi-nfv-nsd:nsd" envelope and return the single
        inner nsd item.

        :raises EngineException: when the inner "nsd" list has != 1 element
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: SOL006 model check plus cross-reference checks.

        :return: the normalized descriptor (dict)
        """
        # SOL005 read-only projection fields must not be stored.
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on VLDs flagged as mgmt-network.

        :raises EngineException: (422) when both are set for the same VLD
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check that every df:vnf-profile:vnfd-id is listed in "vnfd-id".

        :raises EngineException: (422) on a dangling reference
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if type(data) == dict:
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the state to the value it already has is a no-op and is
        # reported as a conflict.
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a {vnfd-id: vnfd} index for every constituent VNFD.

        :raises EngineException: (409) when a referenced vnfd does not exist
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check that each constituent-cpd-id exists as an ext-cpd in the
        corresponding VNFD.

        :param df: deployment flavour of the NSD (dict)
        :param vnfds_index: {vnfd-id: vnfd} map built by the caller
        :raises EngineException: (422) on a dangling reference
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks and re-validate VNFD dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Revision documents are named "<_id>:<revision>", hence the regex.
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        # Unwrap nested "nsd" envelopes; a list envelope holds one item.
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # NOTE(review): only "values_changed" entries are
                            # listed; a diff containing only added/removed
                            # items makes .get("values_changed") return None,
                            # so .keys() raises AttributeError which is
                            # re-raised below as a processing error — confirm
                            # whether that path is reachable.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Project the internal record onto the SOL005 NS descriptor view."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1791
1792
class NstTopic(DescriptorTopic):
    """Network Slice Template (NST) topic.

    Handles onboarding, validation, edition and deletion of slice templates
    stored in the "nsts" collection.
    """

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NST content against the OSM NST YANG model.

        :param item: topic name (unused here, kept for the common signature)
        :param data: template content (dict)
        :param force: when True, unknown fields are skipped instead of failing
        :return: normalized template (dict)
        :raises EngineException: (422) when the template does not conform
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nst"/"nst:nst" envelope and return the single inner
        nst item.

        :raises EngineException: when the inner list has != 1 element
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NST: drop read-only fields and run the model check."""
        # SOL005-style read-only projection fields must not be stored.
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks and re-validate NSD dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Project the internal record onto the SOL005-style NST view."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1910
1911
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Unit (PDU) descriptors."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply the base _admin defaults, then set the SOL005 lifecycle states."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin_data = content["_admin"]
        admin_data["onboardingState"] = "CREATED"
        admin_data["operationalState"] = "ENABLED"
        admin_data["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # A vnfr whose vdur points at this pdu id means the PDU is in use
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1947
1948
class VnfPkgOpTopic(BaseTopic):
    """SOL005 VNF package operation occurrences (vnfpkg_op_occs).

    Records are create-only: edit/delete are rejected, and each `new` call
    registers a PROCESSING operation against a kdu of an existing vnfd.
    """

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        # Operation occurrences are immutable once created
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        # Operation history must be preserved
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        from urllib.parse import urlparse  # stdlib; local import keeps file header untouched

        def _is_url(candidate):
            # Minimal URL check: both scheme and network location must be present
            parsed = urlparse(candidate)
            return all([parsed.scheme, parsed.netloc])

        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        if helm_chart:
            # Bug 2088: validate the helm-chart value early so a malformed name
            # fails here with 422 instead of later in LCM. Accept either the
            # "[repo/]chart" form matching valid_helm_chart_re or a full URL.
            if not (valid_helm_chart_re.match(helm_chart) or _is_url(helm_chart)):
                raise EngineException(
                    "helm-chart '{}' is not valid".format(helm_chart),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            # "repo/chart" reference: the repo must exist within the project
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None