Fix bug 2088 by validating helm-chart value on VNF
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23 import re
24
25 # import logging
26 from deepdiff import DeepDiff
27 from hashlib import md5
28 from osm_common.dbbase import DbException, deep_update_rfc7396
29 from http import HTTPStatus
30 from time import time
31 from uuid import uuid4
32 from re import fullmatch
33 from zipfile import ZipFile
34 from osm_nbi.validation import (
35 ValidationError,
36 pdu_new_schema,
37 pdu_edit_schema,
38 validate_input,
39 vnfpkgop_new_schema,
40 )
41 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
42 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
43 from osm_im.nst import nst as nst_im
44 from pyangbind.lib.serialise import pybindJSONDecoder
45 import pyangbind.lib.pybindJSON as pybindJSON
46 from osm_nbi import utils
47
48 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
49
# Pattern for a helm chart reference: either "chart" or "repo/chart", where
# each segment is lowercase alphanumeric with optional inner hyphens and must
# start and end with an alphanumeric character (bug 2088).
# The previous pattern rejected single-character repository names such as
# "r/chart" (the repo group required at least two characters before "/") and
# accepted a dangling "repo/" with an empty chart name; this form fixes both.
valid_helm_chart_re = re.compile(
    r"^([a-z0-9]([-a-z0-9]*[a-z0-9])?/)?[a-z0-9]([-a-z0-9]*[a-z0-9])?$"
)
53
54
55 class DescriptorTopic(BaseTopic):
    def __init__(self, db, fs, msg, auth):
        # Plain pass-through: stores the database, file-storage, message-bus
        # and auth backends on the instance via BaseTopic.
        super().__init__(db, fs, msg, auth)
58
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate the merged descriptor before an edit is committed.

        Runs the BaseTopic checks, verifies that list entries have unique
        'id'/'name' identifiers, re-validates the descriptor with pyangbind
        (via self._validate_input_new) and checks that the descriptor 'id' is
        not already used by another entry of this project.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after the edit has been merged
        :param edit_content: the raw edit payload
        :param _id: server internal id of the descriptor being edited
        :return: the serialized (validated) final content
        :raises EngineException: on duplicated identifiers, validation failure
            or id conflict with another descriptor of the project
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk every list of dicts in the descriptor and raise
            # if the same 'id' (or 'name') value appears twice in one list.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            # The first element decides whether this list is
                            # keyed by 'id' or by 'name'.
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        # NOTE(review): assumes "_admin" is always present in final_content;
        # a missing "_admin" would raise KeyError below — confirm with callers.
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            # forced edits skip the duplicate-id check below
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            # exclude ourselves from the search (same id, different _id)
            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        self.topic[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
130
131 @staticmethod
132 def format_on_new(content, project_id=None, make_public=False):
133 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
134 content["_admin"]["onboardingState"] = "CREATED"
135 content["_admin"]["operationalState"] = "DISABLED"
136 content["_admin"]["usageState"] = "NOT_IN_USE"
137
138 def delete_extra(self, session, _id, db_content, not_send_msg=None):
139 """
140 Deletes file system storage associated with the descriptor
141 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
142 :param _id: server internal id
143 :param db_content: The database content of the descriptor
144 :param not_send_msg: To not send message (False) or store content (list) instead
145 :return: None if ok or raises EngineException with the problem
146 """
147 self.fs.file_delete(_id, ignore_non_exist=True)
148 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
149 # Remove file revisions
150 if "revision" in db_content["_admin"]:
151 revision = db_content["_admin"]["revision"]
152 while revision > 0:
153 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
154 revision = revision - 1
155
156
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Return the single descriptor of `topic` whose 'id' matches, owned by
        the session's project; otherwise look for a matching public one.

        :param db: database backend
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name (e.g. "vnfds", "nsds")
        :param id: descriptor 'id' field to look up
        :return: the descriptor content
        :raises DbException: NOT_FOUND if none matches, CONFLICT if several do
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second filter looks identical to the first one;
        # presumably _get_project_filter already includes public entries —
        # confirm against BaseTopic._get_project_filter.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
191
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        # Only the user-defined metadata is stored at this step; the actual
        # descriptor arrives later via upload_content.
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # Empty shell record: revision 0 until content is uploaded
        content = {"_admin": {
            "userDefinedData": indata,
            "revision": 0
        }}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        # register for rollback in case a later step of the request fails
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None
232
233 def upload_content(self, session, _id, indata, kwargs, headers):
234 """
235 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
236 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
237 :param _id : the nsd,vnfd is already created, this is the id
238 :param indata: http body request
239 :param kwargs: user query string to override parameters. NOT USED
240 :param headers: http request headers
241 :return: True if package is completely uploaded or False if partial content has been uploded
242 Raise exception on error
243 """
244 # Check that _id exists and it is valid
245 current_desc = self.show(session, _id)
246
247 content_range_text = headers.get("Content-Range")
248 expected_md5 = headers.get("Content-File-MD5")
249 compressed = None
250 content_type = headers.get("Content-Type")
251 if (
252 content_type
253 and "application/gzip" in content_type
254 or "application/x-gzip" in content_type
255 ):
256 compressed = "gzip"
257 if (
258 content_type
259 and "application/zip" in content_type
260 ):
261 compressed = "zip"
262 filename = headers.get("Content-Filename")
263 if not filename and compressed:
264 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
265 elif not filename:
266 filename = "package"
267
268 revision = 1
269 if "revision" in current_desc["_admin"]:
270 revision = current_desc["_admin"]["revision"] + 1
271
272 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
273 file_pkg = None
274 error_text = ""
275 fs_rollback = []
276
277 try:
278 if content_range_text:
279 content_range = (
280 content_range_text.replace("-", " ").replace("/", " ").split()
281 )
282 if (
283 content_range[0] != "bytes"
284 ): # TODO check x<y not negative < total....
285 raise IndexError()
286 start = int(content_range[1])
287 end = int(content_range[2]) + 1
288 total = int(content_range[3])
289 else:
290 start = 0
291 # Rather than using a temp folder, we will store the package in a folder based on
292 # the current revision.
293 proposed_revision_path = (
294 _id + ":" + str(revision)
295 ) # all the content is upload here and if ok, it is rename from id_ to is folder
296
297 if start:
298 if not self.fs.file_exists(proposed_revision_path, "dir"):
299 raise EngineException(
300 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
301 )
302 else:
303 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
304 self.fs.mkdir(proposed_revision_path)
305 fs_rollback.append(proposed_revision_path)
306
307 storage = self.fs.get_params()
308 storage["folder"] = proposed_revision_path
309
310 file_path = (proposed_revision_path, filename)
311 if self.fs.file_exists(file_path, "file"):
312 file_size = self.fs.file_size(file_path)
313 else:
314 file_size = 0
315 if file_size != start:
316 raise EngineException(
317 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
318 file_size, start
319 ),
320 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
321 )
322 file_pkg = self.fs.file_open(file_path, "a+b")
323 if isinstance(indata, dict):
324 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
325 file_pkg.write(indata_text.encode(encoding="utf-8"))
326 else:
327 indata_len = 0
328 while True:
329 indata_text = indata.read(4096)
330 indata_len += len(indata_text)
331 if not indata_text:
332 break
333 file_pkg.write(indata_text)
334 if content_range_text:
335 if indata_len != end - start:
336 raise EngineException(
337 "Mismatch between Content-Range header {}-{} and body length of {}".format(
338 start, end - 1, indata_len
339 ),
340 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
341 )
342 if end != total:
343 # TODO update to UPLOADING
344 return False
345
346 # PACKAGE UPLOADED
347 if expected_md5:
348 file_pkg.seek(0, 0)
349 file_md5 = md5()
350 chunk_data = file_pkg.read(1024)
351 while chunk_data:
352 file_md5.update(chunk_data)
353 chunk_data = file_pkg.read(1024)
354 if expected_md5 != file_md5.hexdigest():
355 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
356 file_pkg.seek(0, 0)
357 if compressed == "gzip":
358 tar = tarfile.open(mode="r", fileobj=file_pkg)
359 descriptor_file_name = None
360 for tarinfo in tar:
361 tarname = tarinfo.name
362 tarname_path = tarname.split("/")
363 if (
364 not tarname_path[0] or ".." in tarname_path
365 ): # if start with "/" means absolute path
366 raise EngineException(
367 "Absolute path or '..' are not allowed for package descriptor tar.gz"
368 )
369 if len(tarname_path) == 1 and not tarinfo.isdir():
370 raise EngineException(
371 "All files must be inside a dir for package descriptor tar.gz"
372 )
373 if (
374 tarname.endswith(".yaml")
375 or tarname.endswith(".json")
376 or tarname.endswith(".yml")
377 ):
378 storage["pkg-dir"] = tarname_path[0]
379 if len(tarname_path) == 2:
380 if descriptor_file_name:
381 raise EngineException(
382 "Found more than one descriptor file at package descriptor tar.gz"
383 )
384 descriptor_file_name = tarname
385 if not descriptor_file_name:
386 raise EngineException(
387 "Not found any descriptor file at package descriptor tar.gz"
388 )
389 storage["descriptor"] = descriptor_file_name
390 storage["zipfile"] = filename
391 self.fs.file_extract(tar, proposed_revision_path)
392 with self.fs.file_open(
393 (proposed_revision_path, descriptor_file_name), "r"
394 ) as descriptor_file:
395 content = descriptor_file.read()
396 elif compressed == "zip":
397 zipfile = ZipFile(file_pkg)
398 descriptor_file_name = None
399 for package_file in zipfile.infolist():
400 zipfilename = package_file.filename
401 file_path = zipfilename.split("/")
402 if (
403 not file_path[0] or ".." in zipfilename
404 ): # if start with "/" means absolute path
405 raise EngineException(
406 "Absolute path or '..' are not allowed for package descriptor zip"
407 )
408
409 if (
410 (
411 zipfilename.endswith(".yaml")
412 or zipfilename.endswith(".json")
413 or zipfilename.endswith(".yml")
414 ) and (
415 zipfilename.find("/") < 0
416 or zipfilename.find("Definitions") >= 0
417 )
418 ):
419 storage["pkg-dir"] = ""
420 if descriptor_file_name:
421 raise EngineException(
422 "Found more than one descriptor file at package descriptor zip"
423 )
424 descriptor_file_name = zipfilename
425 if not descriptor_file_name:
426 raise EngineException(
427 "Not found any descriptor file at package descriptor zip"
428 )
429 storage["descriptor"] = descriptor_file_name
430 storage["zipfile"] = filename
431 self.fs.file_extract(zipfile, proposed_revision_path)
432
433 with self.fs.file_open(
434 (proposed_revision_path, descriptor_file_name), "r"
435 ) as descriptor_file:
436 content = descriptor_file.read()
437 else:
438 content = file_pkg.read()
439 storage["descriptor"] = descriptor_file_name = filename
440
441 if descriptor_file_name.endswith(".json"):
442 error_text = "Invalid json format "
443 indata = json.load(content)
444 else:
445 error_text = "Invalid yaml format "
446 indata = yaml.load(content, Loader=yaml.SafeLoader)
447
448 # Need to close the file package here so it can be copied from the
449 # revision to the current, unrevisioned record
450 if file_pkg:
451 file_pkg.close()
452 file_pkg = None
453
454 # Fetch both the incoming, proposed revision and the original revision so we
455 # can call a validate method to compare them
456 current_revision_path = _id + "/"
457 self.fs.sync(from_path=current_revision_path)
458 self.fs.sync(from_path=proposed_revision_path)
459
460 if revision > 1:
461 try:
462 self._validate_descriptor_changes(
463 descriptor_file_name,
464 current_revision_path,
465 proposed_revision_path)
466 except Exception as e:
467 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
468 shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
469 # Only delete the new revision. We need to keep the original version in place
470 # as it has not been changed.
471 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
472 raise e
473
474
475 indata = self._remove_envelop(indata)
476
477 # Override descriptor with query string kwargs
478 if kwargs:
479 self._update_input_with_kwargs(indata, kwargs)
480
481 current_desc["_admin"]["storage"] = storage
482 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
483 current_desc["_admin"]["operationalState"] = "ENABLED"
484 current_desc["_admin"]["modified"] = time()
485 current_desc["_admin"]["revision"] = revision
486
487 deep_update_rfc7396(current_desc, indata)
488 current_desc = self.check_conflict_on_edit(
489 session, current_desc, indata, _id=_id
490 )
491
492 # Copy the revision to the active package name by its original id
493 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
494 os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
495 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
496 self.fs.mkdir(current_revision_path)
497 self.fs.reverse_sync(from_path=current_revision_path)
498
499 shutil.rmtree(self.fs.path + _id)
500
501 self.db.replace(self.topic, _id, current_desc)
502
503 # Store a copy of the package as a point in time revision
504 revision_desc = dict(current_desc)
505 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
506 self.db.create(self.topic + "_revisions", revision_desc)
507 fs_rollback = []
508
509 indata["_id"] = _id
510 self._send_msg("edited", indata)
511
512 # TODO if descriptor has changed because kwargs update content and remove cached zip
513 # TODO if zip is not present creates one
514 return True
515
516 except EngineException:
517 raise
518 except IndexError:
519 raise EngineException(
520 "invalid Content-Range header format. Expected 'bytes start-end/total'",
521 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
522 )
523 except IOError as e:
524 raise EngineException(
525 "invalid upload transaction sequence: '{}'".format(e),
526 HTTPStatus.BAD_REQUEST,
527 )
528 except tarfile.ReadError as e:
529 raise EngineException(
530 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
531 )
532 except (ValueError, yaml.YAMLError) as e:
533 raise EngineException(error_text + str(e))
534 except ValidationError as e:
535 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
536 finally:
537 if file_pkg:
538 file_pkg.close()
539 for file in fs_rollback:
540 self.fs.file_delete(file, ignore_non_exist=True)
541
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Parse what the client is willing to receive
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # Content can only be served once the package has been fully onboarded
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # A directory path returns its listing; a file path returns bytes
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype        accept ZIP  TEXT  -> result
        # manyfiles      yes         X     -> zip
        #                no          yes   -> error
        # onefile        yes         no    -> zip
        #                X           yes   -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            # single-file package (or explicit descriptor request): plain text
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
632
633 def _remove_yang_prefixes_from_descriptor(self, descriptor):
634 new_descriptor = {}
635 for k, v in descriptor.items():
636 new_v = v
637 if isinstance(v, dict):
638 new_v = self._remove_yang_prefixes_from_descriptor(v)
639 elif isinstance(v, list):
640 new_v = list()
641 for x in v:
642 if isinstance(x, dict):
643 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
644 else:
645 new_v.append(x)
646 new_descriptor[k.split(":")[-1]] = new_v
647 return new_descriptor
648
649 def pyangbind_validation(self, item, data, force=False):
650 raise EngineException(
651 "Not possible to validate '{}' item".format(item),
652 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
653 )
654
655 def _validate_input_edit(self, indata, content, force=False):
656 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
657 if "_id" in indata:
658 indata.pop("_id")
659 if "_admin" not in indata:
660 indata["_admin"] = {}
661
662 if "operationalState" in indata:
663 if indata["operationalState"] in ("ENABLED", "DISABLED"):
664 indata["_admin"]["operationalState"] = indata.pop("operationalState")
665 else:
666 raise EngineException(
667 "State '{}' is not a valid operational state".format(
668 indata["operationalState"]
669 ),
670 http_code=HTTPStatus.BAD_REQUEST,
671 )
672
673 # In the case of user defined data, we need to put the data in the root of the object
674 # to preserve current expected behaviour
675 if "userDefinedData" in indata:
676 data = indata.pop("userDefinedData")
677 if isinstance(data, dict):
678 indata["_admin"]["userDefinedData"] = data
679 else:
680 raise EngineException(
681 "userDefinedData should be an object, but is '{}' instead".format(
682 type(data)
683 ),
684 http_code=HTTPStatus.BAD_REQUEST,
685 )
686
687 if (
688 "operationalState" in indata["_admin"]
689 and content["_admin"]["operationalState"]
690 == indata["_admin"]["operationalState"]
691 ):
692 raise EngineException(
693 "operationalState already {}".format(
694 content["_admin"]["operationalState"]
695 ),
696 http_code=HTTPStatus.CONFLICT,
697 )
698
699 return indata
700
    def _validate_descriptor_changes(self,
                                     descriptor_file_name,
                                     old_descriptor_directory,
                                     new_descriptor_directory):
        """
        Compare an existing descriptor revision against a newly uploaded one.

        Intentionally a no-op here; subclasses may override it to forbid
        specific modifications between revisions.

        :param descriptor_file_name: name of the descriptor file inside both dirs
        :param old_descriptor_directory: folder of the currently stored revision
        :param new_descriptor_directory: folder of the proposed revision
        :return: None; overrides raise EngineException on forbidden changes
        """
        # Todo: compare changes and throw a meaningful exception for the user to understand
        # Example:
        #     raise EngineException(
        #           "Error in validating new descriptor: <NODE> cannot be modified",
        #           http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        #     )
        pass
712
713 class VnfdTopic(DescriptorTopic):
    # collection name used by the db layer and message topic for notifications
    topic = "vnfds"
    topic_msg = "vnfd"
716
    def __init__(self, db, fs, msg, auth):
        # Delegate all backend wiring to DescriptorTopic
        DescriptorTopic.__init__(self, db, fs, msg, auth)
719
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate a VNFD against the ETSI SOL006 YANG model using pyangbind.

        :param item: topic name, informational only (e.g. "vnfds")
        :param data: descriptor content to validate
        :param force: when True, unknown yang nodes are skipped instead of failing
        :return: `data` deep-updated with the serialized (model-normalized) output
        :raises EngineException: for pre-SOL006 descriptors or any validation error
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize back and strip the envelope and yang prefixes so the
            # result can be merged with the original input
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
745
746 @staticmethod
747 def _descriptor_data_is_in_old_format(data):
748 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
749
750 @staticmethod
751 def _remove_envelop(indata=None):
752 if not indata:
753 return {}
754 clean_indata = indata
755
756 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
757 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
758 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
759 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
760 elif clean_indata.get("vnfd"):
761 if not isinstance(clean_indata["vnfd"], dict):
762 raise EngineException("'vnfd' must be dict")
763 clean_indata = clean_indata["vnfd"]
764
765 return clean_indata
766
767 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
768 final_content = super().check_conflict_on_edit(
769 session, final_content, edit_content, _id
770 )
771
772 # set type of vnfd
773 contains_pdu = False
774 contains_vdu = False
775 for vdu in get_iterable(final_content.get("vdu")):
776 if vdu.get("pdu-type"):
777 contains_pdu = True
778 else:
779 contains_vdu = True
780 if contains_pdu:
781 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
782 elif contains_vdu:
783 final_content["_admin"]["type"] = "vnfd"
784 # if neither vud nor pdu do not fill type
785 return final_content
786
    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
        that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
        that uses this vnfd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            # forced deletion skips all referential checks
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd
        # vnfrs reference the descriptor by its internal _id
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD
        # nsds reference the descriptor by its SOL006 'id' field instead
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )
822
823 def _validate_input_new(self, indata, storage_params, force=False):
824 indata.pop("onboardingState", None)
825 indata.pop("operationalState", None)
826 indata.pop("usageState", None)
827 indata.pop("links", None)
828
829 indata = self.pyangbind_validation("vnfds", indata, force)
830 # Cross references validation in the descriptor
831
832 self.validate_mgmt_interface_connection_point(indata)
833
834 for vdu in get_iterable(indata.get("vdu")):
835 self.validate_vdu_internal_connection_points(vdu)
836 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
837 self._validate_vdu_charms_in_package(storage_params, indata)
838
839 self._validate_vnf_charms_in_package(storage_params, indata)
840
841 self.validate_external_connection_points(indata)
842 self.validate_internal_virtual_links(indata)
843 self.validate_monitoring_params(indata)
844 self.validate_scaling_group_descriptor(indata)
845 self.validate_helm_chart(indata)
846
847 return indata
848
849 @staticmethod
850 def validate_helm_chart(indata):
851 kdus = indata.get("kdu", [])
852 for kdu in kdus:
853 helm_chart_value = kdu.get("helm-chart")
854 if not helm_chart_value:
855 continue
856 if not valid_helm_chart_re.match(helm_chart_value):
857 raise EngineException(
858 "helm-chart '{}' is not valid".format(helm_chart_value),
859 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
860 )
861
862 @staticmethod
863 def validate_mgmt_interface_connection_point(indata):
864 if not indata.get("vdu"):
865 return
866 if not indata.get("mgmt-cp"):
867 raise EngineException(
868 "'mgmt-cp' is a mandatory field and it is not defined",
869 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
870 )
871
872 for cp in get_iterable(indata.get("ext-cpd")):
873 if cp["id"] == indata["mgmt-cp"]:
874 break
875 else:
876 raise EngineException(
877 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
878 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
879 )
880
881 @staticmethod
882 def validate_vdu_internal_connection_points(vdu):
883 int_cpds = set()
884 for cpd in get_iterable(vdu.get("int-cpd")):
885 cpd_id = cpd.get("id")
886 if cpd_id and cpd_id in int_cpds:
887 raise EngineException(
888 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
889 vdu["id"], cpd_id
890 ),
891 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
892 )
893 int_cpds.add(cpd_id)
894
895 @staticmethod
896 def validate_external_connection_points(indata):
897 all_vdus_int_cpds = set()
898 for vdu in get_iterable(indata.get("vdu")):
899 for int_cpd in get_iterable(vdu.get("int-cpd")):
900 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
901
902 ext_cpds = set()
903 for cpd in get_iterable(indata.get("ext-cpd")):
904 cpd_id = cpd.get("id")
905 if cpd_id and cpd_id in ext_cpds:
906 raise EngineException(
907 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
908 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
909 )
910 ext_cpds.add(cpd_id)
911
912 int_cpd = cpd.get("int-cpd")
913 if int_cpd:
914 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
915 raise EngineException(
916 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
917 cpd_id
918 ),
919 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
920 )
921 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
922
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """
        For every VDU whose day1-2 configuration declares a juju execution
        environment, check that the package actually ships a charms folder
        ("charms" or "Scripts/charms").

        :param storage_params: _admin storage info of the uploaded package
        :param indata: full VNFD content
        :raises EngineException: when a charm is declared but missing from the package
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # a day1-2 config applies to this VDU when their ids match
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
948
949 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
950 if not vdu.get("cloud-init-file"):
951 return
952 if not self._validate_package_folders(
953 storage_params, "cloud_init", vdu["cloud-init-file"]
954 ) and not self._validate_package_folders(
955 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
956 ):
957 raise EngineException(
958 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
959 "package".format(indata["id"], vdu["id"])
960 )
961
962 def _validate_vnf_charms_in_package(self, storage_params, indata):
963 # Get VNF configuration through new container
964 for deployment_flavor in indata.get("df", []):
965 if "lcm-operations-configuration" not in deployment_flavor:
966 return
967 if (
968 "operate-vnf-op-config"
969 not in deployment_flavor["lcm-operations-configuration"]
970 ):
971 return
972 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
973 "operate-vnf-op-config"
974 ]["day1-2"]:
975 if day_1_2_config["id"] == indata["id"]:
976 if utils.find_in_list(
977 day_1_2_config.get("execution-environment-list", []),
978 lambda ee: "juju" in ee,
979 ):
980 if not self._validate_package_folders(
981 storage_params, "charms"
982 ) and not self._validate_package_folders(
983 storage_params, "Scripts/charms"
984 ):
985 raise EngineException(
986 "Charm defined in vnf[id={}] but not present in "
987 "package".format(indata["id"])
988 )
989
990 def _validate_package_folders(self, storage_params, folder, file=None):
991 if not storage_params:
992 return False
993 elif not storage_params.get("pkg-dir"):
994 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
995 f = "{}_/{}".format(
996 storage_params["folder"], folder
997 )
998 else:
999 f = "{}/{}".format(
1000 storage_params["folder"], folder
1001 )
1002 if file:
1003 return self.fs.file_exists("{}/{}".format(f, file), "file")
1004 else:
1005 if self.fs.file_exists(f, "dir"):
1006 if self.fs.dir_ls(f):
1007 return True
1008 return False
1009 else:
1010 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1011 f = "{}_/{}/{}".format(
1012 storage_params["folder"], storage_params["pkg-dir"], folder
1013 )
1014 else:
1015 f = "{}/{}/{}".format(
1016 storage_params["folder"], storage_params["pkg-dir"], folder
1017 )
1018 if file:
1019 return self.fs.file_exists("{}/{}".format(f, file), "file")
1020 else:
1021 if self.fs.file_exists(f, "dir"):
1022 if self.fs.dir_ls(f):
1023 return True
1024 return False
1025
1026 @staticmethod
1027 def validate_internal_virtual_links(indata):
1028 all_ivld_ids = set()
1029 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1030 ivld_id = ivld.get("id")
1031 if ivld_id and ivld_id in all_ivld_ids:
1032 raise EngineException(
1033 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1034 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1035 )
1036 else:
1037 all_ivld_ids.add(ivld_id)
1038
1039 for vdu in get_iterable(indata.get("vdu")):
1040 for int_cpd in get_iterable(vdu.get("int-cpd")):
1041 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1042 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1043 raise EngineException(
1044 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1045 "int-virtual-link-desc".format(
1046 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1047 ),
1048 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1049 )
1050
1051 for df in get_iterable(indata.get("df")):
1052 for vlp in get_iterable(df.get("virtual-link-profile")):
1053 vlp_ivld_id = vlp.get("id")
1054 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1055 raise EngineException(
1056 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1057 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1058 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1059 )
1060
1061 @staticmethod
1062 def validate_monitoring_params(indata):
1063 all_monitoring_params = set()
1064 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1065 for mp in get_iterable(ivld.get("monitoring-parameters")):
1066 mp_id = mp.get("id")
1067 if mp_id and mp_id in all_monitoring_params:
1068 raise EngineException(
1069 "Duplicated monitoring-parameter id in "
1070 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1071 ivld["id"], mp_id
1072 ),
1073 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1074 )
1075 else:
1076 all_monitoring_params.add(mp_id)
1077
1078 for vdu in get_iterable(indata.get("vdu")):
1079 for mp in get_iterable(vdu.get("monitoring-parameter")):
1080 mp_id = mp.get("id")
1081 if mp_id and mp_id in all_monitoring_params:
1082 raise EngineException(
1083 "Duplicated monitoring-parameter id in "
1084 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1085 vdu["id"], mp_id
1086 ),
1087 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1088 )
1089 else:
1090 all_monitoring_params.add(mp_id)
1091
1092 for df in get_iterable(indata.get("df")):
1093 for mp in get_iterable(df.get("monitoring-parameter")):
1094 mp_id = mp.get("id")
1095 if mp_id and mp_id in all_monitoring_params:
1096 raise EngineException(
1097 "Duplicated monitoring-parameter id in "
1098 "df[id='{}']:monitoring-parameter[id='{}']".format(
1099 df["id"], mp_id
1100 ),
1101 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1102 )
1103 else:
1104 all_monitoring_params.add(mp_id)
1105
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling aspects declared in each deployment flavor.

        Checks that every scaling-criteria vnf-monitoring-param-ref points at
        a declared monitoring parameter, and that every scaling-config-action
        references an existing day1-2 config-primitive.

        :param indata: VNFD content (dict)
        :raises EngineException: 422 on dangling references
        """
        # Collect every monitoring-parameter id declared anywhere in the VNFD
        # (ivld, vdu and df level) into one flat namespace.
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Every scaling criterion must reference a known
                # monitoring parameter.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action only makes sense when a day1-2
                    # configuration matching the VNF id exists.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        # NOTE(review): the for/else below raises when *any*
                        # day1-2 configuration lacks the referenced primitive,
                        # even if another configuration declares it — confirm
                        # this per-configuration requirement is intended.
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1186
1187 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1188 """
1189 Deletes associate file system storage (via super)
1190 Deletes associated vnfpkgops from database.
1191 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1192 :param _id: server internal id
1193 :param db_content: The database content of the descriptor
1194 :return: None
1195 :raises: FsException in case of error while deleting associated storage
1196 """
1197 super().delete_extra(session, _id, db_content, not_send_msg)
1198 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1199 self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
1200
1201 def sol005_projection(self, data):
1202 data["onboardingState"] = data["_admin"]["onboardingState"]
1203 data["operationalState"] = data["_admin"]["operationalState"]
1204 data["usageState"] = data["_admin"]["usageState"]
1205
1206 links = {}
1207 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1208 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1209 links["packageContent"] = {
1210 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1211 }
1212 data["_links"] = links
1213
1214 return super().sol005_projection(data)
1215
1216 @staticmethod
1217 def find_software_version(vnfd: dict) -> str:
1218 """Find the sotware version in the VNFD descriptors
1219
1220 Args:
1221 vnfd (dict): Descriptor as a dictionary
1222
1223 Returns:
1224 software-version (str)
1225 """
1226 default_sw_version = "1.0"
1227 if vnfd.get("vnfd"):
1228 vnfd = vnfd["vnfd"]
1229 if vnfd.get("software-version"):
1230 return vnfd["software-version"]
1231 else:
1232 return default_sw_version
1233
1234 @staticmethod
1235 def extract_policies(vnfd: dict) -> dict:
1236 """Removes the policies from the VNFD descriptors
1237
1238 Args:
1239 vnfd (dict): Descriptor as a dictionary
1240
1241 Returns:
1242 vnfd (dict): VNFD which does not include policies
1243 """
1244 for df in vnfd.get("df", {}):
1245 for policy in ["scaling-aspect", "healing-aspect"]:
1246 if (df.get(policy, {})):
1247 df.pop(policy)
1248 for vdu in vnfd.get("vdu", {}):
1249 for alarm_policy in ["alarm", "monitoring-parameter"]:
1250 if (vdu.get(alarm_policy, {})):
1251 vdu.pop(alarm_policy)
1252 return vnfd
1253
1254 @staticmethod
1255 def extract_day12_primitives(vnfd: dict) -> dict:
1256 """Removes the day12 primitives from the VNFD descriptors
1257
1258 Args:
1259 vnfd (dict): Descriptor as a dictionary
1260
1261 Returns:
1262 vnfd (dict)
1263 """
1264 for df_id, df in enumerate(vnfd.get("df", {})):
1265 if (
1266 df.get("lcm-operations-configuration", {})
1267 .get("operate-vnf-op-config", {})
1268 .get("day1-2")
1269 ):
1270 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1271 "day1-2"
1272 )
1273 for config_id, config in enumerate(day12):
1274 for key in [
1275 "initial-config-primitive",
1276 "config-primitive",
1277 "terminate-config-primitive",
1278 ]:
1279 config.pop(key, None)
1280 day12[config_id] = config
1281 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1282 "day1-2"
1283 ] = day12
1284 vnfd["df"][df_id] = df
1285 return vnfd
1286
1287 def remove_modifiable_items(self, vnfd: dict) -> dict:
1288 """Removes the modifiable parts from the VNFD descriptors
1289
1290 It calls different extract functions according to different update types
1291 to clear all the modifiable items from VNFD
1292
1293 Args:
1294 vnfd (dict): Descriptor as a dictionary
1295
1296 Returns:
1297 vnfd (dict): Descriptor which does not include modifiable contents
1298 """
1299 if vnfd.get("vnfd"):
1300 vnfd = vnfd["vnfd"]
1301 vnfd.pop("_admin", None)
1302 # If the other extractions need to be done from VNFD,
1303 # the new extract methods could be appended to below list.
1304 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1305 vnfd_temp = extract_function(vnfd)
1306 vnfd = vnfd_temp
1307 return vnfd
1308
1309 def _validate_descriptor_changes(
1310 self,
1311 descriptor_file_name: str,
1312 old_descriptor_directory: str,
1313 new_descriptor_directory: str,
1314 ):
1315 """Compares the old and new VNFD descriptors and validates the new descriptor.
1316
1317 Args:
1318 old_descriptor_directory (str): Directory of descriptor which is in-use
1319 new_descriptor_directory (str): Directory of directory which is proposed to update (new revision)
1320
1321 Returns:
1322 None
1323
1324 Raises:
1325 EngineException: In case of error when there are unallowed changes
1326 """
1327 try:
1328 with self.fs.file_open(
1329 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1330 ) as old_descriptor_file:
1331 with self.fs.file_open(
1332 (new_descriptor_directory, descriptor_file_name), "r"
1333 ) as new_descriptor_file:
1334 old_content = yaml.load(
1335 old_descriptor_file.read(), Loader=yaml.SafeLoader
1336 )
1337 new_content = yaml.load(
1338 new_descriptor_file.read(), Loader=yaml.SafeLoader
1339 )
1340 if old_content and new_content:
1341 if self.find_software_version(
1342 old_content
1343 ) != self.find_software_version(new_content):
1344 return
1345 disallowed_change = DeepDiff(
1346 self.remove_modifiable_items(old_content),
1347 self.remove_modifiable_items(new_content),
1348 )
1349 if disallowed_change:
1350 changed_nodes = functools.reduce(
1351 lambda a, b: a + " , " + b,
1352 [
1353 node.lstrip("root")
1354 for node in disallowed_change.get(
1355 "values_changed"
1356 ).keys()
1357 ],
1358 )
1359 raise EngineException(
1360 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1361 "there are disallowed changes in the vnf descriptor.",
1362 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1363 )
1364 except (
1365 DbException,
1366 AttributeError,
1367 IndexError,
1368 KeyError,
1369 ValueError,
1370 ) as e:
1371 raise type(e)(
1372 "VNF Descriptor could not be processed with error: {}.".format(e)
1373 )
1374
1375
class NsdTopic(DescriptorTopic):
    """Topic class handling Network Service Descriptors (NSDs).

    Adds NSD-specific validation (SOL006/pyangbind model check, cross
    references to constituent VNFDs) on top of the generic DescriptorTopic
    behavior.
    """

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate 'data' against the ETSI SOL006 NSD yang model.

        :param item: topic name (kept for interface uniformity)
        :param data: descriptor content as a dict
        :param force: when True, unknown yang nodes are skipped instead of failing
        :return: normalized descriptor dict, without envelope or yang prefixes
        :raises EngineException: 422 on old-format or invalid descriptors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile is saved here and restored after the pyangbind
            # round-trip — presumably the serialization does not preserve it
            # intact; TODO confirm.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        """Return True when the descriptor uses the pre-SOL006 OSM format."""
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nsd' / 'etsi-nfv-nsd:nsd' envelope and return the bare NSD.

        :raises EngineException: when the inner 'nsd' list has more than one element
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: model check plus cross-reference validation.

        :param indata: NSD content as a dict
        :param storage_params: _admin.storage section (unused here)
        :param force: passed to the pyangbind validation to skip unknown nodes
        :return: the validated and normalized descriptor
        """
        # SOL005-projected fields must not be stored with the descriptor.
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt-network VLD.

        :raises EngineException: 422 when protocol data is set on a mgmt network
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check that every vnf-profile:vnfd-id appears in the NSD vnfd-id list.

        :raises EngineException: 422 on a dangling vnfd-id reference
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the operational state to its current value is a conflict.
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a {vnfd-id: vnfd} index for the descriptor's constituent VNFDs.

        :raises EngineException: 409 when a referenced vnfd does not exist
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check that every constituent-cpd-id matches an ext-cpd of its VNFD.

        :param df: deployment flavor of the NSD
        :param vnfds_index: {vnfd-id: vnfd} index of constituent VNFDs
        :raises EngineException: 422 on a dangling constituent-cpd-id
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit checks, then validate constituent VNFD references."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Revision documents have ids of the form "<_id>:<revision>".
        self.db.del_list(self.topic+"_revisions", { "_id": { "$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        # Unwrap nested "nsd" envelopes (possibly list-wrapped) down to the
        # bare descriptor before comparing revisions.
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of directory which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            with self.fs.file_open(
                (old_descriptor_directory, descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.load(
                        old_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    new_content = yaml.load(
                        new_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )
                        if disallowed_change:
                            # NOTE(review): if the diff contains only added or
                            # removed items, "values_changed" is absent and
                            # .get(...).keys() raises AttributeError, which is
                            # re-raised below as a generic processing error —
                            # consider iterating all change groups.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )
                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Project internal _admin state into the SOL005 view and add _links."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1768
1769
class NstTopic(DescriptorTopic):
    """Topic class handling Network Slice Templates (NSTs)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate 'data' against the NST yang model.

        :param item: topic name (kept for interface uniformity)
        :param data: template content as a dict
        :param force: when True, unknown yang nodes are skipped instead of failing
        :return: normalized template dict, without envelope
        :raises EngineException: 422 on invalid templates
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nst' / 'nst:nst' envelope and return the bare template.

        :raises EngineException: when the envelope list has more than one element
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NST: drop SOL005-projected fields and run model check."""
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit checks, then validate referenced NSDs exist."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Project internal _admin state into the SOL005 view and add _links."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1887
1888
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Unit (PDU) descriptors."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the _admin section with the initial SOL005 onboarding state."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # Any VNF record with a VDU bound to this PDU blocks the deletion.
        query = self._get_project_filter(session)
        query["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", query):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1924
1925
1926 class VnfPkgOpTopic(BaseTopic):
1927 topic = "vnfpkgops"
1928 topic_msg = "vnfd"
1929 schema_new = vnfpkgop_new_schema
1930 schema_edit = None
1931
1932 def __init__(self, db, fs, msg, auth):
1933 BaseTopic.__init__(self, db, fs, msg, auth)
1934
1935 def edit(self, session, _id, indata=None, kwargs=None, content=None):
1936 raise EngineException(
1937 "Method 'edit' not allowed for topic '{}'".format(self.topic),
1938 HTTPStatus.METHOD_NOT_ALLOWED,
1939 )
1940
1941 def delete(self, session, _id, dry_run=False):
1942 raise EngineException(
1943 "Method 'delete' not allowed for topic '{}'".format(self.topic),
1944 HTTPStatus.METHOD_NOT_ALLOWED,
1945 )
1946
1947 def delete_list(self, session, filter_q=None):
1948 raise EngineException(
1949 "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
1950 HTTPStatus.METHOD_NOT_ALLOWED,
1951 )
1952
1953 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
1954 """
1955 Creates a new entry into database.
1956 :param rollback: list to append created items at database in case a rollback may to be done
1957 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1958 :param indata: data to be inserted
1959 :param kwargs: used to override the indata descriptor
1960 :param headers: http request headers
1961 :return: _id, op_id:
1962 _id: identity of the inserted data.
1963 op_id: None
1964 """
1965 self._update_input_with_kwargs(indata, kwargs)
1966 validate_input(indata, self.schema_new)
1967 vnfpkg_id = indata["vnfPkgId"]
1968 filter_q = BaseTopic._get_project_filter(session)
1969 filter_q["_id"] = vnfpkg_id
1970 vnfd = self.db.get_one("vnfds", filter_q)
1971 operation = indata["lcmOperationType"]
1972 kdu_name = indata["kdu_name"]
1973 for kdu in vnfd.get("kdu", []):
1974 if kdu["name"] == kdu_name:
1975 helm_chart = kdu.get("helm-chart")
1976 juju_bundle = kdu.get("juju-bundle")
1977 break
1978 else:
1979 raise EngineException(
1980 "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
1981 )
1982 if helm_chart:
1983 indata["helm-chart"] = helm_chart
1984 match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
1985 repo_name = match.group(1) if match else None
1986 elif juju_bundle:
1987 indata["juju-bundle"] = juju_bundle
1988 match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
1989 repo_name = match.group(1) if match else None
1990 else:
1991 raise EngineException(
1992 "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
1993 vnfpkg_id, kdu_name
1994 )
1995 )
1996 if repo_name:
1997 del filter_q["_id"]
1998 filter_q["name"] = repo_name
1999 repo = self.db.get_one("k8srepos", filter_q)
2000 k8srepo_id = repo.get("_id")
2001 k8srepo_url = repo.get("url")
2002 else:
2003 k8srepo_id = None
2004 k8srepo_url = None
2005 indata["k8srepoId"] = k8srepo_id
2006 indata["k8srepo_url"] = k8srepo_url
2007 vnfpkgop_id = str(uuid4())
2008 vnfpkgop_desc = {
2009 "_id": vnfpkgop_id,
2010 "operationState": "PROCESSING",
2011 "vnfPkgId": vnfpkg_id,
2012 "lcmOperationType": operation,
2013 "isAutomaticInvocation": False,
2014 "isCancelPending": False,
2015 "operationParams": indata,
2016 "links": {
2017 "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
2018 "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
2019 },
2020 }
2021 self.format_on_new(
2022 vnfpkgop_desc, session["project_id"], make_public=session["public"]
2023 )
2024 ctime = vnfpkgop_desc["_admin"]["created"]
2025 vnfpkgop_desc["statusEnteredTime"] = ctime
2026 vnfpkgop_desc["startTime"] = ctime
2027 self.db.create(self.topic, vnfpkgop_desc)
2028 rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
2029 self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
2030 return vnfpkgop_id, None