Fixes validation of helm charts to include URLs
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23 import re
24
25 # import logging
26 from deepdiff import DeepDiff
27 from hashlib import md5
28 from osm_common.dbbase import DbException, deep_update_rfc7396
29 from http import HTTPStatus
30 from time import time
31 from uuid import uuid4
32 from re import fullmatch
33 from zipfile import ZipFile
34 from urllib.parse import urlparse
35 from osm_nbi.validation import (
36 ValidationError,
37 pdu_new_schema,
38 pdu_edit_schema,
39 validate_input,
40 vnfpkgop_new_schema,
41 )
42 from osm_nbi.base_topic import (
43 BaseTopic,
44 EngineException,
45 get_iterable,
46 detect_descriptor_usage,
47 )
48 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
49 from osm_im.nst import nst as nst_im
50 from pyangbind.lib.serialise import pybindJSONDecoder
51 import pyangbind.lib.pybindJSON as pybindJSON
52 from osm_nbi import utils
53
54 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
55
# Accepts helm chart references of the form "[repo/]chart-name": lowercase
# alphanumerics with inner hyphens, and an optional repository prefix ending
# in "/". Plain URLs do NOT match this pattern; they are accepted separately
# by VnfdTopic.validate_helm_chart via urlparse.
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
59
60
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor-package topics (vnfds, nsds, ...):
    two-step SOL005 creation, chunked package upload, revision storage and
    package file-system management.
    """

    def __init__(self, db, fs, msg, auth):
        # db: database driver; fs: package file-system storage;
        # msg: message-bus producer; auth: authentication connector
        super().__init__(db, fs, msg, auth)
64
    def _validate_input_new(self, indata, storage_params, force=False):
        """Hook for subclasses to validate/normalize a new descriptor.

        Base implementation accepts anything and returns *indata* unchanged.
        """
        return indata
67
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate an edited descriptor before it is written back.

        Steps: run base-topic checks, enforce unique id/name inside every
        embedded list, re-validate the whole descriptor with pyangbind
        (skipped result when session is forced) and verify the descriptor id
        is not duplicated inside the project.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after applying the edit
        :param edit_content: the edit payload itself
        :param _id: internal database id of the descriptor
        :return: the serialized (normalized) final content
        :raises EngineException: 422 on validation errors, 409 on id clash
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk every list of dicts; within one list all items
            # must have a unique "id" (or "name" when the first item has no id)
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            # the first item decides whether this list is keyed by "id" or "name"
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" was present in final_content; a
        # descriptor without it would raise KeyError here — confirm callers
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            # exclude this very document from the duplicate search
            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
139
140 @staticmethod
141 def format_on_new(content, project_id=None, make_public=False):
142 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
143 content["_admin"]["onboardingState"] = "CREATED"
144 content["_admin"]["operationalState"] = "DISABLED"
145 content["_admin"]["usageState"] = "NOT_IN_USE"
146
147 def delete_extra(self, session, _id, db_content, not_send_msg=None):
148 """
149 Deletes file system storage associated with the descriptor
150 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
151 :param _id: server internal id
152 :param db_content: The database content of the descriptor
153 :param not_send_msg: To not send message (False) or store content (list) instead
154 :return: None if ok or raises EngineException with the problem
155 """
156 self.fs.file_delete(_id, ignore_non_exist=True)
157 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
158 # Remove file revisions
159 if "revision" in db_content["_admin"]:
160 revision = db_content["_admin"]["revision"]
161 while revision > 0:
162 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
163 revision = revision - 1
164
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Return the single descriptor of *topic* with descriptor id *id*.

        First looks among descriptors matching the session's project filter;
        if none is found it retries (see NOTE below) and raises if zero or
        several match.
        :raises DbException: NOT_FOUND when no match, CONFLICT when ambiguous
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second query builds the same filter as the first
        # one; nothing here explicitly widens the search to public
        # descriptors despite the comment — confirm against
        # BaseTopic._get_project_filter semantics.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
199
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # Only user-provided metadata is stored at creation time; the actual
        # descriptor arrives later via upload_content(). Revisions start at 0.
        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None
237
238 def upload_content(self, session, _id, indata, kwargs, headers):
239 """
240 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
241 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
242 :param _id : the nsd,vnfd is already created, this is the id
243 :param indata: http body request
244 :param kwargs: user query string to override parameters. NOT USED
245 :param headers: http request headers
246 :return: True if package is completely uploaded or False if partial content has been uploded
247 Raise exception on error
248 """
249 # Check that _id exists and it is valid
250 current_desc = self.show(session, _id)
251
252 content_range_text = headers.get("Content-Range")
253 expected_md5 = headers.get("Content-File-MD5")
254 compressed = None
255 content_type = headers.get("Content-Type")
256 if (
257 content_type
258 and "application/gzip" in content_type
259 or "application/x-gzip" in content_type
260 ):
261 compressed = "gzip"
262 if content_type and "application/zip" in content_type:
263 compressed = "zip"
264 filename = headers.get("Content-Filename")
265 if not filename and compressed:
266 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
267 elif not filename:
268 filename = "package"
269
270 revision = 1
271 if "revision" in current_desc["_admin"]:
272 revision = current_desc["_admin"]["revision"] + 1
273
274 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
275 file_pkg = None
276 error_text = ""
277 fs_rollback = []
278
279 try:
280 if content_range_text:
281 content_range = (
282 content_range_text.replace("-", " ").replace("/", " ").split()
283 )
284 if (
285 content_range[0] != "bytes"
286 ): # TODO check x<y not negative < total....
287 raise IndexError()
288 start = int(content_range[1])
289 end = int(content_range[2]) + 1
290 total = int(content_range[3])
291 else:
292 start = 0
293 # Rather than using a temp folder, we will store the package in a folder based on
294 # the current revision.
295 proposed_revision_path = (
296 _id + ":" + str(revision)
297 ) # all the content is upload here and if ok, it is rename from id_ to is folder
298
299 if start:
300 if not self.fs.file_exists(proposed_revision_path, "dir"):
301 raise EngineException(
302 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
303 )
304 else:
305 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
306 self.fs.mkdir(proposed_revision_path)
307 fs_rollback.append(proposed_revision_path)
308
309 storage = self.fs.get_params()
310 storage["folder"] = proposed_revision_path
311
312 file_path = (proposed_revision_path, filename)
313 if self.fs.file_exists(file_path, "file"):
314 file_size = self.fs.file_size(file_path)
315 else:
316 file_size = 0
317 if file_size != start:
318 raise EngineException(
319 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
320 file_size, start
321 ),
322 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
323 )
324 file_pkg = self.fs.file_open(file_path, "a+b")
325 if isinstance(indata, dict):
326 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
327 file_pkg.write(indata_text.encode(encoding="utf-8"))
328 else:
329 indata_len = 0
330 while True:
331 indata_text = indata.read(4096)
332 indata_len += len(indata_text)
333 if not indata_text:
334 break
335 file_pkg.write(indata_text)
336 if content_range_text:
337 if indata_len != end - start:
338 raise EngineException(
339 "Mismatch between Content-Range header {}-{} and body length of {}".format(
340 start, end - 1, indata_len
341 ),
342 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
343 )
344 if end != total:
345 # TODO update to UPLOADING
346 return False
347
348 # PACKAGE UPLOADED
349 if expected_md5:
350 file_pkg.seek(0, 0)
351 file_md5 = md5()
352 chunk_data = file_pkg.read(1024)
353 while chunk_data:
354 file_md5.update(chunk_data)
355 chunk_data = file_pkg.read(1024)
356 if expected_md5 != file_md5.hexdigest():
357 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
358 file_pkg.seek(0, 0)
359 if compressed == "gzip":
360 tar = tarfile.open(mode="r", fileobj=file_pkg)
361 descriptor_file_name = None
362 for tarinfo in tar:
363 tarname = tarinfo.name
364 tarname_path = tarname.split("/")
365 if (
366 not tarname_path[0] or ".." in tarname_path
367 ): # if start with "/" means absolute path
368 raise EngineException(
369 "Absolute path or '..' are not allowed for package descriptor tar.gz"
370 )
371 if len(tarname_path) == 1 and not tarinfo.isdir():
372 raise EngineException(
373 "All files must be inside a dir for package descriptor tar.gz"
374 )
375 if (
376 tarname.endswith(".yaml")
377 or tarname.endswith(".json")
378 or tarname.endswith(".yml")
379 ):
380 storage["pkg-dir"] = tarname_path[0]
381 if len(tarname_path) == 2:
382 if descriptor_file_name:
383 raise EngineException(
384 "Found more than one descriptor file at package descriptor tar.gz"
385 )
386 descriptor_file_name = tarname
387 if not descriptor_file_name:
388 raise EngineException(
389 "Not found any descriptor file at package descriptor tar.gz"
390 )
391 storage["descriptor"] = descriptor_file_name
392 storage["zipfile"] = filename
393 self.fs.file_extract(tar, proposed_revision_path)
394 with self.fs.file_open(
395 (proposed_revision_path, descriptor_file_name), "r"
396 ) as descriptor_file:
397 content = descriptor_file.read()
398 elif compressed == "zip":
399 zipfile = ZipFile(file_pkg)
400 descriptor_file_name = None
401 for package_file in zipfile.infolist():
402 zipfilename = package_file.filename
403 file_path = zipfilename.split("/")
404 if (
405 not file_path[0] or ".." in zipfilename
406 ): # if start with "/" means absolute path
407 raise EngineException(
408 "Absolute path or '..' are not allowed for package descriptor zip"
409 )
410
411 if (
412 zipfilename.endswith(".yaml")
413 or zipfilename.endswith(".json")
414 or zipfilename.endswith(".yml")
415 ) and (
416 zipfilename.find("/") < 0
417 or zipfilename.find("Definitions") >= 0
418 ):
419 storage["pkg-dir"] = ""
420 if descriptor_file_name:
421 raise EngineException(
422 "Found more than one descriptor file at package descriptor zip"
423 )
424 descriptor_file_name = zipfilename
425 if not descriptor_file_name:
426 raise EngineException(
427 "Not found any descriptor file at package descriptor zip"
428 )
429 storage["descriptor"] = descriptor_file_name
430 storage["zipfile"] = filename
431 self.fs.file_extract(zipfile, proposed_revision_path)
432
433 with self.fs.file_open(
434 (proposed_revision_path, descriptor_file_name), "r"
435 ) as descriptor_file:
436 content = descriptor_file.read()
437 else:
438 content = file_pkg.read()
439 storage["descriptor"] = descriptor_file_name = filename
440
441 if descriptor_file_name.endswith(".json"):
442 error_text = "Invalid json format "
443 indata = json.load(content)
444 else:
445 error_text = "Invalid yaml format "
446 indata = yaml.safe_load(content)
447
448 # Need to close the file package here so it can be copied from the
449 # revision to the current, unrevisioned record
450 if file_pkg:
451 file_pkg.close()
452 file_pkg = None
453
454 # Fetch both the incoming, proposed revision and the original revision so we
455 # can call a validate method to compare them
456 current_revision_path = _id + "/"
457 self.fs.sync(from_path=current_revision_path)
458 self.fs.sync(from_path=proposed_revision_path)
459
460 if revision > 1:
461 try:
462 self._validate_descriptor_changes(
463 _id,
464 descriptor_file_name,
465 current_revision_path,
466 proposed_revision_path,
467 )
468 except Exception as e:
469 shutil.rmtree(
470 self.fs.path + current_revision_path, ignore_errors=True
471 )
472 shutil.rmtree(
473 self.fs.path + proposed_revision_path, ignore_errors=True
474 )
475 # Only delete the new revision. We need to keep the original version in place
476 # as it has not been changed.
477 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
478 raise e
479
480 indata = self._remove_envelop(indata)
481
482 # Override descriptor with query string kwargs
483 if kwargs:
484 self._update_input_with_kwargs(indata, kwargs)
485
486 current_desc["_admin"]["storage"] = storage
487 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
488 current_desc["_admin"]["operationalState"] = "ENABLED"
489 current_desc["_admin"]["modified"] = time()
490 current_desc["_admin"]["revision"] = revision
491
492 deep_update_rfc7396(current_desc, indata)
493 current_desc = self.check_conflict_on_edit(
494 session, current_desc, indata, _id=_id
495 )
496
497 # Copy the revision to the active package name by its original id
498 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
499 os.rename(
500 self.fs.path + proposed_revision_path,
501 self.fs.path + current_revision_path,
502 )
503 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
504 self.fs.mkdir(current_revision_path)
505 self.fs.reverse_sync(from_path=current_revision_path)
506
507 shutil.rmtree(self.fs.path + _id)
508
509 self.db.replace(self.topic, _id, current_desc)
510
511 # Store a copy of the package as a point in time revision
512 revision_desc = dict(current_desc)
513 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
514 self.db.create(self.topic + "_revisions", revision_desc)
515 fs_rollback = []
516
517 indata["_id"] = _id
518 self._send_msg("edited", indata)
519
520 # TODO if descriptor has changed because kwargs update content and remove cached zip
521 # TODO if zip is not present creates one
522 return True
523
524 except EngineException:
525 raise
526 except IndexError:
527 raise EngineException(
528 "invalid Content-Range header format. Expected 'bytes start-end/total'",
529 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
530 )
531 except IOError as e:
532 raise EngineException(
533 "invalid upload transaction sequence: '{}'".format(e),
534 HTTPStatus.BAD_REQUEST,
535 )
536 except tarfile.ReadError as e:
537 raise EngineException(
538 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
539 )
540 except (ValueError, yaml.YAMLError) as e:
541 raise EngineException(error_text + str(e))
542 except ValidationError as e:
543 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
544 finally:
545 if file_pkg:
546 file_pkg.close()
547 for file in fs_rollback:
548 self.fs.file_delete(file, ignore_non_exist=True)
549
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip and/or text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # only a fully uploaded package can be downloaded
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype   accept  ZIP  TEXT -> result
        # manyfiles         yes  X    -> zip
        #                   no   yes  -> error
        # onefile           yes  no   -> zip
        #                   X    yes  -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
640
641 def _remove_yang_prefixes_from_descriptor(self, descriptor):
642 new_descriptor = {}
643 for k, v in descriptor.items():
644 new_v = v
645 if isinstance(v, dict):
646 new_v = self._remove_yang_prefixes_from_descriptor(v)
647 elif isinstance(v, list):
648 new_v = list()
649 for x in v:
650 if isinstance(x, dict):
651 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
652 else:
653 new_v.append(x)
654 new_descriptor[k.split(":")[-1]] = new_v
655 return new_descriptor
656
    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the YANG model of *item*.

        Base implementation always fails; concrete topics (vnfds, nsds, ...)
        must override it with a real pyangbind-based validation.
        """
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
662
    def _validate_input_edit(self, indata, content, force=False):
        """Normalize and validate a descriptor edit payload.

        Moves "operationalState" and "userDefinedData" under "_admin", checks
        the requested operational state is valid and actually changes.
        :param indata: edit payload (modified in place and returned)
        :param content: current database content of the descriptor
        :param force: unused here
        :return: the normalized indata
        :raises EngineException: invalid state (400), wrong userDefinedData
            type (400) or no-op state change (409)
        """
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # reject a state change that would leave the descriptor unchanged
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata
708
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook invoked on re-upload (revision > 1) to compare the proposed
        package against the currently stored one. Base implementation accepts
        any change; subclasses may raise to veto forbidden modifications.
        """
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass
722
723
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptor (vnfd) packages."""

    # database collection and message-bus topic names
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)
730
    def pyangbind_validation(self, item, data, force=False):
        """Validate a vnfd against the ETSI SOL006 YANG model using pyangbind.

        :param item: topic name (informational)
        :param data: descriptor content without envelope
        :param force: when True, unknown fields are skipped instead of failing
        :return: *data* deep-updated with the serialized (normalized) output
        :raises EngineException: 422 on old-format descriptors or model errors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            # round-trip through yaml, then strip envelope and YANG prefixes
            # to obtain a plain dict comparable with the input
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
756
757 @staticmethod
758 def _descriptor_data_is_in_old_format(data):
759 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
760
761 @staticmethod
762 def _remove_envelop(indata=None):
763 if not indata:
764 return {}
765 clean_indata = indata
766
767 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
768 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
769 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
770 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
771 elif clean_indata.get("vnfd"):
772 if not isinstance(clean_indata["vnfd"], dict):
773 raise EngineException("'vnfd' must be dict")
774 clean_indata = clean_indata["vnfd"]
775
776 return clean_indata
777
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the common descriptor checks, then classify the vnfd.

        _admin.type becomes "vnfd" (only vdus), "pnfd" (only pdus) or
        "hnfd" (hybrid, both kinds present).
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        # set type of vnfd
        contains_pdu = False
        contains_vdu = False
        for vdu in get_iterable(final_content.get("vdu")):
            if vdu.get("pdu-type"):
                contains_pdu = True
            else:
                contains_vdu = True
        if contains_pdu:
            final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
        elif contains_vdu:
            final_content["_admin"]["type"] = "vnfd"
        # if neither vdu nor pdu is present, the type is left unset
        return final_content
797
    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
        that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
        that uses this vnfd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd
        # vnfrs reference the descriptor by its internal database _id
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD
        # nsds reference the descriptor by its SOL006 id instead
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )
833
    def _validate_input_new(self, indata, storage_params, force=False):
        """Full validation pipeline for a new/updated vnfd.

        Strips read-only SOL005 attributes, validates against the SOL006
        model, then runs cross-reference checks inside the descriptor and
        against the uploaded package content.
        :param storage_params: "_admin.storage" info of the uploaded package
        :return: the validated/normalized descriptor
        :raises EngineException: 422 on any validation failure
        """
        # read-only attributes that must not be stored
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): called once per vdu although it does not use the
            # loop variable — confirm whether one call would suffice
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)
        self.validate_helm_chart(indata)

        return indata
859
860 @staticmethod
861 def validate_helm_chart(indata):
862 def is_url(url):
863 result = urlparse(url)
864 return all([result.scheme, result.netloc])
865
866 kdus = indata.get("kdu", [])
867 for kdu in kdus:
868 helm_chart_value = kdu.get("helm-chart")
869 if not helm_chart_value:
870 continue
871 if not (
872 valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
873 ):
874 raise EngineException(
875 "helm-chart '{}' is not valid".format(helm_chart_value),
876 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
877 )
878
879 @staticmethod
880 def validate_mgmt_interface_connection_point(indata):
881 if not indata.get("vdu"):
882 return
883 if not indata.get("mgmt-cp"):
884 raise EngineException(
885 "'mgmt-cp' is a mandatory field and it is not defined",
886 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
887 )
888
889 for cp in get_iterable(indata.get("ext-cpd")):
890 if cp["id"] == indata["mgmt-cp"]:
891 break
892 else:
893 raise EngineException(
894 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
895 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
896 )
897
898 @staticmethod
899 def validate_vdu_internal_connection_points(vdu):
900 int_cpds = set()
901 for cpd in get_iterable(vdu.get("int-cpd")):
902 cpd_id = cpd.get("id")
903 if cpd_id and cpd_id in int_cpds:
904 raise EngineException(
905 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
906 vdu["id"], cpd_id
907 ),
908 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
909 )
910 int_cpds.add(cpd_id)
911
    @staticmethod
    def validate_external_connection_points(indata):
        """Check ext-cpd ids are unique and that each ext-cpd:int-cpd mapping
        points at an existing (vdu id, int-cpd id) pair.
        :raises EngineException: 422 on duplicates or dangling references
        """
        # collect every (vdu id, int-cpd id) pair declared in the descriptor
        all_vdus_int_cpds = set()
        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

        ext_cpds = set()
        for cpd in get_iterable(indata.get("ext-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in ext_cpds:
                raise EngineException(
                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            ext_cpds.add(cpd_id)

            int_cpd = cpd.get("int-cpd")
            if int_cpd:
                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                    raise EngineException(
                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                            cpd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
        # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
939
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """Ensure every vdu-level juju charm declared in a df day1-2
        configuration has its charm folder present in the uploaded package
        (either "charms" or "Scripts/charms").
        :raises EngineException: when a declared charm is missing
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # a day1-2 config applies to a vdu when their ids match;
                        # only configs with a juju execution environment matter
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
965
966 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
967 if not vdu.get("cloud-init-file"):
968 return
969 if not self._validate_package_folders(
970 storage_params, "cloud_init", vdu["cloud-init-file"]
971 ) and not self._validate_package_folders(
972 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
973 ):
974 raise EngineException(
975 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
976 "package".format(indata["id"], vdu["id"])
977 )
978
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """Check that a VNF-level juju charm declared in the descriptor exists in the package.

        For the day1-2 configuration whose id matches the VNFD id and that declares
        a juju execution environment, a "charms" (or "Scripts/charms") folder must
        exist in the package storage; otherwise an EngineException is raised.

        :param storage_params: _admin.storage section describing the package location
        :param indata: VNFD as a dictionary
        """
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these are `return`, not `continue` — the first df
            # lacking the configuration sections stops validation for all
            # remaining dfs as well. Presumably intentional; confirm.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # Only the day1-2 entry named after the VNFD itself is VNF-level.
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        # The charm folder may be at the package root or under Scripts/.
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
1006
1007 def _validate_package_folders(self, storage_params, folder, file=None):
1008 if not storage_params:
1009 return False
1010 elif not storage_params.get("pkg-dir"):
1011 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1012 f = "{}_/{}".format(storage_params["folder"], folder)
1013 else:
1014 f = "{}/{}".format(storage_params["folder"], folder)
1015 if file:
1016 return self.fs.file_exists("{}/{}".format(f, file), "file")
1017 else:
1018 if self.fs.file_exists(f, "dir"):
1019 if self.fs.dir_ls(f):
1020 return True
1021 return False
1022 else:
1023 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1024 f = "{}_/{}/{}".format(
1025 storage_params["folder"], storage_params["pkg-dir"], folder
1026 )
1027 else:
1028 f = "{}/{}/{}".format(
1029 storage_params["folder"], storage_params["pkg-dir"], folder
1030 )
1031 if file:
1032 return self.fs.file_exists("{}/{}".format(f, file), "file")
1033 else:
1034 if self.fs.file_exists(f, "dir"):
1035 if self.fs.dir_ls(f):
1036 return True
1037 return False
1038
1039 @staticmethod
1040 def validate_internal_virtual_links(indata):
1041 all_ivld_ids = set()
1042 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1043 ivld_id = ivld.get("id")
1044 if ivld_id and ivld_id in all_ivld_ids:
1045 raise EngineException(
1046 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1047 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1048 )
1049 else:
1050 all_ivld_ids.add(ivld_id)
1051
1052 for vdu in get_iterable(indata.get("vdu")):
1053 for int_cpd in get_iterable(vdu.get("int-cpd")):
1054 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1055 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1056 raise EngineException(
1057 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1058 "int-virtual-link-desc".format(
1059 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1060 ),
1061 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1062 )
1063
1064 for df in get_iterable(indata.get("df")):
1065 for vlp in get_iterable(df.get("virtual-link-profile")):
1066 vlp_ivld_id = vlp.get("id")
1067 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1068 raise EngineException(
1069 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1070 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1071 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1072 )
1073
1074 @staticmethod
1075 def validate_monitoring_params(indata):
1076 all_monitoring_params = set()
1077 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1078 for mp in get_iterable(ivld.get("monitoring-parameters")):
1079 mp_id = mp.get("id")
1080 if mp_id and mp_id in all_monitoring_params:
1081 raise EngineException(
1082 "Duplicated monitoring-parameter id in "
1083 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1084 ivld["id"], mp_id
1085 ),
1086 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1087 )
1088 else:
1089 all_monitoring_params.add(mp_id)
1090
1091 for vdu in get_iterable(indata.get("vdu")):
1092 for mp in get_iterable(vdu.get("monitoring-parameter")):
1093 mp_id = mp.get("id")
1094 if mp_id and mp_id in all_monitoring_params:
1095 raise EngineException(
1096 "Duplicated monitoring-parameter id in "
1097 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1098 vdu["id"], mp_id
1099 ),
1100 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1101 )
1102 else:
1103 all_monitoring_params.add(mp_id)
1104
1105 for df in get_iterable(indata.get("df")):
1106 for mp in get_iterable(df.get("monitoring-parameter")):
1107 mp_id = mp.get("id")
1108 if mp_id and mp_id in all_monitoring_params:
1109 raise EngineException(
1110 "Duplicated monitoring-parameter id in "
1111 "df[id='{}']:monitoring-parameter[id='{}']".format(
1112 df["id"], mp_id
1113 ),
1114 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1115 )
1116 else:
1117 all_monitoring_params.add(mp_id)
1118
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate scaling-aspect sections of a VNFD.

        Checks that every scaling-criteria vnf-monitoring-param-ref targets an
        existing monitoring parameter, and that every scaling-config-action is
        backed by a day1-2 configuration containing the referenced config
        primitive. Raises EngineException (422) on the first violation.

        :param indata: VNFD as a dictionary
        """
        # Monitoring parameter ids share one namespace across ivld/vdu/df sections.
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Every scaling criteria must reference a known monitoring param.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration whose
                    # id equals the VNFD id.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        # for/else: the else fires only when no config-primitive of
                        # this configuration matched the referenced primitive name.
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1199
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        Deletes associated revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: passed through to super().delete_extra unchanged
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # NOTE(review): $regex removes every revision document whose _id contains
        # this package id (revision ids presumably embed it — confirm against how
        # revisions are created).
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1213
1214 def sol005_projection(self, data):
1215 data["onboardingState"] = data["_admin"]["onboardingState"]
1216 data["operationalState"] = data["_admin"]["operationalState"]
1217 data["usageState"] = data["_admin"]["usageState"]
1218
1219 links = {}
1220 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1221 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1222 links["packageContent"] = {
1223 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1224 }
1225 data["_links"] = links
1226
1227 return super().sol005_projection(data)
1228
1229 @staticmethod
1230 def find_software_version(vnfd: dict) -> str:
1231 """Find the sotware version in the VNFD descriptors
1232
1233 Args:
1234 vnfd (dict): Descriptor as a dictionary
1235
1236 Returns:
1237 software-version (str)
1238 """
1239 default_sw_version = "1.0"
1240 if vnfd.get("vnfd"):
1241 vnfd = vnfd["vnfd"]
1242 if vnfd.get("software-version"):
1243 return vnfd["software-version"]
1244 else:
1245 return default_sw_version
1246
1247 @staticmethod
1248 def extract_policies(vnfd: dict) -> dict:
1249 """Removes the policies from the VNFD descriptors
1250
1251 Args:
1252 vnfd (dict): Descriptor as a dictionary
1253
1254 Returns:
1255 vnfd (dict): VNFD which does not include policies
1256 """
1257 for df in vnfd.get("df", {}):
1258 for policy in ["scaling-aspect", "healing-aspect"]:
1259 if df.get(policy, {}):
1260 df.pop(policy)
1261 for vdu in vnfd.get("vdu", {}):
1262 for alarm_policy in ["alarm", "monitoring-parameter"]:
1263 if vdu.get(alarm_policy, {}):
1264 vdu.pop(alarm_policy)
1265 return vnfd
1266
1267 @staticmethod
1268 def extract_day12_primitives(vnfd: dict) -> dict:
1269 """Removes the day12 primitives from the VNFD descriptors
1270
1271 Args:
1272 vnfd (dict): Descriptor as a dictionary
1273
1274 Returns:
1275 vnfd (dict)
1276 """
1277 for df_id, df in enumerate(vnfd.get("df", {})):
1278 if (
1279 df.get("lcm-operations-configuration", {})
1280 .get("operate-vnf-op-config", {})
1281 .get("day1-2")
1282 ):
1283 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1284 "day1-2"
1285 )
1286 for config_id, config in enumerate(day12):
1287 for key in [
1288 "initial-config-primitive",
1289 "config-primitive",
1290 "terminate-config-primitive",
1291 ]:
1292 config.pop(key, None)
1293 day12[config_id] = config
1294 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1295 "day1-2"
1296 ] = day12
1297 vnfd["df"][df_id] = df
1298 return vnfd
1299
1300 def remove_modifiable_items(self, vnfd: dict) -> dict:
1301 """Removes the modifiable parts from the VNFD descriptors
1302
1303 It calls different extract functions according to different update types
1304 to clear all the modifiable items from VNFD
1305
1306 Args:
1307 vnfd (dict): Descriptor as a dictionary
1308
1309 Returns:
1310 vnfd (dict): Descriptor which does not include modifiable contents
1311 """
1312 if vnfd.get("vnfd"):
1313 vnfd = vnfd["vnfd"]
1314 vnfd.pop("_admin", None)
1315 # If the other extractions need to be done from VNFD,
1316 # the new extract methods could be appended to below list.
1317 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1318 vnfd_temp = extract_function(vnfd)
1319 vnfd = vnfd_temp
1320 return vnfd
1321
1322 def _validate_descriptor_changes(
1323 self,
1324 descriptor_id: str,
1325 descriptor_file_name: str,
1326 old_descriptor_directory: str,
1327 new_descriptor_directory: str,
1328 ):
1329 """Compares the old and new VNFD descriptors and validates the new descriptor.
1330
1331 Args:
1332 old_descriptor_directory (str): Directory of descriptor which is in-use
1333 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1334
1335 Returns:
1336 None
1337
1338 Raises:
1339 EngineException: In case of error when there are unallowed changes
1340 """
1341 try:
1342 # If VNFD does not exist in DB or it is not in use by any NS,
1343 # validation is not required.
1344 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1345 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1346 return
1347
1348 # Get the old and new descriptor contents in order to compare them.
1349 with self.fs.file_open(
1350 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1351 ) as old_descriptor_file:
1352 with self.fs.file_open(
1353 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1354 ) as new_descriptor_file:
1355 old_content = yaml.safe_load(old_descriptor_file.read())
1356 new_content = yaml.safe_load(new_descriptor_file.read())
1357
1358 # If software version has changed, we do not need to validate
1359 # the differences anymore.
1360 if old_content and new_content:
1361 if self.find_software_version(
1362 old_content
1363 ) != self.find_software_version(new_content):
1364 return
1365
1366 disallowed_change = DeepDiff(
1367 self.remove_modifiable_items(old_content),
1368 self.remove_modifiable_items(new_content),
1369 )
1370
1371 if disallowed_change:
1372 changed_nodes = functools.reduce(
1373 lambda a, b: a + " , " + b,
1374 [
1375 node.lstrip("root")
1376 for node in disallowed_change.get(
1377 "values_changed"
1378 ).keys()
1379 ],
1380 )
1381
1382 raise EngineException(
1383 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1384 "there are disallowed changes in the vnf descriptor.",
1385 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1386 )
1387 except (
1388 DbException,
1389 AttributeError,
1390 IndexError,
1391 KeyError,
1392 ValueError,
1393 ) as e:
1394 raise type(e)(
1395 "VNF Descriptor could not be processed with error: {}.".format(e)
1396 )
1397
1398
class NsdTopic(DescriptorTopic):
    """Topic handling NS descriptors (SOL005 ns_descriptors collection)."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 model using pyangbind.

        Returns the normalized descriptor (envelope and yang prefixes removed).
        Raises EngineException (422) for old-format or invalid descriptors.
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile is kept aside and restored afterwards, as it is lost
            # through the pyangbind round-trip below.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 descriptors used an "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nsd"/"etsi-nfv-nsd:nsd" envelope and return the bare NSD."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Normalize and validate a new NSD, returning the clean descriptor."""
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Forbid virtual-link-protocol-data on profiles of a mgmt-network VLD."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df vnf-profile references a vnfd-id declared by the NSD."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return a {vnfd-id: vnfd} index of the NSD's constituent VNFDs.

        Raises EngineException (409) when a referenced vnfd-id does not exist
        within the project scope.
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check every constituent-cpd-id matches an ext-cpd of its VNFD."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the NSD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_id: internal id of the NSD
            descriptor_file_name: name of the descriptor file in both directories
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # DeepDiff may report only additions/removals, in which
                            # case there is no "values_changed" section; fall back
                            # to listing the change categories instead of crashing
                            # with AttributeError on None.
                            changed_items = disallowed_change.get(
                                "values_changed", disallowed_change
                            )
                            changed_nodes = ", ".join(
                                node.lstrip("root") for node in changed_items
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Copy _admin state fields and SOL005 _links onto *data* and delegate to super."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1798
1799
class NstTopic(DescriptorTopic):
    """Topic handling Network Slice Templates (netslice_templates collection)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST against the information model using pyangbind.

        Returns the normalized descriptor with the envelope removed.
        Raises EngineException (422) when the descriptor is invalid.
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nst"/"nst:nst" envelope and return the bare NST."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                # message fixed to match the NSD wording ("a list of only one element")
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Normalize and validate a new NST, returning a copy of the clean descriptor."""
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Copy _admin state fields and SOL005 _links onto *data* and delegate to super."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1917
1918
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Units (PDUs)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply base formatting, then set the SOL005 admin state attributes."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"].update(
            {
                "onboardingState": "CREATED",
                "operationalState": "ENABLED",
                "usageState": "NOT_IN_USE",
            }
        )

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # A VNF record whose vdur points at this PDU blocks the deletion
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1954
1955
class VnfPkgOpTopic(BaseTopic):
    """Topic handling VNF package operations (SOL005 vnfpkg_op_occs)."""

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)
1964
1965 def edit(self, session, _id, indata=None, kwargs=None, content=None):
1966 raise EngineException(
1967 "Method 'edit' not allowed for topic '{}'".format(self.topic),
1968 HTTPStatus.METHOD_NOT_ALLOWED,
1969 )
1970
1971 def delete(self, session, _id, dry_run=False):
1972 raise EngineException(
1973 "Method 'delete' not allowed for topic '{}'".format(self.topic),
1974 HTTPStatus.METHOD_NOT_ALLOWED,
1975 )
1976
1977 def delete_list(self, session, filter_q=None):
1978 raise EngineException(
1979 "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
1980 HTTPStatus.METHOD_NOT_ALLOWED,
1981 )
1982
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the target kdu inside the vnfd; the for/else raises when no kdu matches
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # Extract the repository name from a "<repo>/<chart>" reference. fullmatch
        # requires exactly one "/", so references without a repo prefix (or with
        # several slashes, e.g. URL-style) yield no match and no repo lookup.
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            # Resolve the repo record within the same project scope: reuse the
            # project filter, swapping the vnfd _id criterion for the repo name
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        # Build the VNF package operation occurrence record (SOL005-style links)
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # Mirror the _admin creation time into the SOL005 timestamp fields
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # Publish the operation on the message bus so it gets executed
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None