Bug 1830 fixed: maps completed operations to original operation types
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22 import functools
23
24 # import logging
25 from deepdiff import DeepDiff
26 from hashlib import md5
27 from osm_common.dbbase import DbException, deep_update_rfc7396
28 from http import HTTPStatus
29 from time import time
30 from uuid import uuid4
31 from re import fullmatch
32 from zipfile import ZipFile
33 from osm_nbi.validation import (
34 ValidationError,
35 pdu_new_schema,
36 pdu_edit_schema,
37 validate_input,
38 vnfpkgop_new_schema,
39 )
40 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
41 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
42 from osm_im.nst import nst as nst_im
43 from pyangbind.lib.serialise import pybindJSONDecoder
44 import pyangbind.lib.pybindJSON as pybindJSON
45 from osm_nbi import utils
46
47 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
48
49
class DescriptorTopic(BaseTopic):
    def __init__(self, db, fs, msg, auth):
        """Initialize the topic with database, filesystem, message-bus and auth handlers."""
        super().__init__(db, fs, msg, auth)
54
55 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
56 final_content = super().check_conflict_on_edit(
57 session, final_content, edit_content, _id
58 )
59
60 def _check_unique_id_name(descriptor, position=""):
61 for desc_key, desc_item in descriptor.items():
62 if isinstance(desc_item, list) and desc_item:
63 used_ids = []
64 desc_item_id = None
65 for index, list_item in enumerate(desc_item):
66 if isinstance(list_item, dict):
67 _check_unique_id_name(
68 list_item, "{}.{}[{}]".format(position, desc_key, index)
69 )
70 # Base case
71 if index == 0 and (
72 list_item.get("id") or list_item.get("name")
73 ):
74 desc_item_id = "id" if list_item.get("id") else "name"
75 if desc_item_id and list_item.get(desc_item_id):
76 if list_item[desc_item_id] in used_ids:
77 position = "{}.{}[{}]".format(
78 position, desc_key, index
79 )
80 raise EngineException(
81 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
82 desc_item_id,
83 list_item[desc_item_id],
84 position,
85 ),
86 HTTPStatus.UNPROCESSABLE_ENTITY,
87 )
88 used_ids.append(list_item[desc_item_id])
89
90 _check_unique_id_name(final_content)
91 # 1. validate again with pyangbind
92 # 1.1. remove internal keys
93 internal_keys = {}
94 for k in ("_id", "_admin"):
95 if k in final_content:
96 internal_keys[k] = final_content.pop(k)
97 storage_params = internal_keys["_admin"].get("storage")
98 serialized = self._validate_input_new(
99 final_content, storage_params, session["force"]
100 )
101
102 # 1.2. modify final_content with a serialized version
103 final_content = copy.deepcopy(serialized)
104 # 1.3. restore internal keys
105 for k, v in internal_keys.items():
106 final_content[k] = v
107 if session["force"]:
108 return final_content
109
110 # 2. check that this id is not present
111 if "id" in edit_content:
112 _filter = self._get_project_filter(session)
113
114 _filter["id"] = final_content["id"]
115 _filter["_id.neq"] = _id
116
117 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
118 raise EngineException(
119 "{} with id '{}' already exists for this project".format(
120 self.topic[:-1], final_content["id"]
121 ),
122 HTTPStatus.CONFLICT,
123 )
124
125 return final_content
126
127 @staticmethod
128 def format_on_new(content, project_id=None, make_public=False):
129 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
130 content["_admin"]["onboardingState"] = "CREATED"
131 content["_admin"]["operationalState"] = "DISABLED"
132 content["_admin"]["usageState"] = "NOT_IN_USE"
133
134 def delete_extra(self, session, _id, db_content, not_send_msg=None):
135 """
136 Deletes file system storage associated with the descriptor
137 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
138 :param _id: server internal id
139 :param db_content: The database content of the descriptor
140 :param not_send_msg: To not send message (False) or store content (list) instead
141 :return: None if ok or raises EngineException with the problem
142 """
143 self.fs.file_delete(_id, ignore_non_exist=True)
144 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
145 # Remove file revisions
146 if "revision" in db_content["_admin"]:
147 revision = db_content["_admin"]["revision"]
148 while revision > 0:
149 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
150 revision = revision - 1
151
152
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Return the single descriptor whose human-readable "id" matches, scoped to
        the session's project; raises DbException on not-found or ambiguity.
        :param db: database driver
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name, e.g. "vnfds"
        :param id: human-readable descriptor id (not the internal _id)
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second query builds the exact same filter as the
        # first one — presumably _get_project_filter already matches public
        # entries, making this retry redundant; confirm against BaseTopic.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
187
188 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
189 """
190 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
191 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
192 (self.upload_content)
193 :param rollback: list to append created items at database in case a rollback may to be done
194 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
195 :param indata: data to be inserted
196 :param kwargs: used to override the indata descriptor
197 :param headers: http request headers
198 :return: _id, None: identity of the inserted data; and None as there is not any operation
199 """
200
201 # No needed to capture exceptions
202 # Check Quota
203 self.check_quota(session)
204
205 # _remove_envelop
206 if indata:
207 if "userDefinedData" in indata:
208 indata = indata["userDefinedData"]
209
210 # Override descriptor with query string kwargs
211 self._update_input_with_kwargs(indata, kwargs)
212 # uncomment when this method is implemented.
213 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
214 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
215
216 content = {"_admin": {
217 "userDefinedData": indata,
218 "revision": 0
219 }}
220
221 self.format_on_new(
222 content, session["project_id"], make_public=session["public"]
223 )
224 _id = self.db.create(self.topic, content)
225 rollback.append({"topic": self.topic, "_id": _id})
226 self._send_msg("created", {"_id": _id})
227 return _id, None
228
229 def upload_content(self, session, _id, indata, kwargs, headers):
230 """
231 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
232 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
233 :param _id : the nsd,vnfd is already created, this is the id
234 :param indata: http body request
235 :param kwargs: user query string to override parameters. NOT USED
236 :param headers: http request headers
237 :return: True if package is completely uploaded or False if partial content has been uploded
238 Raise exception on error
239 """
240 # Check that _id exists and it is valid
241 current_desc = self.show(session, _id)
242
243 content_range_text = headers.get("Content-Range")
244 expected_md5 = headers.get("Content-File-MD5")
245 compressed = None
246 content_type = headers.get("Content-Type")
247 if (
248 content_type
249 and "application/gzip" in content_type
250 or "application/x-gzip" in content_type
251 ):
252 compressed = "gzip"
253 if (
254 content_type
255 and "application/zip" in content_type
256 ):
257 compressed = "zip"
258 filename = headers.get("Content-Filename")
259 if not filename and compressed:
260 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
261 elif not filename:
262 filename = "package"
263
264 revision = 1
265 if "revision" in current_desc["_admin"]:
266 revision = current_desc["_admin"]["revision"] + 1
267
268 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
269 file_pkg = None
270 error_text = ""
271 try:
272 if content_range_text:
273 content_range = (
274 content_range_text.replace("-", " ").replace("/", " ").split()
275 )
276 if (
277 content_range[0] != "bytes"
278 ): # TODO check x<y not negative < total....
279 raise IndexError()
280 start = int(content_range[1])
281 end = int(content_range[2]) + 1
282 total = int(content_range[3])
283 else:
284 start = 0
285 # Rather than using a temp folder, we will store the package in a folder based on
286 # the current revision.
287 proposed_revision_path = (
288 _id + ":" + str(revision)
289 ) # all the content is upload here and if ok, it is rename from id_ to is folder
290
291 if start:
292 if not self.fs.file_exists(proposed_revision_path, "dir"):
293 raise EngineException(
294 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
295 )
296 else:
297 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
298 self.fs.mkdir(proposed_revision_path)
299
300 storage = self.fs.get_params()
301 storage["folder"] = _id
302
303 file_path = (proposed_revision_path, filename)
304 if self.fs.file_exists(file_path, "file"):
305 file_size = self.fs.file_size(file_path)
306 else:
307 file_size = 0
308 if file_size != start:
309 raise EngineException(
310 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
311 file_size, start
312 ),
313 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
314 )
315 file_pkg = self.fs.file_open(file_path, "a+b")
316 if isinstance(indata, dict):
317 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
318 file_pkg.write(indata_text.encode(encoding="utf-8"))
319 else:
320 indata_len = 0
321 while True:
322 indata_text = indata.read(4096)
323 indata_len += len(indata_text)
324 if not indata_text:
325 break
326 file_pkg.write(indata_text)
327 if content_range_text:
328 if indata_len != end - start:
329 raise EngineException(
330 "Mismatch between Content-Range header {}-{} and body length of {}".format(
331 start, end - 1, indata_len
332 ),
333 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
334 )
335 if end != total:
336 # TODO update to UPLOADING
337 return False
338
339 # PACKAGE UPLOADED
340 if expected_md5:
341 file_pkg.seek(0, 0)
342 file_md5 = md5()
343 chunk_data = file_pkg.read(1024)
344 while chunk_data:
345 file_md5.update(chunk_data)
346 chunk_data = file_pkg.read(1024)
347 if expected_md5 != file_md5.hexdigest():
348 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
349 file_pkg.seek(0, 0)
350 if compressed == "gzip":
351 tar = tarfile.open(mode="r", fileobj=file_pkg)
352 descriptor_file_name = None
353 for tarinfo in tar:
354 tarname = tarinfo.name
355 tarname_path = tarname.split("/")
356 if (
357 not tarname_path[0] or ".." in tarname_path
358 ): # if start with "/" means absolute path
359 raise EngineException(
360 "Absolute path or '..' are not allowed for package descriptor tar.gz"
361 )
362 if len(tarname_path) == 1 and not tarinfo.isdir():
363 raise EngineException(
364 "All files must be inside a dir for package descriptor tar.gz"
365 )
366 if (
367 tarname.endswith(".yaml")
368 or tarname.endswith(".json")
369 or tarname.endswith(".yml")
370 ):
371 storage["pkg-dir"] = tarname_path[0]
372 if len(tarname_path) == 2:
373 if descriptor_file_name:
374 raise EngineException(
375 "Found more than one descriptor file at package descriptor tar.gz"
376 )
377 descriptor_file_name = tarname
378 if not descriptor_file_name:
379 raise EngineException(
380 "Not found any descriptor file at package descriptor tar.gz"
381 )
382 storage["descriptor"] = descriptor_file_name
383 storage["zipfile"] = filename
384 self.fs.file_extract(tar, proposed_revision_path)
385 with self.fs.file_open(
386 (proposed_revision_path, descriptor_file_name), "r"
387 ) as descriptor_file:
388 content = descriptor_file.read()
389 elif compressed == "zip":
390 zipfile = ZipFile(file_pkg)
391 descriptor_file_name = None
392 for package_file in zipfile.infolist():
393 zipfilename = package_file.filename
394 file_path = zipfilename.split("/")
395 if (
396 not file_path[0] or ".." in zipfilename
397 ): # if start with "/" means absolute path
398 raise EngineException(
399 "Absolute path or '..' are not allowed for package descriptor zip"
400 )
401
402 if (
403 (
404 zipfilename.endswith(".yaml")
405 or zipfilename.endswith(".json")
406 or zipfilename.endswith(".yml")
407 ) and (
408 zipfilename.find("/") < 0
409 or zipfilename.find("Definitions") >= 0
410 )
411 ):
412 storage["pkg-dir"] = ""
413 if descriptor_file_name:
414 raise EngineException(
415 "Found more than one descriptor file at package descriptor zip"
416 )
417 descriptor_file_name = zipfilename
418 if not descriptor_file_name:
419 raise EngineException(
420 "Not found any descriptor file at package descriptor zip"
421 )
422 storage["descriptor"] = descriptor_file_name
423 storage["zipfile"] = filename
424 self.fs.file_extract(zipfile, proposed_revision_path)
425
426 with self.fs.file_open(
427 (proposed_revision_path, descriptor_file_name), "r"
428 ) as descriptor_file:
429 content = descriptor_file.read()
430 else:
431 content = file_pkg.read()
432 storage["descriptor"] = descriptor_file_name = filename
433
434 if descriptor_file_name.endswith(".json"):
435 error_text = "Invalid json format "
436 indata = json.load(content)
437 else:
438 error_text = "Invalid yaml format "
439 indata = yaml.load(content, Loader=yaml.SafeLoader)
440
441 # Need to close the file package here so it can be copied from the
442 # revision to the current, unrevisioned record
443 if file_pkg:
444 file_pkg.close()
445 file_pkg = None
446
447 # Fetch both the incoming, proposed revision and the original revision so we
448 # can call a validate method to compare them
449 current_revision_path = _id + "/"
450 self.fs.sync(from_path=current_revision_path)
451 self.fs.sync(from_path=proposed_revision_path)
452
453 if revision > 1:
454 try:
455 self._validate_descriptor_changes(
456 descriptor_file_name,
457 current_revision_path,
458 proposed_revision_path)
459 except Exception as e:
460 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
461 shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
462 # Only delete the new revision. We need to keep the original version in place
463 # as it has not been changed.
464 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
465 raise e
466
467 # Copy the revision to the active package name by its original id
468 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
469 os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
470 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
471 self.fs.mkdir(current_revision_path)
472 self.fs.reverse_sync(from_path=current_revision_path)
473 shutil.rmtree(self.fs.path + _id)
474
475 current_desc["_admin"]["storage"] = storage
476 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
477 current_desc["_admin"]["operationalState"] = "ENABLED"
478
479 indata = self._remove_envelop(indata)
480
481 # Override descriptor with query string kwargs
482 if kwargs:
483 self._update_input_with_kwargs(indata, kwargs)
484
485 deep_update_rfc7396(current_desc, indata)
486 current_desc = self.check_conflict_on_edit(
487 session, current_desc, indata, _id=_id
488 )
489 current_desc["_admin"]["modified"] = time()
490 current_desc["_admin"]["revision"] = revision
491 self.db.replace(self.topic, _id, current_desc)
492
493 # Store a copy of the package as a point in time revision
494 revision_desc = dict(current_desc)
495 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
496 self.db.create(self.topic + "_revisions", revision_desc)
497
498 indata["_id"] = _id
499 self._send_msg("edited", indata)
500
501 # TODO if descriptor has changed because kwargs update content and remove cached zip
502 # TODO if zip is not present creates one
503 return True
504
505 except EngineException:
506 raise
507 except IndexError:
508 raise EngineException(
509 "invalid Content-Range header format. Expected 'bytes start-end/total'",
510 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
511 )
512 except IOError as e:
513 raise EngineException(
514 "invalid upload transaction sequence: '{}'".format(e),
515 HTTPStatus.BAD_REQUEST,
516 )
517 except tarfile.ReadError as e:
518 raise EngineException(
519 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
520 )
521 except (ValueError, yaml.YAMLError) as e:
522 raise EngineException(error_text + str(e))
523 except ValidationError as e:
524 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
525 finally:
526 if file_pkg:
527 file_pkg.close()
528
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Work out which representations the client accepts
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        # Only fully onboarded packages can be downloaded
        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # A directory path returns its listing; a file path returns the stream
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decision table for the whole-package / descriptor download:
        # pkgtype          accept ZIP  TEXT    -> result
        # manyfiles        yes         X       -> zip
        #                  no          yes     -> error
        # onefile          yes         no      -> zip
        #                  X           yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
619
620 def _remove_yang_prefixes_from_descriptor(self, descriptor):
621 new_descriptor = {}
622 for k, v in descriptor.items():
623 new_v = v
624 if isinstance(v, dict):
625 new_v = self._remove_yang_prefixes_from_descriptor(v)
626 elif isinstance(v, list):
627 new_v = list()
628 for x in v:
629 if isinstance(x, dict):
630 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
631 else:
632 new_v.append(x)
633 new_descriptor[k.split(":")[-1]] = new_v
634 return new_descriptor
635
    def pyangbind_validation(self, item, data, force=False):
        # Base implementation: concrete topics (vnfds, nsds, ...) override this
        # with a real pyangbind model; reaching it means no model exists.
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
641
642 def _validate_input_edit(self, indata, content, force=False):
643 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
644 if "_id" in indata:
645 indata.pop("_id")
646 if "_admin" not in indata:
647 indata["_admin"] = {}
648
649 if "operationalState" in indata:
650 if indata["operationalState"] in ("ENABLED", "DISABLED"):
651 indata["_admin"]["operationalState"] = indata.pop("operationalState")
652 else:
653 raise EngineException(
654 "State '{}' is not a valid operational state".format(
655 indata["operationalState"]
656 ),
657 http_code=HTTPStatus.BAD_REQUEST,
658 )
659
660 # In the case of user defined data, we need to put the data in the root of the object
661 # to preserve current expected behaviour
662 if "userDefinedData" in indata:
663 data = indata.pop("userDefinedData")
664 if type(data) == dict:
665 indata["_admin"]["userDefinedData"] = data
666 else:
667 raise EngineException(
668 "userDefinedData should be an object, but is '{}' instead".format(
669 type(data)
670 ),
671 http_code=HTTPStatus.BAD_REQUEST,
672 )
673
674 if (
675 "operationalState" in indata["_admin"]
676 and content["_admin"]["operationalState"]
677 == indata["_admin"]["operationalState"]
678 ):
679 raise EngineException(
680 "operationalState already {}".format(
681 content["_admin"]["operationalState"]
682 ),
683 http_code=HTTPStatus.CONFLICT,
684 )
685
686 return indata
687
    def _validate_descriptor_changes(self,
                                     descriptor_file_name,
                                     old_descriptor_directory,
                                     new_descriptor_directory):
        # Hook called on package re-upload (revision > 1): subclasses compare the
        # previously extracted package with the proposed one and reject changes
        # that are not allowed while the descriptor is in use.
        # Todo: compare changes and throw a meaningful exception for the user to understand
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass
699
class VnfdTopic(DescriptorTopic):
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        """Initialize the VNFD topic on top of the generic descriptor topic."""
        super().__init__(db, fs, msg, auth)
706
707 def pyangbind_validation(self, item, data, force=False):
708 if self._descriptor_data_is_in_old_format(data):
709 raise EngineException(
710 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
711 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
712 )
713 try:
714 myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
715 pybindJSONDecoder.load_ietf_json(
716 {"etsi-nfv-vnfd:vnfd": data},
717 None,
718 None,
719 obj=myvnfd,
720 path_helper=True,
721 skip_unknown=force,
722 )
723 out = pybindJSON.dumps(myvnfd, mode="ietf")
724 desc_out = self._remove_envelop(yaml.safe_load(out))
725 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
726 return utils.deep_update_dict(data, desc_out)
727 except Exception as e:
728 raise EngineException(
729 "Error in pyangbind validation: {}".format(str(e)),
730 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
731 )
732
733 @staticmethod
734 def _descriptor_data_is_in_old_format(data):
735 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
736
737 @staticmethod
738 def _remove_envelop(indata=None):
739 if not indata:
740 return {}
741 clean_indata = indata
742
743 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
744 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
745 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
746 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
747 elif clean_indata.get("vnfd"):
748 if not isinstance(clean_indata["vnfd"], dict):
749 raise EngineException("'vnfd' must be dict")
750 clean_indata = clean_indata["vnfd"]
751
752 return clean_indata
753
754 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
755 final_content = super().check_conflict_on_edit(
756 session, final_content, edit_content, _id
757 )
758
759 # set type of vnfd
760 contains_pdu = False
761 contains_vdu = False
762 for vdu in get_iterable(final_content.get("vdu")):
763 if vdu.get("pdu-type"):
764 contains_pdu = True
765 else:
766 contains_vdu = True
767 if contains_pdu:
768 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
769 elif contains_vdu:
770 final_content["_admin"]["type"] = "vnfd"
771 # if neither vud nor pdu do not fill type
772 return final_content
773
774 def check_conflict_on_del(self, session, _id, db_content):
775 """
776 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
777 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
778 that uses this vnfd
779 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
780 :param _id: vnfd internal id
781 :param db_content: The database content of the _id.
782 :return: None or raises EngineException with the conflict
783 """
784 if session["force"]:
785 return
786 descriptor = db_content
787 descriptor_id = descriptor.get("id")
788 if not descriptor_id: # empty vnfd not uploaded
789 return
790
791 _filter = self._get_project_filter(session)
792
793 # check vnfrs using this vnfd
794 _filter["vnfd-id"] = _id
795 if self.db.get_list("vnfrs", _filter):
796 raise EngineException(
797 "There is at least one VNF instance using this descriptor",
798 http_code=HTTPStatus.CONFLICT,
799 )
800
801 # check NSD referencing this VNFD
802 del _filter["vnfd-id"]
803 _filter["vnfd-id"] = descriptor_id
804 if self.db.get_list("nsds", _filter):
805 raise EngineException(
806 "There is at least one NS package referencing this descriptor",
807 http_code=HTTPStatus.CONFLICT,
808 )
809
810 def _validate_input_new(self, indata, storage_params, force=False):
811 indata.pop("onboardingState", None)
812 indata.pop("operationalState", None)
813 indata.pop("usageState", None)
814 indata.pop("links", None)
815
816 indata = self.pyangbind_validation("vnfds", indata, force)
817 # Cross references validation in the descriptor
818
819 self.validate_mgmt_interface_connection_point(indata)
820
821 for vdu in get_iterable(indata.get("vdu")):
822 self.validate_vdu_internal_connection_points(vdu)
823 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
824 self._validate_vdu_charms_in_package(storage_params, indata)
825
826 self._validate_vnf_charms_in_package(storage_params, indata)
827
828 self.validate_external_connection_points(indata)
829 self.validate_internal_virtual_links(indata)
830 self.validate_monitoring_params(indata)
831 self.validate_scaling_group_descriptor(indata)
832
833 return indata
834
835 @staticmethod
836 def validate_mgmt_interface_connection_point(indata):
837 if not indata.get("vdu"):
838 return
839 if not indata.get("mgmt-cp"):
840 raise EngineException(
841 "'mgmt-cp' is a mandatory field and it is not defined",
842 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
843 )
844
845 for cp in get_iterable(indata.get("ext-cpd")):
846 if cp["id"] == indata["mgmt-cp"]:
847 break
848 else:
849 raise EngineException(
850 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
851 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
852 )
853
854 @staticmethod
855 def validate_vdu_internal_connection_points(vdu):
856 int_cpds = set()
857 for cpd in get_iterable(vdu.get("int-cpd")):
858 cpd_id = cpd.get("id")
859 if cpd_id and cpd_id in int_cpds:
860 raise EngineException(
861 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
862 vdu["id"], cpd_id
863 ),
864 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
865 )
866 int_cpds.add(cpd_id)
867
868 @staticmethod
869 def validate_external_connection_points(indata):
870 all_vdus_int_cpds = set()
871 for vdu in get_iterable(indata.get("vdu")):
872 for int_cpd in get_iterable(vdu.get("int-cpd")):
873 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
874
875 ext_cpds = set()
876 for cpd in get_iterable(indata.get("ext-cpd")):
877 cpd_id = cpd.get("id")
878 if cpd_id and cpd_id in ext_cpds:
879 raise EngineException(
880 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
881 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
882 )
883 ext_cpds.add(cpd_id)
884
885 int_cpd = cpd.get("int-cpd")
886 if int_cpd:
887 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
888 raise EngineException(
889 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
890 cpd_id
891 ),
892 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
893 )
894 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
895
896 def _validate_vdu_charms_in_package(self, storage_params, indata):
897 for df in indata["df"]:
898 if (
899 "lcm-operations-configuration" in df
900 and "operate-vnf-op-config" in df["lcm-operations-configuration"]
901 ):
902 configs = df["lcm-operations-configuration"][
903 "operate-vnf-op-config"
904 ].get("day1-2", [])
905 vdus = df.get("vdu-profile", [])
906 for vdu in vdus:
907 for config in configs:
908 if config["id"] == vdu["id"] and utils.find_in_list(
909 config.get("execution-environment-list", []),
910 lambda ee: "juju" in ee,
911 ):
912 if not self._validate_package_folders(
913 storage_params, "charms"
914 ) and not self._validate_package_folders(
915 storage_params, "Scripts/charms"
916 ):
917 raise EngineException(
918 "Charm defined in vnf[id={}] but not present in "
919 "package".format(indata["id"])
920 )
921
922 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
923 if not vdu.get("cloud-init-file"):
924 return
925 if not self._validate_package_folders(
926 storage_params, "cloud_init", vdu["cloud-init-file"]
927 ) and not self._validate_package_folders(
928 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
929 ):
930 raise EngineException(
931 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
932 "package".format(indata["id"], vdu["id"])
933 )
934
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these `return` statements abort the check for ALL
            # remaining deployment flavors as soon as one df lacks the key —
            # presumably every df carries the same configuration; confirm
            # whether `continue` was intended.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # only the day1-2 block named after the VNF itself is VNF-level config
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        # charms may live at the package root or under Scripts/
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
962
963 def _validate_package_folders(self, storage_params, folder, file=None):
964 if not storage_params:
965 return False
966 elif not storage_params.get("pkg-dir"):
967 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
968 f = "{}_/{}".format(
969 storage_params["folder"], folder
970 )
971 else:
972 f = "{}/{}".format(
973 storage_params["folder"], folder
974 )
975 if file:
976 return self.fs.file_exists("{}/{}".format(f, file), "file")
977 else:
978 if self.fs.file_exists(f, "dir"):
979 if self.fs.dir_ls(f):
980 return True
981 return False
982 else:
983 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
984 f = "{}_/{}/{}".format(
985 storage_params["folder"], storage_params["pkg-dir"], folder
986 )
987 else:
988 f = "{}/{}/{}".format(
989 storage_params["folder"], storage_params["pkg-dir"], folder
990 )
991 if file:
992 return self.fs.file_exists("{}/{}".format(f, file), "file")
993 else:
994 if self.fs.file_exists(f, "dir"):
995 if self.fs.dir_ls(f):
996 return True
997 return False
998
999 @staticmethod
1000 def validate_internal_virtual_links(indata):
1001 all_ivld_ids = set()
1002 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1003 ivld_id = ivld.get("id")
1004 if ivld_id and ivld_id in all_ivld_ids:
1005 raise EngineException(
1006 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1007 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1008 )
1009 else:
1010 all_ivld_ids.add(ivld_id)
1011
1012 for vdu in get_iterable(indata.get("vdu")):
1013 for int_cpd in get_iterable(vdu.get("int-cpd")):
1014 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1015 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1016 raise EngineException(
1017 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1018 "int-virtual-link-desc".format(
1019 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1020 ),
1021 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1022 )
1023
1024 for df in get_iterable(indata.get("df")):
1025 for vlp in get_iterable(df.get("virtual-link-profile")):
1026 vlp_ivld_id = vlp.get("id")
1027 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1028 raise EngineException(
1029 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1030 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1031 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1032 )
1033
1034 @staticmethod
1035 def validate_monitoring_params(indata):
1036 all_monitoring_params = set()
1037 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1038 for mp in get_iterable(ivld.get("monitoring-parameters")):
1039 mp_id = mp.get("id")
1040 if mp_id and mp_id in all_monitoring_params:
1041 raise EngineException(
1042 "Duplicated monitoring-parameter id in "
1043 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1044 ivld["id"], mp_id
1045 ),
1046 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1047 )
1048 else:
1049 all_monitoring_params.add(mp_id)
1050
1051 for vdu in get_iterable(indata.get("vdu")):
1052 for mp in get_iterable(vdu.get("monitoring-parameter")):
1053 mp_id = mp.get("id")
1054 if mp_id and mp_id in all_monitoring_params:
1055 raise EngineException(
1056 "Duplicated monitoring-parameter id in "
1057 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1058 vdu["id"], mp_id
1059 ),
1060 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1061 )
1062 else:
1063 all_monitoring_params.add(mp_id)
1064
1065 for df in get_iterable(indata.get("df")):
1066 for mp in get_iterable(df.get("monitoring-parameter")):
1067 mp_id = mp.get("id")
1068 if mp_id and mp_id in all_monitoring_params:
1069 raise EngineException(
1070 "Duplicated monitoring-parameter id in "
1071 "df[id='{}']:monitoring-parameter[id='{}']".format(
1072 df["id"], mp_id
1073 ),
1074 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1075 )
1076 else:
1077 all_monitoring_params.add(mp_id)
1078
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate scaling-aspect references inside every df of the VNFD.

        Ensures each scaling-criteria:vnf-monitoring-param-ref points at a
        declared monitoring parameter, and each scaling-config-action refers to
        an existing day1-2 config-primitive of this VNF.

        :param indata: the VNFD being validated
        :raises: EngineException (422) on any dangling reference
        """
        # First collect every monitoring-parameter id declared anywhere.
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Every scaling criteria must reference a known monitoring param.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 block whose id
                    # matches the VNF id.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # for/else: raise only when NO config-primitive of any day1-2
                    # block matches the referenced primitive name.
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1159
1160 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1161 """
1162 Deletes associate file system storage (via super)
1163 Deletes associated vnfpkgops from database.
1164 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1165 :param _id: server internal id
1166 :param db_content: The database content of the descriptor
1167 :return: None
1168 :raises: FsException in case of error while deleting associated storage
1169 """
1170 super().delete_extra(session, _id, db_content, not_send_msg)
1171 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1172 self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
1173
1174 def sol005_projection(self, data):
1175 data["onboardingState"] = data["_admin"]["onboardingState"]
1176 data["operationalState"] = data["_admin"]["operationalState"]
1177 data["usageState"] = data["_admin"]["usageState"]
1178
1179 links = {}
1180 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1181 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1182 links["packageContent"] = {
1183 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1184 }
1185 data["_links"] = links
1186
1187 return super().sol005_projection(data)
1188
1189 @staticmethod
1190 def find_software_version(vnfd: dict) -> str:
1191 """Find the sotware version in the VNFD descriptors
1192
1193 Args:
1194 vnfd (dict): Descriptor as a dictionary
1195
1196 Returns:
1197 software-version (str)
1198 """
1199 default_sw_version = "1.0"
1200 if vnfd.get("vnfd"):
1201 vnfd = vnfd["vnfd"]
1202 if vnfd.get("software-version"):
1203 return vnfd["software-version"]
1204 else:
1205 return default_sw_version
1206
    @staticmethod
    def extract_policies(vnfd: dict) -> dict:
        """Removes the policies from the VNFD descriptors

        Currently a pass-through: policy extraction is not implemented yet, so
        the descriptor is returned unchanged.

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict): VNFD which does not include policies
        """
        # TODO: Extract the policy related parts from the VNFD
        return vnfd
1219
1220 @staticmethod
1221 def extract_day12_primitives(vnfd: dict) -> dict:
1222 """Removes the day12 primitives from the VNFD descriptors
1223
1224 Args:
1225 vnfd (dict): Descriptor as a dictionary
1226
1227 Returns:
1228 vnfd (dict)
1229 """
1230 for df_id, df in enumerate(vnfd.get("df", {})):
1231 if (
1232 df.get("lcm-operations-configuration", {})
1233 .get("operate-vnf-op-config", {})
1234 .get("day1-2")
1235 ):
1236 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1237 "day1-2"
1238 )
1239 for config_id, config in enumerate(day12):
1240 for key in [
1241 "initial-config-primitive",
1242 "config-primitive",
1243 "terminate-config-primitive",
1244 ]:
1245 config.pop(key, None)
1246 day12[config_id] = config
1247 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1248 "day1-2"
1249 ] = day12
1250 vnfd["df"][df_id] = df
1251 return vnfd
1252
1253 def remove_modifiable_items(self, vnfd: dict) -> dict:
1254 """Removes the modifiable parts from the VNFD descriptors
1255
1256 It calls different extract functions according to different update types
1257 to clear all the modifiable items from VNFD
1258
1259 Args:
1260 vnfd (dict): Descriptor as a dictionary
1261
1262 Returns:
1263 vnfd (dict): Descriptor which does not include modifiable contents
1264 """
1265 if vnfd.get("vnfd"):
1266 vnfd = vnfd["vnfd"]
1267 vnfd.pop("_admin", None)
1268 # If the other extractions need to be done from VNFD,
1269 # the new extract methods could be appended to below list.
1270 for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1271 vnfd_temp = extract_function(vnfd)
1272 vnfd = vnfd_temp
1273 return vnfd
1274
    def _validate_descriptor_changes(
        self,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new VNFD descriptors and validates the new descriptor.

        After stripping the modifiable parts from both descriptors, any remaining
        difference is a disallowed change. Descriptors whose software-version
        differs are exempt (a version bump is a legitimate upgrade).

        Args:
            descriptor_file_name (str): Name of the descriptor file
            old_descriptor_directory (str): Directory of descriptor which is in-use
            new_descriptor_directory (str): Directory of directory which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error when there are unallowed changes
        """
        try:
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory, descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.load(
                        old_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    new_content = yaml.load(
                        new_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    if old_content and new_content:
                        # A different software-version means an upgrade: skip the
                        # disallowed-change check entirely.
                        if self.find_software_version(
                            old_content
                        ) != self.find_software_version(new_content):
                            return
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )
                        if disallowed_change:
                            # NOTE(review): only "values_changed" entries are
                            # reported; a diff with only added/removed items makes
                            # .get("values_changed") return None and the .keys()
                            # call raise AttributeError (re-raised below) — confirm
                            # whether that is intended.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + " , " + b,
                                [
                                    # lstrip("root") strips the DeepDiff "root"
                                    # prefix from paths like root['x'].
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )
                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the vnf descriptor.",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            # Re-raise with the same exception type so callers still see the
            # original class, but with a descriptor-processing message.
            raise type(e)(
                "VNF Descriptor could not be processed with error: {}.".format(e)
            )
1340
1341
class NsdTopic(DescriptorTopic):
    """Topic class handling NS descriptors (SOL006 NSD)."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NSD against the ETSI SOL006 pyangbind model.

        :param item: topic name (kept for interface uniformity)
        :param data: descriptor content to validate
        :param force: if True, unknown fields are skipped instead of failing
        :return: normalized descriptor as produced by the pyangbind round-trip
        :raises: EngineException (422) on old format or model violation
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile content is kept apart and restored after the
            # round-trip, since pyangbind would not preserve it as-is.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 OSM descriptors used the "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        # Unwrap the "nsd"/"etsi-nfv-nsd:nsd" envelope down to the single nsd item.
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a newly uploaded NSD and its internal cross references."""
        # Strip read-only SOL005 attributes before validation.
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if it contains cloud-init-file or charms, artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt-network vld."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df vnf-profile:vnfd-id is declared in the NSD vnfd-id list."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if type(data) == dict:
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Editing to the same operational state is a no-op and is rejected.
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return a {vnfd-id: vnfd} map for every vnfd referenced by the NSD.

        :raises: EngineException (409) when a referenced vnfd does not exist
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                # When several versions exist, the first result is used.
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check constituent-cpd-id references against the vnfd ext-cpd ids."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base conflict checks, then validate NSD-to-VNFD dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated descriptor revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic+"_revisions", { "_id": { "$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the NSD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        # Unwrap nested "nsd" envelopes (and single-element lists) first.
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_file_name: Name of the descriptor file
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of directory which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case of error if the changes are not allowed
        """

        try:
            # NOTE(review): here the NEW directory is rstripped while the VNFD
            # variant rstrips the OLD one — presumably both should be treated
            # the same way; confirm.
            with self.fs.file_open(
                (old_descriptor_directory, descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.load(
                        old_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    new_content = yaml.load(
                        new_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )
                        if disallowed_change:
                            # Only "values_changed" entries are reported; a diff
                            # with only added/removed items would make .keys()
                            # fail with AttributeError (re-raised below).
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )
                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Expose _admin state fields and SOL005 links on the returned NSD."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1734
1735
class NstTopic(DescriptorTopic):
    """Topic class handling Network Slice Templates (NST)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NST against the OSM information model.

        :param item: topic name (kept for interface uniformity)
        :param data: descriptor content to validate
        :param force: if True, unknown fields are skipped instead of failing
        :return: normalized descriptor as produced by the pyangbind round-trip
        :raises: EngineException (422) on model violation
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        # Unwrap the "nst"/"nst:nst" envelope down to the single nst item.
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Strip read-only attributes and validate a newly uploaded NST."""
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base conflict checks, then validate NST-to-NSD dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose _admin state fields and SOL005 links on the returned NST."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1853
1854
class PduTopic(BaseTopic):
    """Topic class handling Physical Deployment Unit (PDU) descriptors."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply base creation defaults, then mark the PDU onboarded and enabled."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        query_filter = self._get_project_filter(session)
        query_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", query_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1890
1891
class VnfPkgOpTopic(BaseTopic):
    """Topic handling VNF package operation occurrences (create-only records)."""

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Operation records are immutable; editing is always rejected."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Operation records are kept as history; deletion is always rejected."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of operation records is always rejected."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        query = BaseTopic._get_project_filter(session)
        query["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", query)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # Locate the requested KDU inside the VNF descriptor
        kdu = next((k for k in vnfd.get("kdu", []) if k["name"] == kdu_name), None)
        if kdu is None:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )

        helm_chart = kdu.get("helm-chart")
        juju_bundle = kdu.get("juju-bundle")
        if helm_chart:
            indata["helm-chart"] = helm_chart
            artifact = helm_chart
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            artifact = juju_bundle
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )

        # A "<repo>/<name>" artifact reference means it lives in a named k8s repository
        match = fullmatch(r"([^/]*)/([^/]*)", artifact)
        repo_name = match.group(1) if match else None
        if repo_name:
            # Reuse the project filter so only repos visible to this project match
            del query["_id"]
            query["name"] = repo_name
            repo = self.db.get_one("k8srepos", query)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url

        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # Mirror the _admin creation timestamp into the SOL005-visible time fields
        creation_time = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = creation_time
        vnfpkgop_desc["startTime"] = creation_time
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None