1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
26 from deepdiff
import DeepDiff
27 from hashlib
import md5
28 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
29 from http
import HTTPStatus
31 from uuid
import uuid4
32 from re
import fullmatch
33 from zipfile
import ZipFile
34 from osm_nbi
.validation
import (
41 from osm_nbi
.base_topic
import (
45 detect_descriptor_usage
,
47 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
48 from osm_im
.nst
import nst
as nst_im
49 from pyangbind
.lib
.serialise
import pybindJSONDecoder
50 import pyangbind
.lib
.pybindJSON
as pybindJSON
51 from osm_nbi
import utils
53 __author__
= "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
55 valid_helm_chart_re
= re
.compile(
56 r
"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
60 class DescriptorTopic(BaseTopic
):
    def __init__(self, db, fs, msg, auth):
        # Thin constructor: database, file-system, message-bus and auth
        # handlers are all stored by the BaseTopic parent class.
        super().__init__(db, fs, msg, auth)
64 def _validate_input_new(self
, indata
, storage_params
, force
=False):
67 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
68 final_content
= super().check_conflict_on_edit(
69 session
, final_content
, edit_content
, _id
72 def _check_unique_id_name(descriptor
, position
=""):
73 for desc_key
, desc_item
in descriptor
.items():
74 if isinstance(desc_item
, list) and desc_item
:
77 for index
, list_item
in enumerate(desc_item
):
78 if isinstance(list_item
, dict):
79 _check_unique_id_name(
80 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
84 list_item
.get("id") or list_item
.get("name")
86 desc_item_id
= "id" if list_item
.get("id") else "name"
87 if desc_item_id
and list_item
.get(desc_item_id
):
88 if list_item
[desc_item_id
] in used_ids
:
89 position
= "{}.{}[{}]".format(
90 position
, desc_key
, index
92 raise EngineException(
93 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
95 list_item
[desc_item_id
],
98 HTTPStatus
.UNPROCESSABLE_ENTITY
,
100 used_ids
.append(list_item
[desc_item_id
])
102 _check_unique_id_name(final_content
)
103 # 1. validate again with pyangbind
104 # 1.1. remove internal keys
106 for k
in ("_id", "_admin"):
107 if k
in final_content
:
108 internal_keys
[k
] = final_content
.pop(k
)
109 storage_params
= internal_keys
["_admin"].get("storage")
110 serialized
= self
._validate
_input
_new
(
111 final_content
, storage_params
, session
["force"]
114 # 1.2. modify final_content with a serialized version
115 final_content
= copy
.deepcopy(serialized
)
116 # 1.3. restore internal keys
117 for k
, v
in internal_keys
.items():
122 # 2. check that this id is not present
123 if "id" in edit_content
:
124 _filter
= self
._get
_project
_filter
(session
)
126 _filter
["id"] = final_content
["id"]
127 _filter
["_id.neq"] = _id
129 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
130 raise EngineException(
131 "{} with id '{}' already exists for this project".format(
132 (str(self
.topic
))[:-1], final_content
["id"]
140 def format_on_new(content
, project_id
=None, make_public
=False):
141 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
142 content
["_admin"]["onboardingState"] = "CREATED"
143 content
["_admin"]["operationalState"] = "DISABLED"
144 content
["_admin"]["usageState"] = "NOT_IN_USE"
146 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
148 Deletes file system storage associated with the descriptor
149 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
150 :param _id: server internal id
151 :param db_content: The database content of the descriptor
152 :param not_send_msg: To not send message (False) or store content (list) instead
153 :return: None if ok or raises EngineException with the problem
155 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
156 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
157 # Remove file revisions
158 if "revision" in db_content
["_admin"]:
159 revision
= db_content
["_admin"]["revision"]
161 self
.fs
.file_delete(_id
+ ":" + str(revision
), ignore_non_exist
=True)
162 revision
= revision
- 1
165 def get_one_by_id(db
, session
, topic
, id):
166 # find owned by this project
167 _filter
= BaseTopic
._get
_project
_filter
(session
)
169 desc_list
= db
.get_list(topic
, _filter
)
170 if len(desc_list
) == 1:
172 elif len(desc_list
) > 1:
174 "Found more than one {} with id='{}' belonging to this project".format(
180 # not found any: try to find public
181 _filter
= BaseTopic
._get
_project
_filter
(session
)
183 desc_list
= db
.get_list(topic
, _filter
)
186 "Not found any {} with id='{}'".format(topic
[:-1], id),
187 HTTPStatus
.NOT_FOUND
,
189 elif len(desc_list
) == 1:
193 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
199 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
201 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
202 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
203 (self.upload_content)
204 :param rollback: list to append created items at database in case a rollback may to be done
205 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
206 :param indata: data to be inserted
207 :param kwargs: used to override the indata descriptor
208 :param headers: http request headers
209 :return: _id, None: identity of the inserted data; and None as there is not any operation
212 # No needed to capture exceptions
214 self
.check_quota(session
)
218 if "userDefinedData" in indata
:
219 indata
= indata
["userDefinedData"]
221 # Override descriptor with query string kwargs
222 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
223 # uncomment when this method is implemented.
224 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
225 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
227 content
= {"_admin": {"userDefinedData": indata
, "revision": 0}}
230 content
, session
["project_id"], make_public
=session
["public"]
232 _id
= self
.db
.create(self
.topic
, content
)
233 rollback
.append({"topic": self
.topic
, "_id": _id
})
234 self
._send
_msg
("created", {"_id": _id
})
237 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
239 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
240 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
241 :param _id : the nsd,vnfd is already created, this is the id
242 :param indata: http body request
243 :param kwargs: user query string to override parameters. NOT USED
244 :param headers: http request headers
245 :return: True if package is completely uploaded or False if partial content has been uploded
246 Raise exception on error
248 # Check that _id exists and it is valid
249 current_desc
= self
.show(session
, _id
)
251 content_range_text
= headers
.get("Content-Range")
252 expected_md5
= headers
.get("Content-File-MD5")
254 content_type
= headers
.get("Content-Type")
257 and "application/gzip" in content_type
258 or "application/x-gzip" in content_type
261 if content_type
and "application/zip" in content_type
:
263 filename
= headers
.get("Content-Filename")
264 if not filename
and compressed
:
265 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
270 if "revision" in current_desc
["_admin"]:
271 revision
= current_desc
["_admin"]["revision"] + 1
273 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
279 if content_range_text
:
281 content_range_text
.replace("-", " ").replace("/", " ").split()
284 content_range
[0] != "bytes"
285 ): # TODO check x<y not negative < total....
287 start
= int(content_range
[1])
288 end
= int(content_range
[2]) + 1
289 total
= int(content_range
[3])
292 # Rather than using a temp folder, we will store the package in a folder based on
293 # the current revision.
294 proposed_revision_path
= (
295 _id
+ ":" + str(revision
)
296 ) # all the content is upload here and if ok, it is rename from id_ to is folder
299 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
300 raise EngineException(
301 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
304 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
305 self
.fs
.mkdir(proposed_revision_path
)
306 fs_rollback
.append(proposed_revision_path
)
308 storage
= self
.fs
.get_params()
309 storage
["folder"] = proposed_revision_path
311 file_path
= (proposed_revision_path
, filename
)
312 if self
.fs
.file_exists(file_path
, "file"):
313 file_size
= self
.fs
.file_size(file_path
)
316 if file_size
!= start
:
317 raise EngineException(
318 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
321 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
323 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
324 if isinstance(indata
, dict):
325 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
326 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
330 indata_text
= indata
.read(4096)
331 indata_len
+= len(indata_text
)
334 file_pkg
.write(indata_text
)
335 if content_range_text
:
336 if indata_len
!= end
- start
:
337 raise EngineException(
338 "Mismatch between Content-Range header {}-{} and body length of {}".format(
339 start
, end
- 1, indata_len
341 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
344 # TODO update to UPLOADING
351 chunk_data
= file_pkg
.read(1024)
353 file_md5
.update(chunk_data
)
354 chunk_data
= file_pkg
.read(1024)
355 if expected_md5
!= file_md5
.hexdigest():
356 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
358 if compressed
== "gzip":
359 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
360 descriptor_file_name
= None
362 tarname
= tarinfo
.name
363 tarname_path
= tarname
.split("/")
365 not tarname_path
[0] or ".." in tarname_path
366 ): # if start with "/" means absolute path
367 raise EngineException(
368 "Absolute path or '..' are not allowed for package descriptor tar.gz"
370 if len(tarname_path
) == 1 and not tarinfo
.isdir():
371 raise EngineException(
372 "All files must be inside a dir for package descriptor tar.gz"
375 tarname
.endswith(".yaml")
376 or tarname
.endswith(".json")
377 or tarname
.endswith(".yml")
379 storage
["pkg-dir"] = tarname_path
[0]
380 if len(tarname_path
) == 2:
381 if descriptor_file_name
:
382 raise EngineException(
383 "Found more than one descriptor file at package descriptor tar.gz"
385 descriptor_file_name
= tarname
386 if not descriptor_file_name
:
387 raise EngineException(
388 "Not found any descriptor file at package descriptor tar.gz"
390 storage
["descriptor"] = descriptor_file_name
391 storage
["zipfile"] = filename
392 self
.fs
.file_extract(tar
, proposed_revision_path
)
393 with self
.fs
.file_open(
394 (proposed_revision_path
, descriptor_file_name
), "r"
395 ) as descriptor_file
:
396 content
= descriptor_file
.read()
397 elif compressed
== "zip":
398 zipfile
= ZipFile(file_pkg
)
399 descriptor_file_name
= None
400 for package_file
in zipfile
.infolist():
401 zipfilename
= package_file
.filename
402 file_path
= zipfilename
.split("/")
404 not file_path
[0] or ".." in zipfilename
405 ): # if start with "/" means absolute path
406 raise EngineException(
407 "Absolute path or '..' are not allowed for package descriptor zip"
411 zipfilename
.endswith(".yaml")
412 or zipfilename
.endswith(".json")
413 or zipfilename
.endswith(".yml")
415 zipfilename
.find("/") < 0
416 or zipfilename
.find("Definitions") >= 0
418 storage
["pkg-dir"] = ""
419 if descriptor_file_name
:
420 raise EngineException(
421 "Found more than one descriptor file at package descriptor zip"
423 descriptor_file_name
= zipfilename
424 if not descriptor_file_name
:
425 raise EngineException(
426 "Not found any descriptor file at package descriptor zip"
428 storage
["descriptor"] = descriptor_file_name
429 storage
["zipfile"] = filename
430 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
432 with self
.fs
.file_open(
433 (proposed_revision_path
, descriptor_file_name
), "r"
434 ) as descriptor_file
:
435 content
= descriptor_file
.read()
437 content
= file_pkg
.read()
438 storage
["descriptor"] = descriptor_file_name
= filename
440 if descriptor_file_name
.endswith(".json"):
441 error_text
= "Invalid json format "
442 indata
= json
.load(content
)
444 error_text
= "Invalid yaml format "
445 indata
= yaml
.safe_load(content
)
447 # Need to close the file package here so it can be copied from the
448 # revision to the current, unrevisioned record
453 # Fetch both the incoming, proposed revision and the original revision so we
454 # can call a validate method to compare them
455 current_revision_path
= _id
+ "/"
456 self
.fs
.sync(from_path
=current_revision_path
)
457 self
.fs
.sync(from_path
=proposed_revision_path
)
461 self
._validate
_descriptor
_changes
(
463 descriptor_file_name
,
464 current_revision_path
,
465 proposed_revision_path
,
467 except Exception as e
:
469 self
.fs
.path
+ current_revision_path
, ignore_errors
=True
472 self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True
474 # Only delete the new revision. We need to keep the original version in place
475 # as it has not been changed.
476 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
479 indata
= self
._remove
_envelop
(indata
)
481 # Override descriptor with query string kwargs
483 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
485 current_desc
["_admin"]["storage"] = storage
486 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
487 current_desc
["_admin"]["operationalState"] = "ENABLED"
488 current_desc
["_admin"]["modified"] = time()
489 current_desc
["_admin"]["revision"] = revision
491 deep_update_rfc7396(current_desc
, indata
)
492 current_desc
= self
.check_conflict_on_edit(
493 session
, current_desc
, indata
, _id
=_id
496 # Copy the revision to the active package name by its original id
497 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
499 self
.fs
.path
+ proposed_revision_path
,
500 self
.fs
.path
+ current_revision_path
,
502 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
503 self
.fs
.mkdir(current_revision_path
)
504 self
.fs
.reverse_sync(from_path
=current_revision_path
)
506 shutil
.rmtree(self
.fs
.path
+ _id
)
508 self
.db
.replace(self
.topic
, _id
, current_desc
)
510 # Store a copy of the package as a point in time revision
511 revision_desc
= dict(current_desc
)
512 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
513 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
517 self
._send
_msg
("edited", indata
)
519 # TODO if descriptor has changed because kwargs update content and remove cached zip
520 # TODO if zip is not present creates one
523 except EngineException
:
526 raise EngineException(
527 "invalid Content-Range header format. Expected 'bytes start-end/total'",
528 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
531 raise EngineException(
532 "invalid upload transaction sequence: '{}'".format(e
),
533 HTTPStatus
.BAD_REQUEST
,
535 except tarfile
.ReadError
as e
:
536 raise EngineException(
537 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
539 except (ValueError, yaml
.YAMLError
) as e
:
540 raise EngineException(error_text
+ str(e
))
541 except ValidationError
as e
:
542 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
546 for file in fs_rollback
:
547 self
.fs
.file_delete(file, ignore_non_exist
=True)
549 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
551 Return the file content of a vnfd or nsd
552 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
553 :param _id: Identity of the vnfd, nsd
554 :param path: artifact path or "$DESCRIPTOR" or None
555 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
556 :return: opened file plus Accept format or raises an exception
558 accept_text
= accept_zip
= False
560 if "text/plain" in accept_header
or "*/*" in accept_header
:
562 if "application/zip" in accept_header
or "*/*" in accept_header
:
563 accept_zip
= "application/zip"
564 elif "application/gzip" in accept_header
:
565 accept_zip
= "application/gzip"
567 if not accept_text
and not accept_zip
:
568 raise EngineException(
569 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
570 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
573 content
= self
.show(session
, _id
)
574 if content
["_admin"]["onboardingState"] != "ONBOARDED":
575 raise EngineException(
576 "Cannot get content because this resource is not at 'ONBOARDED' state. "
577 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
578 http_code
=HTTPStatus
.CONFLICT
,
580 storage
= content
["_admin"]["storage"]
581 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
582 if not storage
.get("pkg-dir") and not storage
.get("folder"):
583 raise EngineException(
584 "Packages does not contains artifacts",
585 http_code
=HTTPStatus
.BAD_REQUEST
,
587 if self
.fs
.file_exists(
588 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
590 folder_content
= self
.fs
.dir_ls(
591 (storage
["folder"], storage
["pkg-dir"], *path
)
593 return folder_content
, "text/plain"
594 # TODO manage folders in http
598 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
600 "application/octet-stream",
603 # pkgtype accept ZIP TEXT -> result
604 # manyfiles yes X -> zip
606 # onefile yes no -> zip
608 contain_many_files
= False
609 if storage
.get("pkg-dir"):
610 # check if there are more than one file in the package, ignoring checksums.txt.
611 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
612 if len(pkg_files
) >= 3 or (
613 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
615 contain_many_files
= True
616 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
618 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
621 elif contain_many_files
and not accept_zip
:
622 raise EngineException(
623 "Packages that contains several files need to be retrieved with 'application/zip'"
625 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
628 if not storage
.get("zipfile"):
629 # TODO generate zipfile if not present
630 raise EngineException(
631 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
633 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
636 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
640 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
642 for k
, v
in descriptor
.items():
644 if isinstance(v
, dict):
645 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
646 elif isinstance(v
, list):
649 if isinstance(x
, dict):
650 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
653 new_descriptor
[k
.split(":")[-1]] = new_v
654 return new_descriptor
656 def pyangbind_validation(self
, item
, data
, force
=False):
657 raise EngineException(
658 "Not possible to validate '{}' item".format(item
),
659 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
662 def _validate_input_edit(self
, indata
, content
, force
=False):
663 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
666 if "_admin" not in indata
:
667 indata
["_admin"] = {}
669 if "operationalState" in indata
:
670 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
671 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
673 raise EngineException(
674 "State '{}' is not a valid operational state".format(
675 indata
["operationalState"]
677 http_code
=HTTPStatus
.BAD_REQUEST
,
680 # In the case of user defined data, we need to put the data in the root of the object
681 # to preserve current expected behaviour
682 if "userDefinedData" in indata
:
683 data
= indata
.pop("userDefinedData")
684 if isinstance(data
, dict):
685 indata
["_admin"]["userDefinedData"] = data
687 raise EngineException(
688 "userDefinedData should be an object, but is '{}' instead".format(
691 http_code
=HTTPStatus
.BAD_REQUEST
,
695 "operationalState" in indata
["_admin"]
696 and content
["_admin"]["operationalState"]
697 == indata
["_admin"]["operationalState"]
699 raise EngineException(
700 "operationalState already {}".format(
701 content
["_admin"]["operationalState"]
703 http_code
=HTTPStatus
.CONFLICT
,
708 def _validate_descriptor_changes(
711 descriptor_file_name
,
712 old_descriptor_directory
,
713 new_descriptor_directory
,
716 # raise EngineException(
717 # "Error in validating new descriptor: <NODE> cannot be modified",
718 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
723 class VnfdTopic(DescriptorTopic
):
727 def __init__(self
, db
, fs
, msg
, auth
):
728 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
730 def pyangbind_validation(self
, item
, data
, force
=False):
731 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
732 raise EngineException(
733 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
734 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
737 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
738 pybindJSONDecoder
.load_ietf_json(
739 {"etsi-nfv-vnfd:vnfd": data
},
746 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
747 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
748 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
749 return utils
.deep_update_dict(data
, desc_out
)
750 except Exception as e
:
751 raise EngineException(
752 "Error in pyangbind validation: {}".format(str(e
)),
753 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
757 def _descriptor_data_is_in_old_format(data
):
758 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
761 def _remove_envelop(indata
=None):
764 clean_indata
= indata
766 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
767 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
768 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
769 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
770 elif clean_indata
.get("vnfd"):
771 if not isinstance(clean_indata
["vnfd"], dict):
772 raise EngineException("'vnfd' must be dict")
773 clean_indata
= clean_indata
["vnfd"]
777 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
778 final_content
= super().check_conflict_on_edit(
779 session
, final_content
, edit_content
, _id
785 for vdu
in get_iterable(final_content
.get("vdu")):
786 if vdu
.get("pdu-type"):
791 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
793 final_content
["_admin"]["type"] = "vnfd"
794 # if neither vud nor pdu do not fill type
797 def check_conflict_on_del(self
, session
, _id
, db_content
):
799 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
800 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
802 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
803 :param _id: vnfd internal id
804 :param db_content: The database content of the _id.
805 :return: None or raises EngineException with the conflict
809 descriptor
= db_content
810 descriptor_id
= descriptor
.get("id")
811 if not descriptor_id
: # empty vnfd not uploaded
814 _filter
= self
._get
_project
_filter
(session
)
816 # check vnfrs using this vnfd
817 _filter
["vnfd-id"] = _id
818 if self
.db
.get_list("vnfrs", _filter
):
819 raise EngineException(
820 "There is at least one VNF instance using this descriptor",
821 http_code
=HTTPStatus
.CONFLICT
,
824 # check NSD referencing this VNFD
825 del _filter
["vnfd-id"]
826 _filter
["vnfd-id"] = descriptor_id
827 if self
.db
.get_list("nsds", _filter
):
828 raise EngineException(
829 "There is at least one NS package referencing this descriptor",
830 http_code
=HTTPStatus
.CONFLICT
,
833 def _validate_input_new(self
, indata
, storage_params
, force
=False):
834 indata
.pop("onboardingState", None)
835 indata
.pop("operationalState", None)
836 indata
.pop("usageState", None)
837 indata
.pop("links", None)
839 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
840 # Cross references validation in the descriptor
842 self
.validate_mgmt_interface_connection_point(indata
)
844 for vdu
in get_iterable(indata
.get("vdu")):
845 self
.validate_vdu_internal_connection_points(vdu
)
846 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
847 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
849 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
851 self
.validate_external_connection_points(indata
)
852 self
.validate_internal_virtual_links(indata
)
853 self
.validate_monitoring_params(indata
)
854 self
.validate_scaling_group_descriptor(indata
)
855 self
.validate_healing_group_descriptor(indata
)
856 self
.validate_alarm_group_descriptor(indata
)
857 self
.validate_storage_compute_descriptor(indata
)
858 self
.validate_helm_chart(indata
)
863 def validate_helm_chart(indata
):
864 kdus
= indata
.get("kdu", [])
866 helm_chart_value
= kdu
.get("helm-chart")
867 if not helm_chart_value
:
869 if not valid_helm_chart_re
.match(helm_chart_value
):
870 raise EngineException(
871 "helm-chart '{}' is not valid".format(helm_chart_value
),
872 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
876 def validate_mgmt_interface_connection_point(indata
):
877 if not indata
.get("vdu"):
879 if not indata
.get("mgmt-cp"):
880 raise EngineException(
881 "'mgmt-cp' is a mandatory field and it is not defined",
882 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
885 for cp
in get_iterable(indata
.get("ext-cpd")):
886 if cp
["id"] == indata
["mgmt-cp"]:
889 raise EngineException(
890 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
891 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
895 def validate_vdu_internal_connection_points(vdu
):
897 for cpd
in get_iterable(vdu
.get("int-cpd")):
898 cpd_id
= cpd
.get("id")
899 if cpd_id
and cpd_id
in int_cpds
:
900 raise EngineException(
901 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
904 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
909 def validate_external_connection_points(indata
):
910 all_vdus_int_cpds
= set()
911 for vdu
in get_iterable(indata
.get("vdu")):
912 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
913 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
916 for cpd
in get_iterable(indata
.get("ext-cpd")):
917 cpd_id
= cpd
.get("id")
918 if cpd_id
and cpd_id
in ext_cpds
:
919 raise EngineException(
920 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
921 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
925 int_cpd
= cpd
.get("int-cpd")
927 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
928 raise EngineException(
929 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
932 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
934 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
936 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
937 for df
in indata
["df"]:
939 "lcm-operations-configuration" in df
940 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
942 configs
= df
["lcm-operations-configuration"][
943 "operate-vnf-op-config"
945 vdus
= df
.get("vdu-profile", [])
947 for config
in configs
:
948 if config
["id"] == vdu
["id"] and utils
.find_in_list(
949 config
.get("execution-environment-list", []),
950 lambda ee
: "juju" in ee
,
952 if not self
._validate
_package
_folders
(
953 storage_params
, "charms"
954 ) and not self
._validate
_package
_folders
(
955 storage_params
, "Scripts/charms"
957 raise EngineException(
958 "Charm defined in vnf[id={}] but not present in "
959 "package".format(indata
["id"])
962 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
963 if not vdu
.get("cloud-init-file"):
965 if not self
._validate
_package
_folders
(
966 storage_params
, "cloud_init", vdu
["cloud-init-file"]
967 ) and not self
._validate
_package
_folders
(
968 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
970 raise EngineException(
971 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
972 "package".format(indata
["id"], vdu
["id"])
975 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
976 # Get VNF configuration through new container
977 for deployment_flavor
in indata
.get("df", []):
978 if "lcm-operations-configuration" not in deployment_flavor
:
981 "operate-vnf-op-config"
982 not in deployment_flavor
["lcm-operations-configuration"]
985 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
986 "operate-vnf-op-config"
988 if day_1_2_config
["id"] == indata
["id"]:
989 if utils
.find_in_list(
990 day_1_2_config
.get("execution-environment-list", []),
991 lambda ee
: "juju" in ee
,
993 if not self
._validate
_package
_folders
(
994 storage_params
, "charms"
995 ) and not self
._validate
_package
_folders
(
996 storage_params
, "Scripts/charms"
998 raise EngineException(
999 "Charm defined in vnf[id={}] but not present in "
1000 "package".format(indata
["id"])
1003 def _validate_package_folders(self
, storage_params
, folder
, file=None):
1004 if not storage_params
:
1006 elif not storage_params
.get("pkg-dir"):
1007 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1008 f
= "{}_/{}".format(storage_params
["folder"], folder
)
1010 f
= "{}/{}".format(storage_params
["folder"], folder
)
1012 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1014 if self
.fs
.file_exists(f
, "dir"):
1015 if self
.fs
.dir_ls(f
):
1019 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1020 f
= "{}_/{}/{}".format(
1021 storage_params
["folder"], storage_params
["pkg-dir"], folder
1024 f
= "{}/{}/{}".format(
1025 storage_params
["folder"], storage_params
["pkg-dir"], folder
1028 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1030 if self
.fs
.file_exists(f
, "dir"):
1031 if self
.fs
.dir_ls(f
):
1036 def validate_internal_virtual_links(indata
):
1037 all_ivld_ids
= set()
1038 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1039 ivld_id
= ivld
.get("id")
1040 if ivld_id
and ivld_id
in all_ivld_ids
:
1041 raise EngineException(
1042 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id
),
1043 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1046 all_ivld_ids
.add(ivld_id
)
1048 for vdu
in get_iterable(indata
.get("vdu")):
1049 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
1050 int_cpd_ivld_id
= int_cpd
.get("int-virtual-link-desc")
1051 if int_cpd_ivld_id
and int_cpd_ivld_id
not in all_ivld_ids
:
1052 raise EngineException(
1053 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1054 "int-virtual-link-desc".format(
1055 vdu
["id"], int_cpd
["id"], int_cpd_ivld_id
1057 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1060 for df
in get_iterable(indata
.get("df")):
1061 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1062 vlp_ivld_id
= vlp
.get("id")
1063 if vlp_ivld_id
and vlp_ivld_id
not in all_ivld_ids
:
1064 raise EngineException(
1065 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1066 "int-virtual-link-desc".format(df
["id"], vlp_ivld_id
),
1067 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1071 def validate_monitoring_params(indata
):
1072 all_monitoring_params
= set()
1073 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1074 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1075 mp_id
= mp
.get("id")
1076 if mp_id
and mp_id
in all_monitoring_params
:
1077 raise EngineException(
1078 "Duplicated monitoring-parameter id in "
1079 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1082 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1085 all_monitoring_params
.add(mp_id
)
1087 for vdu
in get_iterable(indata
.get("vdu")):
1088 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1089 mp_id
= mp
.get("id")
1090 if mp_id
and mp_id
in all_monitoring_params
:
1091 raise EngineException(
1092 "Duplicated monitoring-parameter id in "
1093 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1096 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1099 all_monitoring_params
.add(mp_id
)
1101 for df
in get_iterable(indata
.get("df")):
1102 for mp
in get_iterable(df
.get("monitoring-parameter")):
1103 mp_id
= mp
.get("id")
1104 if mp_id
and mp_id
in all_monitoring_params
:
1105 raise EngineException(
1106 "Duplicated monitoring-parameter id in "
1107 "df[id='{}']:monitoring-parameter[id='{}']".format(
1110 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1113 all_monitoring_params
.add(mp_id
)
1116 def validate_scaling_group_descriptor(indata
):
1117 all_monitoring_params
= set()
1119 for df
in get_iterable(indata
.get("df")):
1120 for il
in get_iterable(df
.get("instantiation-level")):
1121 for vl
in get_iterable(il
.get("vdu-level")):
1122 all_vdu_ids
.add(vl
.get("vdu-id"))
1124 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1125 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1126 all_monitoring_params
.add(mp
.get("id"))
1128 for vdu
in get_iterable(indata
.get("vdu")):
1129 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1130 all_monitoring_params
.add(mp
.get("id"))
1132 for df
in get_iterable(indata
.get("df")):
1133 for mp
in get_iterable(df
.get("monitoring-parameter")):
1134 all_monitoring_params
.add(mp
.get("id"))
1136 for df
in get_iterable(indata
.get("df")):
1137 for sa
in get_iterable(df
.get("scaling-aspect")):
1138 for deltas
in get_iterable(
1139 sa
.get("aspect-delta-details").get("deltas")
1141 for vds
in get_iterable(deltas
.get("vdu-delta")):
1142 sa_vdu_id
= vds
.get("id")
1143 if sa_vdu_id
and sa_vdu_id
not in all_vdu_ids
:
1144 raise EngineException(
1145 "df[id='{}']:scaling-aspect[id='{}']:aspect-delta-details"
1147 "vdu-id='{}' not defined in vdu".format(
1153 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1156 for df
in get_iterable(indata
.get("df")):
1157 for sa
in get_iterable(df
.get("scaling-aspect")):
1158 for sp
in get_iterable(sa
.get("scaling-policy")):
1159 for sc
in get_iterable(sp
.get("scaling-criteria")):
1160 sc_monitoring_param
= sc
.get("vnf-monitoring-param-ref")
1163 and sc_monitoring_param
not in all_monitoring_params
1165 raise EngineException(
1166 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1167 "[name='{}']:scaling-criteria[name='{}']: "
1168 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1173 sc_monitoring_param
,
1175 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1178 for sca
in get_iterable(sa
.get("scaling-config-action")):
1180 "lcm-operations-configuration" not in df
1181 or "operate-vnf-op-config"
1182 not in df
["lcm-operations-configuration"]
1183 or not utils
.find_in_list(
1184 df
["lcm-operations-configuration"][
1185 "operate-vnf-op-config"
1186 ].get("day1-2", []),
1187 lambda config
: config
["id"] == indata
["id"],
1190 raise EngineException(
1191 "'day1-2 configuration' not defined in the descriptor but it is "
1192 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1195 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1197 for configuration
in get_iterable(
1198 df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1202 for primitive
in get_iterable(
1203 configuration
.get("config-primitive")
1207 == sca
["vnf-config-primitive-name-ref"]
1211 raise EngineException(
1212 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1213 "config-primitive-name-ref='{}' does not match any "
1214 "day1-2 configuration:config-primitive:name".format(
1217 sca
["vnf-config-primitive-name-ref"],
1219 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1223 def validate_healing_group_descriptor(indata
):
1225 for df
in get_iterable(indata
.get("df")):
1226 for il
in get_iterable(df
.get("instantiation-level")):
1227 for vl
in get_iterable(il
.get("vdu-level")):
1228 all_vdu_ids
.add(vl
.get("vdu-id"))
1230 for df
in get_iterable(indata
.get("df")):
1231 for ha
in get_iterable(df
.get("healing-aspect")):
1232 for hp
in get_iterable(ha
.get("healing-policy")):
1233 hp_monitoring_param
= hp
.get("vdu-id")
1234 if hp_monitoring_param
and hp_monitoring_param
not in all_vdu_ids
:
1235 raise EngineException(
1236 "df[id='{}']:healing-aspect[id='{}']:healing-policy"
1238 "vdu-id='{}' not defined in vdu".format(
1242 hp_monitoring_param
,
1244 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1248 def validate_alarm_group_descriptor(indata
):
1249 all_monitoring_params
= set()
1250 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1251 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1252 all_monitoring_params
.add(mp
.get("id"))
1254 for vdu
in get_iterable(indata
.get("vdu")):
1255 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1256 all_monitoring_params
.add(mp
.get("id"))
1258 for df
in get_iterable(indata
.get("df")):
1259 for mp
in get_iterable(df
.get("monitoring-parameter")):
1260 all_monitoring_params
.add(mp
.get("id"))
1262 for vdus
in get_iterable(indata
.get("vdu")):
1263 for alarms
in get_iterable(vdus
.get("alarm")):
1264 alarm_monitoring_param
= alarms
.get("vnf-monitoring-param-ref")
1266 alarm_monitoring_param
1267 and alarm_monitoring_param
not in all_monitoring_params
1269 raise EngineException(
1270 "vdu[id='{}']:alarm[id='{}']:"
1271 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1274 alarm_monitoring_param
,
1276 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1280 def validate_storage_compute_descriptor(indata
):
1282 for vsd
in get_iterable(indata
.get("virtual-storage-desc")):
1283 all_vsd_ids
.add(vsd
.get("id"))
1286 for vcd
in get_iterable(indata
.get("virtual-compute-desc")):
1287 all_vcd_ids
.add(vcd
.get("id"))
1289 for vdus
in get_iterable(indata
.get("vdu")):
1290 for vsd_ref
in vdus
.get("virtual-storage-desc"):
1291 if vsd_ref
and vsd_ref
not in all_vsd_ids
:
1292 raise EngineException(
1293 "vdu[virtual-storage-desc='{}']"
1294 "not defined in vnfd".format(
1297 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1300 for vdus
in get_iterable(indata
.get("vdu")):
1301 vcd_ref
= vdus
.get("virtual-compute-desc")
1302 if vcd_ref
and vcd_ref
not in all_vcd_ids
:
1303 raise EngineException(
1304 "vdu[virtual-compute-desc='{}']"
1305 "not defined in vnfd".format(
1306 vdus
["virtual-compute-desc"],
1308 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1311 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1313 Deletes associate file system storage (via super)
1314 Deletes associated vnfpkgops from database.
1315 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1316 :param _id: server internal id
1317 :param db_content: The database content of the descriptor
1319 :raises: FsException in case of error while deleting associated storage
1321 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1322 self
.db
.del_list("vnfpkgops", {"vnfPkgId": _id
})
1323 self
.db
.del_list(self
.topic
+ "_revisions", {"_id": {"$regex": _id
}})
1325 def sol005_projection(self
, data
):
1326 data
["onboardingState"] = data
["_admin"]["onboardingState"]
1327 data
["operationalState"] = data
["_admin"]["operationalState"]
1328 data
["usageState"] = data
["_admin"]["usageState"]
1331 links
["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data
["_id"])}
1332 links
["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data
["_id"])}
1333 links
["packageContent"] = {
1334 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data
["_id"])
1336 data
["_links"] = links
1338 return super().sol005_projection(data
)
1341 def find_software_version(vnfd
: dict) -> str:
1342 """Find the sotware version in the VNFD descriptors
1345 vnfd (dict): Descriptor as a dictionary
1348 software-version (str)
1350 default_sw_version
= "1.0"
1351 if vnfd
.get("vnfd"):
1353 if vnfd
.get("software-version"):
1354 return vnfd
["software-version"]
1356 return default_sw_version
1359 def extract_policies(vnfd
: dict) -> dict:
1360 """Removes the policies from the VNFD descriptors
1363 vnfd (dict): Descriptor as a dictionary
1366 vnfd (dict): VNFD which does not include policies
1368 for df
in vnfd
.get("df", {}):
1369 for policy
in ["scaling-aspect", "healing-aspect"]:
1370 if df
.get(policy
, {}):
1372 for vdu
in vnfd
.get("vdu", {}):
1373 for alarm_policy
in ["alarm", "monitoring-parameter"]:
1374 if vdu
.get(alarm_policy
, {}):
1375 vdu
.pop(alarm_policy
)
1379 def extract_day12_primitives(vnfd
: dict) -> dict:
1380 """Removes the day12 primitives from the VNFD descriptors
1383 vnfd (dict): Descriptor as a dictionary
1388 for df_id
, df
in enumerate(vnfd
.get("df", {})):
1390 df
.get("lcm-operations-configuration", {})
1391 .get("operate-vnf-op-config", {})
1394 day12
= df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1397 for config_id
, config
in enumerate(day12
):
1399 "initial-config-primitive",
1401 "terminate-config-primitive",
1403 config
.pop(key
, None)
1404 day12
[config_id
] = config
1405 df
["lcm-operations-configuration"]["operate-vnf-op-config"][
1408 vnfd
["df"][df_id
] = df
1411 def remove_modifiable_items(self
, vnfd
: dict) -> dict:
1412 """Removes the modifiable parts from the VNFD descriptors
1414 It calls different extract functions according to different update types
1415 to clear all the modifiable items from VNFD
1418 vnfd (dict): Descriptor as a dictionary
1421 vnfd (dict): Descriptor which does not include modifiable contents
1423 if vnfd
.get("vnfd"):
1425 vnfd
.pop("_admin", None)
1426 # If the other extractions need to be done from VNFD,
1427 # the new extract methods could be appended to below list.
1428 for extract_function
in [self
.extract_day12_primitives
, self
.extract_policies
]:
1429 vnfd_temp
= extract_function(vnfd
)
1433 def _validate_descriptor_changes(
1436 descriptor_file_name
: str,
1437 old_descriptor_directory
: str,
1438 new_descriptor_directory
: str,
1440 """Compares the old and new VNFD descriptors and validates the new descriptor.
1443 old_descriptor_directory (str): Directory of descriptor which is in-use
1444 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1450 EngineException: In case of error when there are unallowed changes
1453 # If VNFD does not exist in DB or it is not in use by any NS,
1454 # validation is not required.
1455 vnfd
= self
.db
.get_one("vnfds", {"_id": descriptor_id
})
1456 if not vnfd
or not detect_descriptor_usage(vnfd
, "vnfds", self
.db
):
1459 # Get the old and new descriptor contents in order to compare them.
1460 with self
.fs
.file_open(
1461 (old_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1462 ) as old_descriptor_file
:
1463 with self
.fs
.file_open(
1464 (new_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1465 ) as new_descriptor_file
:
1466 old_content
= yaml
.safe_load(old_descriptor_file
.read())
1467 new_content
= yaml
.safe_load(new_descriptor_file
.read())
1469 # If software version has changed, we do not need to validate
1470 # the differences anymore.
1471 if old_content
and new_content
:
1472 if self
.find_software_version(
1474 ) != self
.find_software_version(new_content
):
1477 disallowed_change
= DeepDiff(
1478 self
.remove_modifiable_items(old_content
),
1479 self
.remove_modifiable_items(new_content
),
1482 if disallowed_change
:
1483 changed_nodes
= functools
.reduce(
1484 lambda a
, b
: a
+ " , " + b
,
1487 for node
in disallowed_change
.get(
1493 raise EngineException(
1494 f
"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1495 "there are disallowed changes in the vnf descriptor.",
1496 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1506 "VNF Descriptor could not be processed with error: {}.".format(e
)
1510 class NsdTopic(DescriptorTopic
):
1514 def __init__(self
, db
, fs
, msg
, auth
):
1515 super().__init
__(db
, fs
, msg
, auth
)
1517 def pyangbind_validation(self
, item
, data
, force
=False):
1518 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
1519 raise EngineException(
1520 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
1521 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1524 nsd_vnf_profiles
= data
.get("df", [{}])[0].get("vnf-profile", [])
1525 mynsd
= etsi_nfv_nsd
.etsi_nfv_nsd()
1526 pybindJSONDecoder
.load_ietf_json(
1527 {"nsd": {"nsd": [data
]}},
1534 out
= pybindJSON
.dumps(mynsd
, mode
="ietf")
1535 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
1536 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
1537 if nsd_vnf_profiles
:
1538 desc_out
["df"][0]["vnf-profile"] = nsd_vnf_profiles
1540 except Exception as e
:
1541 raise EngineException(
1542 "Error in pyangbind validation: {}".format(str(e
)),
1543 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1547 def _descriptor_data_is_in_old_format(data
):
1548 return ("nsd-catalog" in data
) or ("nsd:nsd-catalog" in data
)
1551 def _remove_envelop(indata
=None):
1554 clean_indata
= indata
1556 if clean_indata
.get("nsd"):
1557 clean_indata
= clean_indata
["nsd"]
1558 elif clean_indata
.get("etsi-nfv-nsd:nsd"):
1559 clean_indata
= clean_indata
["etsi-nfv-nsd:nsd"]
1560 if clean_indata
.get("nsd"):
1562 not isinstance(clean_indata
["nsd"], list)
1563 or len(clean_indata
["nsd"]) != 1
1565 raise EngineException("'nsd' must be a list of only one element")
1566 clean_indata
= clean_indata
["nsd"][0]
1569 def _validate_input_new(self
, indata
, storage_params
, force
=False):
1570 indata
.pop("nsdOnboardingState", None)
1571 indata
.pop("nsdOperationalState", None)
1572 indata
.pop("nsdUsageState", None)
1574 indata
.pop("links", None)
1576 indata
= self
.pyangbind_validation("nsds", indata
, force
)
1577 # Cross references validation in the descriptor
1578 # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
1579 for vld
in get_iterable(indata
.get("virtual-link-desc")):
1580 self
.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld
, indata
)
1582 self
.validate_vnf_profiles_vnfd_id(indata
)
1587 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld
, indata
):
1588 if not vld
.get("mgmt-network"):
1590 vld_id
= vld
.get("id")
1591 for df
in get_iterable(indata
.get("df")):
1592 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1593 if vld_id
and vld_id
== vlp
.get("virtual-link-desc-id"):
1594 if vlp
.get("virtual-link-protocol-data"):
1595 raise EngineException(
1596 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1597 "protocol-data You cannot set a virtual-link-protocol-data "
1598 "when mgmt-network is True".format(df
["id"], vlp
["id"]),
1599 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1603 def validate_vnf_profiles_vnfd_id(indata
):
1604 all_vnfd_ids
= set(get_iterable(indata
.get("vnfd-id")))
1605 for df
in get_iterable(indata
.get("df")):
1606 for vnf_profile
in get_iterable(df
.get("vnf-profile")):
1607 vnfd_id
= vnf_profile
.get("vnfd-id")
1608 if vnfd_id
and vnfd_id
not in all_vnfd_ids
:
1609 raise EngineException(
1610 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1611 "does not match any vnfd-id".format(
1612 df
["id"], vnf_profile
["id"], vnfd_id
1614 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1617 def _validate_input_edit(self
, indata
, content
, force
=False):
1618 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
1620 indata looks as follows:
1621 - In the new case (conformant)
1622 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1623 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1624 - In the old case (backwards-compatible)
1625 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1627 if "_admin" not in indata
:
1628 indata
["_admin"] = {}
1630 if "nsdOperationalState" in indata
:
1631 if indata
["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1632 indata
["_admin"]["operationalState"] = indata
.pop("nsdOperationalState")
1634 raise EngineException(
1635 "State '{}' is not a valid operational state".format(
1636 indata
["nsdOperationalState"]
1638 http_code
=HTTPStatus
.BAD_REQUEST
,
1641 # In the case of user defined data, we need to put the data in the root of the object
1642 # to preserve current expected behaviour
1643 if "userDefinedData" in indata
:
1644 data
= indata
.pop("userDefinedData")
1645 if isinstance(data
, dict):
1646 indata
["_admin"]["userDefinedData"] = data
1648 raise EngineException(
1649 "userDefinedData should be an object, but is '{}' instead".format(
1652 http_code
=HTTPStatus
.BAD_REQUEST
,
1655 "operationalState" in indata
["_admin"]
1656 and content
["_admin"]["operationalState"]
1657 == indata
["_admin"]["operationalState"]
1659 raise EngineException(
1660 "nsdOperationalState already {}".format(
1661 content
["_admin"]["operationalState"]
1663 http_code
=HTTPStatus
.CONFLICT
,
1667 def _check_descriptor_dependencies(self
, session
, descriptor
):
1669 Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
1670 connection points are ok
1671 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1672 :param descriptor: descriptor to be inserted or edit
1673 :return: None or raises exception
1675 if session
["force"]:
1677 vnfds_index
= self
._get
_descriptor
_constituent
_vnfds
_index
(session
, descriptor
)
1679 # Cross references validation in the descriptor and vnfd connection point validation
1680 for df
in get_iterable(descriptor
.get("df")):
1681 self
.validate_df_vnf_profiles_constituent_connection_points(df
, vnfds_index
)
1683 def _get_descriptor_constituent_vnfds_index(self
, session
, descriptor
):
1685 if descriptor
.get("vnfd-id") and not session
["force"]:
1686 for vnfd_id
in get_iterable(descriptor
.get("vnfd-id")):
1687 query_filter
= self
._get
_project
_filter
(session
)
1688 query_filter
["id"] = vnfd_id
1689 vnf_list
= self
.db
.get_list("vnfds", query_filter
)
1691 raise EngineException(
1692 "Descriptor error at 'vnfd-id'='{}' references a non "
1693 "existing vnfd".format(vnfd_id
),
1694 http_code
=HTTPStatus
.CONFLICT
,
1696 vnfds_index
[vnfd_id
] = vnf_list
[0]
1700 def validate_df_vnf_profiles_constituent_connection_points(df
, vnfds_index
):
1701 for vnf_profile
in get_iterable(df
.get("vnf-profile")):
1702 vnfd
= vnfds_index
.get(vnf_profile
["vnfd-id"])
1703 all_vnfd_ext_cpds
= set()
1704 for ext_cpd
in get_iterable(vnfd
.get("ext-cpd")):
1705 if ext_cpd
.get("id"):
1706 all_vnfd_ext_cpds
.add(ext_cpd
.get("id"))
1708 for virtual_link
in get_iterable(
1709 vnf_profile
.get("virtual-link-connectivity")
1711 for vl_cpd
in get_iterable(virtual_link
.get("constituent-cpd-id")):
1712 vl_cpd_id
= vl_cpd
.get("constituent-cpd-id")
1713 if vl_cpd_id
and vl_cpd_id
not in all_vnfd_ext_cpds
:
1714 raise EngineException(
1715 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1716 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1717 "non existing ext-cpd:id inside vnfd '{}'".format(
1720 virtual_link
["virtual-link-profile-id"],
1724 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1727 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
1728 final_content
= super().check_conflict_on_edit(
1729 session
, final_content
, edit_content
, _id
1732 self
._check
_descriptor
_dependencies
(session
, final_content
)
1734 return final_content
1736 def check_conflict_on_del(self
, session
, _id
, db_content
):
1738 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1739 that NSD can be public and be used by other projects.
1740 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1741 :param _id: nsd internal id
1742 :param db_content: The database content of the _id
1743 :return: None or raises EngineException with the conflict
1745 if session
["force"]:
1747 descriptor
= db_content
1748 descriptor_id
= descriptor
.get("id")
1749 if not descriptor_id
: # empty nsd not uploaded
1752 # check NSD used by NS
1753 _filter
= self
._get
_project
_filter
(session
)
1754 _filter
["nsd-id"] = _id
1755 if self
.db
.get_list("nsrs", _filter
):
1756 raise EngineException(
1757 "There is at least one NS instance using this descriptor",
1758 http_code
=HTTPStatus
.CONFLICT
,
1761 # check NSD referenced by NST
1762 del _filter
["nsd-id"]
1763 _filter
["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1764 if self
.db
.get_list("nsts", _filter
):
1765 raise EngineException(
1766 "There is at least one NetSlice Template referencing this descriptor",
1767 http_code
=HTTPStatus
.CONFLICT
,
1770 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1772 Deletes associate file system storage (via super)
1773 Deletes associated vnfpkgops from database.
1774 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1775 :param _id: server internal id
1776 :param db_content: The database content of the descriptor
1778 :raises: FsException in case of error while deleting associated storage
1780 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1781 self
.db
.del_list(self
.topic
+ "_revisions", {"_id": {"$regex": _id
}})
1784 def extract_day12_primitives(nsd
: dict) -> dict:
1785 """Removes the day12 primitives from the NSD descriptors
1788 nsd (dict): Descriptor as a dictionary
1791 nsd (dict): Cleared NSD
1793 if nsd
.get("ns-configuration"):
1796 "initial-config-primitive",
1797 "terminate-config-primitive",
1799 nsd
["ns-configuration"].pop(key
, None)
1802 def remove_modifiable_items(self
, nsd
: dict) -> dict:
1803 """Removes the modifiable parts from the VNFD descriptors
1805 It calls different extract functions according to different update types
1806 to clear all the modifiable items from NSD
1809 nsd (dict): Descriptor as a dictionary
1812 nsd (dict): Descriptor which does not include modifiable contents
1814 while isinstance(nsd
, dict) and nsd
.get("nsd"):
1816 if isinstance(nsd
, list):
1818 nsd
.pop("_admin", None)
1819 # If the more extractions need to be done from NSD,
1820 # the new extract methods could be appended to below list.
1821 for extract_function
in [self
.extract_day12_primitives
]:
1822 nsd_temp
= extract_function(nsd
)
1826 def _validate_descriptor_changes(
1829 descriptor_file_name
: str,
1830 old_descriptor_directory
: str,
1831 new_descriptor_directory
: str,
1833 """Compares the old and new NSD descriptors and validates the new descriptor
1836 old_descriptor_directory: Directory of descriptor which is in-use
1837 new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)
1843 EngineException: In case of error if the changes are not allowed
1847 # If NSD does not exist in DB, or it is not in use by any NS,
1848 # validation is not required.
1849 nsd
= self
.db
.get_one("nsds", {"_id": descriptor_id
}, fail_on_empty
=False)
1850 if not nsd
or not detect_descriptor_usage(nsd
, "nsds", self
.db
):
1853 # Get the old and new descriptor contents in order to compare them.
1854 with self
.fs
.file_open(
1855 (old_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1856 ) as old_descriptor_file
:
1857 with self
.fs
.file_open(
1858 (new_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1859 ) as new_descriptor_file
:
1860 old_content
= yaml
.safe_load(old_descriptor_file
.read())
1861 new_content
= yaml
.safe_load(new_descriptor_file
.read())
1863 if old_content
and new_content
:
1864 disallowed_change
= DeepDiff(
1865 self
.remove_modifiable_items(old_content
),
1866 self
.remove_modifiable_items(new_content
),
1869 if disallowed_change
:
1870 changed_nodes
= functools
.reduce(
1871 lambda a
, b
: a
+ ", " + b
,
1874 for node
in disallowed_change
.get(
1880 raise EngineException(
1881 f
"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1882 "there are disallowed changes in the ns descriptor. ",
1883 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1893 "NS Descriptor could not be processed with error: {}.".format(e
)
1896 def sol005_projection(self
, data
):
1897 data
["nsdOnboardingState"] = data
["_admin"]["onboardingState"]
1898 data
["nsdOperationalState"] = data
["_admin"]["operationalState"]
1899 data
["nsdUsageState"] = data
["_admin"]["usageState"]
1902 links
["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data
["_id"])}
1903 links
["nsd_content"] = {
1904 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data
["_id"])
1906 data
["_links"] = links
1908 return super().sol005_projection(data
)
1911 class NstTopic(DescriptorTopic
):
1914 quota_name
= "slice_templates"
1916 def __init__(self
, db
, fs
, msg
, auth
):
1917 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
1919 def pyangbind_validation(self
, item
, data
, force
=False):
1922 pybindJSONDecoder
.load_ietf_json(
1930 out
= pybindJSON
.dumps(mynst
, mode
="ietf")
1931 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
1933 except Exception as e
:
1934 raise EngineException(
1935 "Error in pyangbind validation: {}".format(str(e
)),
1936 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1940 def _remove_envelop(indata
=None):
1943 clean_indata
= indata
1945 if clean_indata
.get("nst"):
1947 not isinstance(clean_indata
["nst"], list)
1948 or len(clean_indata
["nst"]) != 1
1950 raise EngineException("'nst' must be a list only one element")
1951 clean_indata
= clean_indata
["nst"][0]
1952 elif clean_indata
.get("nst:nst"):
1954 not isinstance(clean_indata
["nst:nst"], list)
1955 or len(clean_indata
["nst:nst"]) != 1
1957 raise EngineException("'nst:nst' must be a list only one element")
1958 clean_indata
= clean_indata
["nst:nst"][0]
1961 def _validate_input_new(self
, indata
, storage_params
, force
=False):
1962 indata
.pop("onboardingState", None)
1963 indata
.pop("operationalState", None)
1964 indata
.pop("usageState", None)
1965 indata
= self
.pyangbind_validation("nsts", indata
, force
)
1966 return indata
.copy()
1968 def _check_descriptor_dependencies(self
, session
, descriptor
):
1970 Check that the dependent descriptors exist on a new descriptor or edition
1971 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1972 :param descriptor: descriptor to be inserted or edit
1973 :return: None or raises exception
1975 if not descriptor
.get("netslice-subnet"):
1977 for nsd
in descriptor
["netslice-subnet"]:
1978 nsd_id
= nsd
["nsd-ref"]
1979 filter_q
= self
._get
_project
_filter
(session
)
1980 filter_q
["id"] = nsd_id
1981 if not self
.db
.get_list("nsds", filter_q
):
1982 raise EngineException(
1983 "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
1984 "existing nsd".format(nsd_id
),
1985 http_code
=HTTPStatus
.CONFLICT
,
1988 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
1989 final_content
= super().check_conflict_on_edit(
1990 session
, final_content
, edit_content
, _id
1993 self
._check
_descriptor
_dependencies
(session
, final_content
)
1994 return final_content
1996 def check_conflict_on_del(self
, session
, _id
, db_content
):
1998 Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
1999 that NST can be public and be used by other projects.
2000 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
2001 :param _id: nst internal id
2002 :param db_content: The database content of the _id.
2003 :return: None or raises EngineException with the conflict
2005 # TODO: Check this method
2006 if session
["force"]:
2008 # Get Network Slice Template from Database
2009 _filter
= self
._get
_project
_filter
(session
)
2010 _filter
["_admin.nst-id"] = _id
2011 if self
.db
.get_list("nsis", _filter
):
2012 raise EngineException(
2013 "there is at least one Netslice Instance using this descriptor",
2014 http_code
=HTTPStatus
.CONFLICT
,
2017 def sol005_projection(self
, data
):
2018 data
["onboardingState"] = data
["_admin"]["onboardingState"]
2019 data
["operationalState"] = data
["_admin"]["operationalState"]
2020 data
["usageState"] = data
["_admin"]["usageState"]
2023 links
["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data
["_id"])}
2024 links
["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data
["_id"])}
2025 data
["_links"] = links
2027 return super().sol005_projection(data
)
2030 class PduTopic(BaseTopic
):
2033 quota_name
= "pduds"
2034 schema_new
= pdu_new_schema
2035 schema_edit
= pdu_edit_schema
2037 def __init__(self
, db
, fs
, msg
, auth
):
2038 BaseTopic
.__init
__(self
, db
, fs
, msg
, auth
)
2041 def format_on_new(content
, project_id
=None, make_public
=False):
2042 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
2043 content
["_admin"]["onboardingState"] = "CREATED"
2044 content
["_admin"]["operationalState"] = "ENABLED"
2045 content
["_admin"]["usageState"] = "NOT_IN_USE"
2047 def check_conflict_on_del(self
, session
, _id
, db_content
):
2049 Check that there is not any vnfr that uses this PDU
2050 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
2051 :param _id: pdu internal id
2052 :param db_content: The database content of the _id.
2053 :return: None or raises EngineException with the conflict
2055 if session
["force"]:
2058 _filter
= self
._get
_project
_filter
(session
)
2059 _filter
["vdur.pdu-id"] = _id
2060 if self
.db
.get_list("vnfrs", _filter
):
2061 raise EngineException(
2062 "There is at least one VNF instance using this PDU",
2063 http_code
=HTTPStatus
.CONFLICT
,
class VnfPkgOpTopic(BaseTopic):
    """Topic handling VNF package operation occurrences (vnfpkg_op_occs).

    Only creation ("new") and reading are supported; edit/delete are
    explicitly disabled for this topic.
    """

    # Database collection / topic name used by self.db.create and rollback.
    # NOTE(review): values restored from project convention — confirm.
    topic = "vnfpkgops"
    # Kafka topic the operation is announced on (consumed by LCM).
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing an operation occurrence is not allowed; always raises."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting an operation occurrence is not allowed; always raises."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion is not allowed; always raises."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.

        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # Locate the requested KDU inside the VNFD; for-else raises when
        # no KDU with that name exists.
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )

        # A KDU is deployed either as a helm chart or as a juju bundle.
        # When the artifact is qualified as "<repo>/<name>", extract the
        # repository part so its URL can be resolved below.
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )

        if repo_name:
            # Reuse the project filter, but drop the vnfd _id constraint
            # before querying the k8s repositories collection.
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None

        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        # Stamp _admin metadata (created time, projects, ...) and mirror the
        # creation time into the NFV-SOL operation time fields.
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None