1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
26 from deepdiff
import DeepDiff
27 from hashlib
import md5
28 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
29 from http
import HTTPStatus
31 from uuid
import uuid4
32 from re
import fullmatch
33 from zipfile
import ZipFile
34 from urllib
.parse
import urlparse
35 from osm_nbi
.validation
import (
42 from osm_nbi
.base_topic
import (
46 detect_descriptor_usage
,
48 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
49 from osm_im
.nst
import nst
as nst_im
50 from pyangbind
.lib
.serialise
import pybindJSONDecoder
51 import pyangbind
.lib
.pybindJSON
as pybindJSON
52 from osm_nbi
import utils
54 __author__
= "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
56 valid_helm_chart_re
= re
.compile(
57 r
"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
61 class DescriptorTopic(BaseTopic
):
62 def __init__(self
, db
, fs
, msg
, auth
):
63 super().__init
__(db
, fs
, msg
, auth
)
65 def _validate_input_new(self
, indata
, storage_params
, force
=False):
68 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
69 final_content
= super().check_conflict_on_edit(
70 session
, final_content
, edit_content
, _id
73 def _check_unique_id_name(descriptor
, position
=""):
74 for desc_key
, desc_item
in descriptor
.items():
75 if isinstance(desc_item
, list) and desc_item
:
78 for index
, list_item
in enumerate(desc_item
):
79 if isinstance(list_item
, dict):
80 _check_unique_id_name(
81 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
85 list_item
.get("id") or list_item
.get("name")
87 desc_item_id
= "id" if list_item
.get("id") else "name"
88 if desc_item_id
and list_item
.get(desc_item_id
):
89 if list_item
[desc_item_id
] in used_ids
:
90 position
= "{}.{}[{}]".format(
91 position
, desc_key
, index
93 raise EngineException(
94 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
96 list_item
[desc_item_id
],
99 HTTPStatus
.UNPROCESSABLE_ENTITY
,
101 used_ids
.append(list_item
[desc_item_id
])
103 _check_unique_id_name(final_content
)
104 # 1. validate again with pyangbind
105 # 1.1. remove internal keys
107 for k
in ("_id", "_admin"):
108 if k
in final_content
:
109 internal_keys
[k
] = final_content
.pop(k
)
110 storage_params
= internal_keys
["_admin"].get("storage")
111 serialized
= self
._validate
_input
_new
(
112 final_content
, storage_params
, session
["force"]
115 # 1.2. modify final_content with a serialized version
116 final_content
= copy
.deepcopy(serialized
)
117 # 1.3. restore internal keys
118 for k
, v
in internal_keys
.items():
123 # 2. check that this id is not present
124 if "id" in edit_content
:
125 _filter
= self
._get
_project
_filter
(session
)
127 _filter
["id"] = final_content
["id"]
128 _filter
["_id.neq"] = _id
130 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
131 raise EngineException(
132 "{} with id '{}' already exists for this project".format(
133 (str(self
.topic
))[:-1], final_content
["id"]
def format_on_new(content, project_id=None, make_public=False):
    """Seed the "_admin" lifecycle state of a freshly created descriptor.

    Common "_admin" fields are first filled by BaseTopic.format_on_new; the
    SOL005 onboarding/operational/usage states are then set to the values a
    brand-new, still-empty package must have.

    :param content: descriptor content; modified in place
    :param project_id: owner project, forwarded to BaseTopic.format_on_new
    :param make_public: public visibility flag, forwarded to BaseTopic.format_on_new
    """
    BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
    # A new package has no uploaded content yet: created, disabled and unused.
    initial_states = {
        "onboardingState": "CREATED",
        "operationalState": "DISABLED",
        "usageState": "NOT_IN_USE",
    }
    content["_admin"].update(initial_states)
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes file system storage associated with the descriptor.

    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: To not send message (False) or store content (list) instead
    :return: None if ok or raises EngineException with the problem
    """
    self.fs.file_delete(_id, ignore_non_exist=True)
    # remove temp folder
    self.fs.file_delete(_id + "_", ignore_non_exist=True)
    # Remove file revisions, stored under folders named "<id>:<revision>".
    # NOTE(review): the loop header was reconstructed (assumes revisions
    # count down from the current one to 1) -- confirm against the original.
    if "revision" in db_content["_admin"]:
        revision = db_content["_admin"]["revision"]
        while revision > 0:
            self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
            revision = revision - 1
166 def get_one_by_id(db
, session
, topic
, id):
167 # find owned by this project
168 _filter
= BaseTopic
._get
_project
_filter
(session
)
170 desc_list
= db
.get_list(topic
, _filter
)
171 if len(desc_list
) == 1:
173 elif len(desc_list
) > 1:
175 "Found more than one {} with id='{}' belonging to this project".format(
181 # not found any: try to find public
182 _filter
= BaseTopic
._get
_project
_filter
(session
)
184 desc_list
= db
.get_list(topic
, _filter
)
187 "Not found any {} with id='{}'".format(topic
[:-1], id),
188 HTTPStatus
.NOT_FOUND
,
190 elif len(desc_list
) == 1:
194 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
200 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
202 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
203 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
204 (self.upload_content)
205 :param rollback: list to append created items at database in case a rollback may to be done
206 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
207 :param indata: data to be inserted
208 :param kwargs: used to override the indata descriptor
209 :param headers: http request headers
210 :return: _id, None: identity of the inserted data; and None as there is not any operation
213 # No needed to capture exceptions
215 self
.check_quota(session
)
219 if "userDefinedData" in indata
:
220 indata
= indata
["userDefinedData"]
222 # Override descriptor with query string kwargs
223 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
224 # uncomment when this method is implemented.
225 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
226 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
228 content
= {"_admin": {"userDefinedData": indata
, "revision": 0}}
231 content
, session
["project_id"], make_public
=session
["public"]
233 _id
= self
.db
.create(self
.topic
, content
)
234 rollback
.append({"topic": self
.topic
, "_id": _id
})
235 self
._send
_msg
("created", {"_id": _id
})
238 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
240 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
241 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
242 :param _id : the nsd,vnfd is already created, this is the id
243 :param indata: http body request
244 :param kwargs: user query string to override parameters. NOT USED
245 :param headers: http request headers
246 :return: True if package is completely uploaded or False if partial content has been uploded
247 Raise exception on error
249 # Check that _id exists and it is valid
250 current_desc
= self
.show(session
, _id
)
252 content_range_text
= headers
.get("Content-Range")
253 expected_md5
= headers
.get("Content-File-MD5")
255 content_type
= headers
.get("Content-Type")
258 and "application/gzip" in content_type
259 or "application/x-gzip" in content_type
262 if content_type
and "application/zip" in content_type
:
264 filename
= headers
.get("Content-Filename")
265 if not filename
and compressed
:
266 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
271 if "revision" in current_desc
["_admin"]:
272 revision
= current_desc
["_admin"]["revision"] + 1
274 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
280 if content_range_text
:
282 content_range_text
.replace("-", " ").replace("/", " ").split()
285 content_range
[0] != "bytes"
286 ): # TODO check x<y not negative < total....
288 start
= int(content_range
[1])
289 end
= int(content_range
[2]) + 1
290 total
= int(content_range
[3])
293 # Rather than using a temp folder, we will store the package in a folder based on
294 # the current revision.
295 proposed_revision_path
= (
296 _id
+ ":" + str(revision
)
297 ) # all the content is upload here and if ok, it is rename from id_ to is folder
300 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
301 raise EngineException(
302 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
305 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
306 self
.fs
.mkdir(proposed_revision_path
)
307 fs_rollback
.append(proposed_revision_path
)
309 storage
= self
.fs
.get_params()
310 storage
["folder"] = proposed_revision_path
312 file_path
= (proposed_revision_path
, filename
)
313 if self
.fs
.file_exists(file_path
, "file"):
314 file_size
= self
.fs
.file_size(file_path
)
317 if file_size
!= start
:
318 raise EngineException(
319 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
322 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
324 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
325 if isinstance(indata
, dict):
326 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
327 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
331 indata_text
= indata
.read(4096)
332 indata_len
+= len(indata_text
)
335 file_pkg
.write(indata_text
)
336 if content_range_text
:
337 if indata_len
!= end
- start
:
338 raise EngineException(
339 "Mismatch between Content-Range header {}-{} and body length of {}".format(
340 start
, end
- 1, indata_len
342 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
345 # TODO update to UPLOADING
352 chunk_data
= file_pkg
.read(1024)
354 file_md5
.update(chunk_data
)
355 chunk_data
= file_pkg
.read(1024)
356 if expected_md5
!= file_md5
.hexdigest():
357 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
359 if compressed
== "gzip":
360 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
361 descriptor_file_name
= None
363 tarname
= tarinfo
.name
364 tarname_path
= tarname
.split("/")
366 not tarname_path
[0] or ".." in tarname_path
367 ): # if start with "/" means absolute path
368 raise EngineException(
369 "Absolute path or '..' are not allowed for package descriptor tar.gz"
371 if len(tarname_path
) == 1 and not tarinfo
.isdir():
372 raise EngineException(
373 "All files must be inside a dir for package descriptor tar.gz"
376 tarname
.endswith(".yaml")
377 or tarname
.endswith(".json")
378 or tarname
.endswith(".yml")
380 storage
["pkg-dir"] = tarname_path
[0]
381 if len(tarname_path
) == 2:
382 if descriptor_file_name
:
383 raise EngineException(
384 "Found more than one descriptor file at package descriptor tar.gz"
386 descriptor_file_name
= tarname
387 if not descriptor_file_name
:
388 raise EngineException(
389 "Not found any descriptor file at package descriptor tar.gz"
391 storage
["descriptor"] = descriptor_file_name
392 storage
["zipfile"] = filename
393 self
.fs
.file_extract(tar
, proposed_revision_path
)
394 with self
.fs
.file_open(
395 (proposed_revision_path
, descriptor_file_name
), "r"
396 ) as descriptor_file
:
397 content
= descriptor_file
.read()
398 elif compressed
== "zip":
399 zipfile
= ZipFile(file_pkg
)
400 descriptor_file_name
= None
401 for package_file
in zipfile
.infolist():
402 zipfilename
= package_file
.filename
403 file_path
= zipfilename
.split("/")
405 not file_path
[0] or ".." in zipfilename
406 ): # if start with "/" means absolute path
407 raise EngineException(
408 "Absolute path or '..' are not allowed for package descriptor zip"
412 zipfilename
.endswith(".yaml")
413 or zipfilename
.endswith(".json")
414 or zipfilename
.endswith(".yml")
416 zipfilename
.find("/") < 0
417 or zipfilename
.find("Definitions") >= 0
419 storage
["pkg-dir"] = ""
420 if descriptor_file_name
:
421 raise EngineException(
422 "Found more than one descriptor file at package descriptor zip"
424 descriptor_file_name
= zipfilename
425 if not descriptor_file_name
:
426 raise EngineException(
427 "Not found any descriptor file at package descriptor zip"
429 storage
["descriptor"] = descriptor_file_name
430 storage
["zipfile"] = filename
431 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
433 with self
.fs
.file_open(
434 (proposed_revision_path
, descriptor_file_name
), "r"
435 ) as descriptor_file
:
436 content
= descriptor_file
.read()
438 content
= file_pkg
.read()
439 storage
["descriptor"] = descriptor_file_name
= filename
441 if descriptor_file_name
.endswith(".json"):
442 error_text
= "Invalid json format "
443 indata
= json
.load(content
)
445 error_text
= "Invalid yaml format "
446 indata
= yaml
.safe_load(content
)
448 # Need to close the file package here so it can be copied from the
449 # revision to the current, unrevisioned record
454 # Fetch both the incoming, proposed revision and the original revision so we
455 # can call a validate method to compare them
456 current_revision_path
= _id
+ "/"
457 self
.fs
.sync(from_path
=current_revision_path
)
458 self
.fs
.sync(from_path
=proposed_revision_path
)
462 self
._validate
_descriptor
_changes
(
464 descriptor_file_name
,
465 current_revision_path
,
466 proposed_revision_path
,
468 except Exception as e
:
470 self
.fs
.path
+ current_revision_path
, ignore_errors
=True
473 self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True
475 # Only delete the new revision. We need to keep the original version in place
476 # as it has not been changed.
477 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
480 indata
= self
._remove
_envelop
(indata
)
482 # Override descriptor with query string kwargs
484 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
486 current_desc
["_admin"]["storage"] = storage
487 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
488 current_desc
["_admin"]["operationalState"] = "ENABLED"
489 current_desc
["_admin"]["modified"] = time()
490 current_desc
["_admin"]["revision"] = revision
492 deep_update_rfc7396(current_desc
, indata
)
493 current_desc
= self
.check_conflict_on_edit(
494 session
, current_desc
, indata
, _id
=_id
497 # Copy the revision to the active package name by its original id
498 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
500 self
.fs
.path
+ proposed_revision_path
,
501 self
.fs
.path
+ current_revision_path
,
503 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
504 self
.fs
.mkdir(current_revision_path
)
505 self
.fs
.reverse_sync(from_path
=current_revision_path
)
507 shutil
.rmtree(self
.fs
.path
+ _id
)
509 self
.db
.replace(self
.topic
, _id
, current_desc
)
511 # Store a copy of the package as a point in time revision
512 revision_desc
= dict(current_desc
)
513 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
514 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
518 self
._send
_msg
("edited", indata
)
520 # TODO if descriptor has changed because kwargs update content and remove cached zip
521 # TODO if zip is not present creates one
524 except EngineException
:
527 raise EngineException(
528 "invalid Content-Range header format. Expected 'bytes start-end/total'",
529 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
532 raise EngineException(
533 "invalid upload transaction sequence: '{}'".format(e
),
534 HTTPStatus
.BAD_REQUEST
,
536 except tarfile
.ReadError
as e
:
537 raise EngineException(
538 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
540 except (ValueError, yaml
.YAMLError
) as e
:
541 raise EngineException(error_text
+ str(e
))
542 except ValidationError
as e
:
543 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
547 for file in fs_rollback
:
548 self
.fs
.file_delete(file, ignore_non_exist
=True)
550 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
552 Return the file content of a vnfd or nsd
553 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
554 :param _id: Identity of the vnfd, nsd
555 :param path: artifact path or "$DESCRIPTOR" or None
556 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
557 :return: opened file plus Accept format or raises an exception
559 accept_text
= accept_zip
= False
561 if "text/plain" in accept_header
or "*/*" in accept_header
:
563 if "application/zip" in accept_header
or "*/*" in accept_header
:
564 accept_zip
= "application/zip"
565 elif "application/gzip" in accept_header
:
566 accept_zip
= "application/gzip"
568 if not accept_text
and not accept_zip
:
569 raise EngineException(
570 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
571 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
574 content
= self
.show(session
, _id
)
575 if content
["_admin"]["onboardingState"] != "ONBOARDED":
576 raise EngineException(
577 "Cannot get content because this resource is not at 'ONBOARDED' state. "
578 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
579 http_code
=HTTPStatus
.CONFLICT
,
581 storage
= content
["_admin"]["storage"]
582 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
583 if not storage
.get("pkg-dir") and not storage
.get("folder"):
584 raise EngineException(
585 "Packages does not contains artifacts",
586 http_code
=HTTPStatus
.BAD_REQUEST
,
588 if self
.fs
.file_exists(
589 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
591 folder_content
= self
.fs
.dir_ls(
592 (storage
["folder"], storage
["pkg-dir"], *path
)
594 return folder_content
, "text/plain"
595 # TODO manage folders in http
599 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
601 "application/octet-stream",
604 # pkgtype accept ZIP TEXT -> result
605 # manyfiles yes X -> zip
607 # onefile yes no -> zip
609 contain_many_files
= False
610 if storage
.get("pkg-dir"):
611 # check if there are more than one file in the package, ignoring checksums.txt.
612 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
613 if len(pkg_files
) >= 3 or (
614 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
616 contain_many_files
= True
617 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
619 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
622 elif contain_many_files
and not accept_zip
:
623 raise EngineException(
624 "Packages that contains several files need to be retrieved with 'application/zip'"
626 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
629 if not storage
.get("zipfile"):
630 # TODO generate zipfile if not present
631 raise EngineException(
632 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
634 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
637 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
641 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
643 for k
, v
in descriptor
.items():
645 if isinstance(v
, dict):
646 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
647 elif isinstance(v
, list):
650 if isinstance(x
, dict):
651 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
654 new_descriptor
[k
.split(":")[-1]] = new_v
655 return new_descriptor
def pyangbind_validation(self, item, data, force=False):
    """Reject model validation on the generic descriptor topic.

    Concrete subclasses (VNFD/NSD topics) override this with a real
    pyangbind-based validation; reaching this base implementation means the
    item type has no YANG model to validate against.

    :param item: topic name being validated (used in the error message)
    :param data: descriptor data (unused here)
    :param force: unused here
    :raises EngineException: always, with INTERNAL_SERVER_ERROR
    """
    raise EngineException(
        "Not possible to validate '{}' item".format(item),
        http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
    )
663 def _validate_input_edit(self
, indata
, content
, force
=False):
664 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
667 if "_admin" not in indata
:
668 indata
["_admin"] = {}
670 if "operationalState" in indata
:
671 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
672 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
674 raise EngineException(
675 "State '{}' is not a valid operational state".format(
676 indata
["operationalState"]
678 http_code
=HTTPStatus
.BAD_REQUEST
,
681 # In the case of user defined data, we need to put the data in the root of the object
682 # to preserve current expected behaviour
683 if "userDefinedData" in indata
:
684 data
= indata
.pop("userDefinedData")
685 if isinstance(data
, dict):
686 indata
["_admin"]["userDefinedData"] = data
688 raise EngineException(
689 "userDefinedData should be an object, but is '{}' instead".format(
692 http_code
=HTTPStatus
.BAD_REQUEST
,
696 "operationalState" in indata
["_admin"]
697 and content
["_admin"]["operationalState"]
698 == indata
["_admin"]["operationalState"]
700 raise EngineException(
701 "operationalState already {}".format(
702 content
["_admin"]["operationalState"]
704 http_code
=HTTPStatus
.CONFLICT
,
709 def _validate_descriptor_changes(
712 descriptor_file_name
,
713 old_descriptor_directory
,
714 new_descriptor_directory
,
717 # raise EngineException(
718 # "Error in validating new descriptor: <NODE> cannot be modified",
719 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
724 class VnfdTopic(DescriptorTopic
):
def __init__(self, db, fs, msg, auth):
    """Build the VNFD topic; all handlers are forwarded unchanged.

    :param db: database handler, passed through to DescriptorTopic
    :param fs: file-storage handler, passed through to DescriptorTopic
    :param msg: message-bus handler, passed through to DescriptorTopic
    :param auth: auth handler, passed through to DescriptorTopic
    """
    DescriptorTopic.__init__(self, db, fs, msg, auth)
731 def pyangbind_validation(self
, item
, data
, force
=False):
732 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
733 raise EngineException(
734 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
735 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
738 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
739 pybindJSONDecoder
.load_ietf_json(
740 {"etsi-nfv-vnfd:vnfd": data
},
747 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
748 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
749 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
750 return utils
.deep_update_dict(data
, desc_out
)
751 except Exception as e
:
752 raise EngineException(
753 "Error in pyangbind validation: {}".format(str(e
)),
754 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
758 def _descriptor_data_is_in_old_format(data
):
759 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
762 def _remove_envelop(indata
=None):
765 clean_indata
= indata
767 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
768 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
769 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
770 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
771 elif clean_indata
.get("vnfd"):
772 if not isinstance(clean_indata
["vnfd"], dict):
773 raise EngineException("'vnfd' must be dict")
774 clean_indata
= clean_indata
["vnfd"]
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the common edit checks and then classify the descriptor type.

    After the DescriptorTopic checks pass, "_admin.type" is derived from the
    VDUs present: "pnfd" when only pdu-type VDUs exist, "hnfd" when both
    kinds exist, "vnfd" when only regular VDUs exist. With no VDU at all the
    type field is left untouched.

    :return: the validated final_content
    """
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    # NOTE(review): the pdu/vdu accounting was reconstructed from the
    # surrounding fragments -- confirm against the original implementation.
    contains_pdu = False
    contains_vdu = False
    for vdu in get_iterable(final_content.get("vdu")):
        if vdu.get("pdu-type"):
            contains_pdu = True
        else:
            contains_vdu = True
    if contains_pdu:
        final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
    elif contains_vdu:
        final_content["_admin"]["type"] = "vnfd"
    # if neither vdu nor pdu is present, do not fill type
    return final_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that no NSD and no VNF record uses this VNFD before deletion.

    Only NSDs belonging to this project are considered; note that a VNFD can
    be public and be used by NSDs of other projects. Also check there are no
    deployments (vnfr) using it.

    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: vnfd internal id
    :param db_content: The database content of the _id
    :return: None or raises EngineException with the conflict
    """
    # NOTE(review): the early exits below were reconstructed from fragment
    # gaps -- confirm against the original implementation.
    if session["force"]:
        return
    descriptor = db_content
    descriptor_id = descriptor.get("id")
    if not descriptor_id:  # empty vnfd not uploaded
        return

    _filter = self._get_project_filter(session)

    # check vnfrs using this vnfd
    _filter["vnfd-id"] = _id
    if self.db.get_list("vnfrs", _filter):
        raise EngineException(
            "There is at least one VNF instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )

    # check NSD referencing this VNFD
    del _filter["vnfd-id"]
    _filter["vnfd-id"] = descriptor_id
    if self.db.get_list("nsds", _filter):
        raise EngineException(
            "There is at least one NS package referencing this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a VNFD received on create/upload.

    Server-generated SOL005 state attributes are dropped, the descriptor is
    run through pyangbind model validation, and then the cross-reference and
    package-content checks are applied.

    :param indata: candidate descriptor
    :param storage_params: "_admin.storage" info of the uploaded package
    :param force: tolerate model deviations during pyangbind validation
    :return: the validated descriptor
    """
    # These attributes are read-only, produced by the server itself.
    for read_only_key in ("onboardingState", "operationalState", "usageState", "links"):
        indata.pop(read_only_key, None)

    indata = self.pyangbind_validation("vnfds", indata, force)
    # Cross references validation in the descriptor
    self.validate_mgmt_interface_connection_point(indata)

    for vdu in get_iterable(indata.get("vdu")):
        self.validate_vdu_internal_connection_points(vdu)
        self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)

    self._validate_vdu_charms_in_package(storage_params, indata)
    self._validate_vnf_charms_in_package(storage_params, indata)

    self.validate_external_connection_points(indata)
    self.validate_internal_virtual_links(indata)
    self.validate_monitoring_params(indata)
    self.validate_scaling_group_descriptor(indata)
    self.validate_healing_group_descriptor(indata)
    self.validate_alarm_group_descriptor(indata)
    self.validate_storage_compute_descriptor(indata)
    self.validate_helm_chart(indata)

    return indata
def validate_helm_chart(indata):
    """Check that every KDU helm-chart reference is a chart name or a URL.

    :param indata: descriptor containing an optional "kdu" list
    :raises EngineException: on a malformed helm-chart value
    """

    def is_url(url):
        # a usable URL must carry at least a scheme and a network location
        result = urlparse(url)
        return all([result.scheme, result.netloc])

    for kdu in indata.get("kdu", []):
        helm_chart_value = kdu.get("helm-chart")
        if not helm_chart_value:
            continue
        if not (
            valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
        ):
            raise EngineException(
                "helm-chart '{}' is not valid".format(helm_chart_value),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
def validate_mgmt_interface_connection_point(indata):
    """Ensure a VNFD with VDUs declares a management CP that exists.

    A descriptor without any "vdu" needs no management connection point.
    Otherwise "mgmt-cp" must be present and must name one of the external
    connection points ("ext-cpd") of the descriptor.

    :param indata: descriptor to check
    :raises EngineException: when mgmt-cp is missing or does not resolve
    """
    if not indata.get("vdu"):
        return
    if not indata.get("mgmt-cp"):
        raise EngineException(
            "'mgmt-cp' is a mandatory field and it is not defined",
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )

    for cp in get_iterable(indata.get("ext-cpd")):
        if cp["id"] == indata["mgmt-cp"]:
            break
    else:
        raise EngineException(
            "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
def validate_vdu_internal_connection_points(vdu):
    """Check that the int-cpd identifiers of one VDU are unique.

    :param vdu: vdu entry of the descriptor
    :raises EngineException: on a duplicated int-cpd id
    """
    int_cpds = set()
    for cpd in get_iterable(vdu.get("int-cpd")):
        cpd_id = cpd.get("id")
        if cpd_id and cpd_id in int_cpds:
            raise EngineException(
                "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
                    vdu["id"], cpd_id
                ),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        int_cpds.add(cpd_id)
def validate_external_connection_points(indata):
    """Check ext-cpd ids are unique and their int-cpd references resolve.

    Every ext-cpd that points to an internal connection point must match an
    existing (vdu-id, int-cpd-id) pair of the descriptor.

    :param indata: descriptor to check
    :raises EngineException: on duplicated ext-cpd or dangling int-cpd ref
    """
    all_vdus_int_cpds = set()
    for vdu in get_iterable(indata.get("vdu")):
        for int_cpd in get_iterable(vdu.get("int-cpd")):
            all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

    ext_cpds = set()
    for cpd in get_iterable(indata.get("ext-cpd")):
        cpd_id = cpd.get("id")
        if cpd_id and cpd_id in ext_cpds:
            raise EngineException(
                "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        ext_cpds.add(cpd_id)

        int_cpd = cpd.get("int-cpd")
        if int_cpd:
            if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                raise EngineException(
                    "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                        cpd_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
    # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
def _validate_vdu_charms_in_package(self, storage_params, indata):
    """Ensure every VDU-level juju charm declared in day1-2 config is shipped.

    For each deployment flavour, any vdu-profile whose day1-2 configuration
    declares a juju execution environment must have a charms folder in the
    package ("charms" or "Scripts/charms").

    :param storage_params: "_admin.storage" info of the package
    :param indata: descriptor being validated
    :raises EngineException: when a charm is referenced but absent
    """
    for df in indata["df"]:
        if (
            "lcm-operations-configuration" in df
            and "operate-vnf-op-config" in df["lcm-operations-configuration"]
        ):
            # NOTE(review): the "day1-2" accessor was reconstructed from
            # fragment gaps -- confirm against the original implementation.
            configs = df["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ].get("day1-2", [])
            vdus = df.get("vdu-profile", [])
            for vdu in vdus:
                for config in configs:
                    if config["id"] == vdu["id"] and utils.find_in_list(
                        config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
    """Ensure a cloud-init file referenced by a VDU exists in the package.

    The file is searched under "cloud_init" and "Scripts/cloud_init".

    :param storage_params: "_admin.storage" info of the package
    :param vdu: vdu entry holding an optional "cloud-init-file"
    :param indata: descriptor being validated (used in the error message)
    :raises EngineException: when the referenced file is absent
    """
    if not vdu.get("cloud-init-file"):
        return
    if not self._validate_package_folders(
        storage_params, "cloud_init", vdu["cloud-init-file"]
    ) and not self._validate_package_folders(
        storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
    ):
        raise EngineException(
            "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
            "package".format(indata["id"], vdu["id"])
        )
def _validate_vnf_charms_in_package(self, storage_params, indata):
    """Ensure a VNF-level juju charm declared in day1-2 config is shipped.

    :param storage_params: "_admin.storage" info of the package
    :param indata: descriptor being validated
    :raises EngineException: when a charm is referenced but absent
    """
    # Get VNF configuration through new container
    for deployment_flavor in indata.get("df", []):
        # NOTE(review): the early exits below were reconstructed from
        # fragment gaps -- confirm against the original implementation.
        if "lcm-operations-configuration" not in deployment_flavor:
            return
        if (
            "operate-vnf-op-config"
            not in deployment_flavor["lcm-operations-configuration"]
        ):
            return
        for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
            "operate-vnf-op-config"
        ]["day1-2"]:
            if day_1_2_config["id"] == indata["id"]:
                if utils.find_in_list(
                    day_1_2_config.get("execution-environment-list", []),
                    lambda ee: "juju" in ee,
                ):
                    if not self._validate_package_folders(
                        storage_params, "charms"
                    ) and not self._validate_package_folders(
                        storage_params, "Scripts/charms"
                    ):
                        raise EngineException(
                            "Charm defined in vnf[id={}] but not present in "
                            "package".format(indata["id"])
                        )
def _validate_package_folders(self, storage_params, folder, file=None):
    """Tell whether *folder* (optionally containing *file*) exists in the package.

    Both the temporary upload folder ("<folder>_") and the final folder are
    probed; with a pkg-dir, the folder is looked up inside it.

    :param storage_params: "_admin.storage" info of the package (may be falsy)
    :param folder: folder name to look for (e.g. "charms")
    :param file: optional file that must exist inside the folder
    :return: True when the folder (and file, if given) is present
    """
    # NOTE(review): the False/True return paths were reconstructed from
    # fragment gaps -- confirm against the original implementation.
    if not storage_params:
        return False
    elif not storage_params.get("pkg-dir"):
        if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
            f = "{}_/{}".format(storage_params["folder"], folder)
        else:
            f = "{}/{}".format(storage_params["folder"], folder)
        if file:
            return self.fs.file_exists("{}/{}".format(f, file), "file")
        if self.fs.file_exists(f, "dir"):
            if self.fs.dir_ls(f):
                return True
        return False
    else:
        if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
            f = "{}_/{}/{}".format(
                storage_params["folder"], storage_params["pkg-dir"], folder
            )
        else:
            f = "{}/{}/{}".format(
                storage_params["folder"], storage_params["pkg-dir"], folder
            )
        if file:
            return self.fs.file_exists("{}/{}".format(f, file), "file")
        if self.fs.file_exists(f, "dir"):
            if self.fs.dir_ls(f):
                return True
        return False
def validate_internal_virtual_links(indata):
    """Check internal VLD ids are unique and every reference to them resolves.

    Covers references from vdu int-cpd entries and from the
    virtual-link-profile entries of each deployment flavour.

    :param indata: descriptor to check
    :raises EngineException: on duplicated or dangling VLD references
    """
    all_ivld_ids = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        ivld_id = ivld.get("id")
        if ivld_id and ivld_id in all_ivld_ids:
            raise EngineException(
                "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        all_ivld_ids.add(ivld_id)

    for vdu in get_iterable(indata.get("vdu")):
        for int_cpd in get_iterable(vdu.get("int-cpd")):
            int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
            if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                    "int-virtual-link-desc".format(
                        vdu["id"], int_cpd["id"], int_cpd_ivld_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )

    for df in get_iterable(indata.get("df")):
        for vlp in get_iterable(df.get("virtual-link-profile")):
            vlp_ivld_id = vlp.get("id")
            if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
                raise EngineException(
                    "df[id='{}']:virtual-link-profile='{}' must match an existing "
                    "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
1078 def validate_monitoring_params(indata
):
1079 all_monitoring_params
= set()
1080 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1081 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1082 mp_id
= mp
.get("id")
1083 if mp_id
and mp_id
in all_monitoring_params
:
1084 raise EngineException(
1085 "Duplicated monitoring-parameter id in "
1086 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1089 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1092 all_monitoring_params
.add(mp_id
)
1094 for vdu
in get_iterable(indata
.get("vdu")):
1095 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1096 mp_id
= mp
.get("id")
1097 if mp_id
and mp_id
in all_monitoring_params
:
1098 raise EngineException(
1099 "Duplicated monitoring-parameter id in "
1100 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1103 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1106 all_monitoring_params
.add(mp_id
)
1108 for df
in get_iterable(indata
.get("df")):
1109 for mp
in get_iterable(df
.get("monitoring-parameter")):
1110 mp_id
= mp
.get("id")
1111 if mp_id
and mp_id
in all_monitoring_params
:
1112 raise EngineException(
1113 "Duplicated monitoring-parameter id in "
1114 "df[id='{}']:monitoring-parameter[id='{}']".format(
1117 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1120 all_monitoring_params
.add(mp_id
)
1123 def validate_scaling_group_descriptor(indata
):
1124 all_monitoring_params
= set()
1126 for df
in get_iterable(indata
.get("df")):
1127 for il
in get_iterable(df
.get("instantiation-level")):
1128 for vl
in get_iterable(il
.get("vdu-level")):
1129 all_vdu_ids
.add(vl
.get("vdu-id"))
1131 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1132 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1133 all_monitoring_params
.add(mp
.get("id"))
1135 for vdu
in get_iterable(indata
.get("vdu")):
1136 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1137 all_monitoring_params
.add(mp
.get("id"))
1139 for df
in get_iterable(indata
.get("df")):
1140 for mp
in get_iterable(df
.get("monitoring-parameter")):
1141 all_monitoring_params
.add(mp
.get("id"))
1143 for df
in get_iterable(indata
.get("df")):
1144 for sa
in get_iterable(df
.get("scaling-aspect")):
1145 for deltas
in get_iterable(
1146 sa
.get("aspect-delta-details").get("deltas")
1148 for vds
in get_iterable(deltas
.get("vdu-delta")):
1149 sa_vdu_id
= vds
.get("id")
1150 if sa_vdu_id
and sa_vdu_id
not in all_vdu_ids
:
1151 raise EngineException(
1152 "df[id='{}']:scaling-aspect[id='{}']:aspect-delta-details"
1154 "vdu-id='{}' not defined in vdu".format(
1160 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1163 for df
in get_iterable(indata
.get("df")):
1164 for sa
in get_iterable(df
.get("scaling-aspect")):
1165 for sp
in get_iterable(sa
.get("scaling-policy")):
1166 for sc
in get_iterable(sp
.get("scaling-criteria")):
1167 sc_monitoring_param
= sc
.get("vnf-monitoring-param-ref")
1170 and sc_monitoring_param
not in all_monitoring_params
1172 raise EngineException(
1173 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1174 "[name='{}']:scaling-criteria[name='{}']: "
1175 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1180 sc_monitoring_param
,
1182 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1185 for sca
in get_iterable(sa
.get("scaling-config-action")):
1187 "lcm-operations-configuration" not in df
1188 or "operate-vnf-op-config"
1189 not in df
["lcm-operations-configuration"]
1190 or not utils
.find_in_list(
1191 df
["lcm-operations-configuration"][
1192 "operate-vnf-op-config"
1193 ].get("day1-2", []),
1194 lambda config
: config
["id"] == indata
["id"],
1197 raise EngineException(
1198 "'day1-2 configuration' not defined in the descriptor but it is "
1199 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1202 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1204 for configuration
in get_iterable(
1205 df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1209 for primitive
in get_iterable(
1210 configuration
.get("config-primitive")
1214 == sca
["vnf-config-primitive-name-ref"]
1218 raise EngineException(
1219 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1220 "config-primitive-name-ref='{}' does not match any "
1221 "day1-2 configuration:config-primitive:name".format(
1224 sca
["vnf-config-primitive-name-ref"],
1226 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1230 def validate_healing_group_descriptor(indata
):
1232 for df
in get_iterable(indata
.get("df")):
1233 for il
in get_iterable(df
.get("instantiation-level")):
1234 for vl
in get_iterable(il
.get("vdu-level")):
1235 all_vdu_ids
.add(vl
.get("vdu-id"))
1237 for df
in get_iterable(indata
.get("df")):
1238 for ha
in get_iterable(df
.get("healing-aspect")):
1239 for hp
in get_iterable(ha
.get("healing-policy")):
1240 hp_monitoring_param
= hp
.get("vdu-id")
1241 if hp_monitoring_param
and hp_monitoring_param
not in all_vdu_ids
:
1242 raise EngineException(
1243 "df[id='{}']:healing-aspect[id='{}']:healing-policy"
1245 "vdu-id='{}' not defined in vdu".format(
1249 hp_monitoring_param
,
1251 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1255 def validate_alarm_group_descriptor(indata
):
1256 all_monitoring_params
= set()
1257 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1258 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1259 all_monitoring_params
.add(mp
.get("id"))
1261 for vdu
in get_iterable(indata
.get("vdu")):
1262 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1263 all_monitoring_params
.add(mp
.get("id"))
1265 for df
in get_iterable(indata
.get("df")):
1266 for mp
in get_iterable(df
.get("monitoring-parameter")):
1267 all_monitoring_params
.add(mp
.get("id"))
1269 for vdus
in get_iterable(indata
.get("vdu")):
1270 for alarms
in get_iterable(vdus
.get("alarm")):
1271 alarm_monitoring_param
= alarms
.get("vnf-monitoring-param-ref")
1273 alarm_monitoring_param
1274 and alarm_monitoring_param
not in all_monitoring_params
1276 raise EngineException(
1277 "vdu[id='{}']:alarm[id='{}']:"
1278 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1281 alarm_monitoring_param
,
1283 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1287 def validate_storage_compute_descriptor(indata
):
1289 for vsd
in get_iterable(indata
.get("virtual-storage-desc")):
1290 all_vsd_ids
.add(vsd
.get("id"))
1293 for vcd
in get_iterable(indata
.get("virtual-compute-desc")):
1294 all_vcd_ids
.add(vcd
.get("id"))
1296 for vdus
in get_iterable(indata
.get("vdu")):
1297 for vsd_ref
in vdus
.get("virtual-storage-desc"):
1298 if vsd_ref
and vsd_ref
not in all_vsd_ids
:
1299 raise EngineException(
1300 "vdu[virtual-storage-desc='{}']"
1301 "not defined in vnfd".format(
1304 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1307 for vdus
in get_iterable(indata
.get("vdu")):
1308 vcd_ref
= vdus
.get("virtual-compute-desc")
1309 if vcd_ref
and vcd_ref
not in all_vcd_ids
:
1310 raise EngineException(
1311 "vdu[virtual-compute-desc='{}']"
1312 "not defined in vnfd".format(
1313 vdus
["virtual-compute-desc"],
1315 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1318 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1320 Deletes associate file system storage (via super)
1321 Deletes associated vnfpkgops from database.
1322 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1323 :param _id: server internal id
1324 :param db_content: The database content of the descriptor
1326 :raises: FsException in case of error while deleting associated storage
1328 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1329 self
.db
.del_list("vnfpkgops", {"vnfPkgId": _id
})
1330 self
.db
.del_list(self
.topic
+ "_revisions", {"_id": {"$regex": _id
}})
1332 def sol005_projection(self
, data
):
1333 data
["onboardingState"] = data
["_admin"]["onboardingState"]
1334 data
["operationalState"] = data
["_admin"]["operationalState"]
1335 data
["usageState"] = data
["_admin"]["usageState"]
1338 links
["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data
["_id"])}
1339 links
["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data
["_id"])}
1340 links
["packageContent"] = {
1341 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data
["_id"])
1343 data
["_links"] = links
1345 return super().sol005_projection(data
)
1348 def find_software_version(vnfd
: dict) -> str:
1349 """Find the sotware version in the VNFD descriptors
1352 vnfd (dict): Descriptor as a dictionary
1355 software-version (str)
1357 default_sw_version
= "1.0"
1358 if vnfd
.get("vnfd"):
1360 if vnfd
.get("software-version"):
1361 return vnfd
["software-version"]
1363 return default_sw_version
1366 def extract_policies(vnfd
: dict) -> dict:
1367 """Removes the policies from the VNFD descriptors
1370 vnfd (dict): Descriptor as a dictionary
1373 vnfd (dict): VNFD which does not include policies
1375 for df
in vnfd
.get("df", {}):
1376 for policy
in ["scaling-aspect", "healing-aspect"]:
1377 if df
.get(policy
, {}):
1379 for vdu
in vnfd
.get("vdu", {}):
1380 for alarm_policy
in ["alarm", "monitoring-parameter"]:
1381 if vdu
.get(alarm_policy
, {}):
1382 vdu
.pop(alarm_policy
)
1386 def extract_day12_primitives(vnfd
: dict) -> dict:
1387 """Removes the day12 primitives from the VNFD descriptors
1390 vnfd (dict): Descriptor as a dictionary
1395 for df_id
, df
in enumerate(vnfd
.get("df", {})):
1397 df
.get("lcm-operations-configuration", {})
1398 .get("operate-vnf-op-config", {})
1401 day12
= df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1404 for config_id
, config
in enumerate(day12
):
1406 "initial-config-primitive",
1408 "terminate-config-primitive",
1410 config
.pop(key
, None)
1411 day12
[config_id
] = config
1412 df
["lcm-operations-configuration"]["operate-vnf-op-config"][
1415 vnfd
["df"][df_id
] = df
1418 def remove_modifiable_items(self
, vnfd
: dict) -> dict:
1419 """Removes the modifiable parts from the VNFD descriptors
1421 It calls different extract functions according to different update types
1422 to clear all the modifiable items from VNFD
1425 vnfd (dict): Descriptor as a dictionary
1428 vnfd (dict): Descriptor which does not include modifiable contents
1430 if vnfd
.get("vnfd"):
1432 vnfd
.pop("_admin", None)
1433 # If the other extractions need to be done from VNFD,
1434 # the new extract methods could be appended to below list.
1435 for extract_function
in [self
.extract_day12_primitives
, self
.extract_policies
]:
1436 vnfd_temp
= extract_function(vnfd
)
1440 def _validate_descriptor_changes(
1443 descriptor_file_name
: str,
1444 old_descriptor_directory
: str,
1445 new_descriptor_directory
: str,
1447 """Compares the old and new VNFD descriptors and validates the new descriptor.
1450 old_descriptor_directory (str): Directory of descriptor which is in-use
1451 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
1457 EngineException: In case of error when there are unallowed changes
1460 # If VNFD does not exist in DB or it is not in use by any NS,
1461 # validation is not required.
1462 vnfd
= self
.db
.get_one("vnfds", {"_id": descriptor_id
})
1463 if not vnfd
or not detect_descriptor_usage(vnfd
, "vnfds", self
.db
):
1466 # Get the old and new descriptor contents in order to compare them.
1467 with self
.fs
.file_open(
1468 (old_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1469 ) as old_descriptor_file
:
1470 with self
.fs
.file_open(
1471 (new_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1472 ) as new_descriptor_file
:
1473 old_content
= yaml
.safe_load(old_descriptor_file
.read())
1474 new_content
= yaml
.safe_load(new_descriptor_file
.read())
1476 # If software version has changed, we do not need to validate
1477 # the differences anymore.
1478 if old_content
and new_content
:
1479 if self
.find_software_version(
1481 ) != self
.find_software_version(new_content
):
1484 disallowed_change
= DeepDiff(
1485 self
.remove_modifiable_items(old_content
),
1486 self
.remove_modifiable_items(new_content
),
1489 if disallowed_change
:
1490 changed_nodes
= functools
.reduce(
1491 lambda a
, b
: a
+ " , " + b
,
1494 for node
in disallowed_change
.get(
1500 raise EngineException(
1501 f
"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1502 "there are disallowed changes in the vnf descriptor.",
1503 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1513 "VNF Descriptor could not be processed with error: {}.".format(e
)
1517 class NsdTopic(DescriptorTopic
):
1521 def __init__(self
, db
, fs
, msg
, auth
):
1522 super().__init
__(db
, fs
, msg
, auth
)
1524 def pyangbind_validation(self
, item
, data
, force
=False):
1525 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
1526 raise EngineException(
1527 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
1528 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1531 nsd_vnf_profiles
= data
.get("df", [{}])[0].get("vnf-profile", [])
1532 mynsd
= etsi_nfv_nsd
.etsi_nfv_nsd()
1533 pybindJSONDecoder
.load_ietf_json(
1534 {"nsd": {"nsd": [data
]}},
1541 out
= pybindJSON
.dumps(mynsd
, mode
="ietf")
1542 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
1543 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
1544 if nsd_vnf_profiles
:
1545 desc_out
["df"][0]["vnf-profile"] = nsd_vnf_profiles
1547 except Exception as e
:
1548 raise EngineException(
1549 "Error in pyangbind validation: {}".format(str(e
)),
1550 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1554 def _descriptor_data_is_in_old_format(data
):
1555 return ("nsd-catalog" in data
) or ("nsd:nsd-catalog" in data
)
1558 def _remove_envelop(indata
=None):
1561 clean_indata
= indata
1563 if clean_indata
.get("nsd"):
1564 clean_indata
= clean_indata
["nsd"]
1565 elif clean_indata
.get("etsi-nfv-nsd:nsd"):
1566 clean_indata
= clean_indata
["etsi-nfv-nsd:nsd"]
1567 if clean_indata
.get("nsd"):
1569 not isinstance(clean_indata
["nsd"], list)
1570 or len(clean_indata
["nsd"]) != 1
1572 raise EngineException("'nsd' must be a list of only one element")
1573 clean_indata
= clean_indata
["nsd"][0]
1576 def _validate_input_new(self
, indata
, storage_params
, force
=False):
1577 indata
.pop("nsdOnboardingState", None)
1578 indata
.pop("nsdOperationalState", None)
1579 indata
.pop("nsdUsageState", None)
1581 indata
.pop("links", None)
1583 indata
= self
.pyangbind_validation("nsds", indata
, force
)
1584 # Cross references validation in the descriptor
1585 # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
1586 for vld
in get_iterable(indata
.get("virtual-link-desc")):
1587 self
.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld
, indata
)
1588 for fg
in get_iterable(indata
.get("vnffgd")):
1589 self
.validate_vnffgd_data(fg
, indata
)
1591 self
.validate_vnf_profiles_vnfd_id(indata
)
1596 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld
, indata
):
1597 if not vld
.get("mgmt-network"):
1599 vld_id
= vld
.get("id")
1600 for df
in get_iterable(indata
.get("df")):
1601 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1602 if vld_id
and vld_id
== vlp
.get("virtual-link-desc-id"):
1603 if vlp
.get("virtual-link-protocol-data"):
1604 raise EngineException(
1605 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1606 "protocol-data You cannot set a virtual-link-protocol-data "
1607 "when mgmt-network is True".format(df
["id"], vlp
["id"]),
1608 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1612 def validate_vnffgd_data(fg
, indata
):
1614 all_vnf_ids
= set(get_iterable(fg
.get("vnf-profile-id")))
1615 for fgposition
in get_iterable(fg
.get("nfp-position-element")):
1616 position_list
.append(fgposition
["id"])
1618 for nfpd
in get_iterable(fg
.get("nfpd")):
1620 for position
in get_iterable(nfpd
.get("position-desc-id")):
1621 nfp_position
= position
.get("nfp-position-element-id")
1622 if position
== "nfp-position-element-id":
1623 nfp_position
= position
.get("nfp-position-element-id")
1624 if nfp_position
[0] not in position_list
:
1625 raise EngineException(
1626 "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
1627 "does not match any nfp-position-element".format(
1628 nfpd
["id"], nfp_position
[0]
1630 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1633 for cp
in get_iterable(position
.get("cp-profile-id")):
1634 for cpe
in get_iterable(cp
.get("constituent-profile-elements")):
1635 constituent_base_element_id
= cpe
.get(
1636 "constituent-base-element-id"
1639 constituent_base_element_id
1640 and constituent_base_element_id
not in all_vnf_ids
1642 raise EngineException(
1643 "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
1644 "does not match any constituent-base-element-id".format(
1645 cpe
["id"], constituent_base_element_id
1647 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1651 def validate_vnf_profiles_vnfd_id(indata
):
1652 all_vnfd_ids
= set(get_iterable(indata
.get("vnfd-id")))
1653 for df
in get_iterable(indata
.get("df")):
1654 for vnf_profile
in get_iterable(df
.get("vnf-profile")):
1655 vnfd_id
= vnf_profile
.get("vnfd-id")
1656 if vnfd_id
and vnfd_id
not in all_vnfd_ids
:
1657 raise EngineException(
1658 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1659 "does not match any vnfd-id".format(
1660 df
["id"], vnf_profile
["id"], vnfd_id
1662 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1665 def _validate_input_edit(self
, indata
, content
, force
=False):
1666 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
1668 indata looks as follows:
1669 - In the new case (conformant)
1670 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1671 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1672 - In the old case (backwards-compatible)
1673 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1675 if "_admin" not in indata
:
1676 indata
["_admin"] = {}
1678 if "nsdOperationalState" in indata
:
1679 if indata
["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1680 indata
["_admin"]["operationalState"] = indata
.pop("nsdOperationalState")
1682 raise EngineException(
1683 "State '{}' is not a valid operational state".format(
1684 indata
["nsdOperationalState"]
1686 http_code
=HTTPStatus
.BAD_REQUEST
,
1689 # In the case of user defined data, we need to put the data in the root of the object
1690 # to preserve current expected behaviour
1691 if "userDefinedData" in indata
:
1692 data
= indata
.pop("userDefinedData")
1693 if isinstance(data
, dict):
1694 indata
["_admin"]["userDefinedData"] = data
1696 raise EngineException(
1697 "userDefinedData should be an object, but is '{}' instead".format(
1700 http_code
=HTTPStatus
.BAD_REQUEST
,
1703 "operationalState" in indata
["_admin"]
1704 and content
["_admin"]["operationalState"]
1705 == indata
["_admin"]["operationalState"]
1707 raise EngineException(
1708 "nsdOperationalState already {}".format(
1709 content
["_admin"]["operationalState"]
1711 http_code
=HTTPStatus
.CONFLICT
,
1715 def _check_descriptor_dependencies(self
, session
, descriptor
):
1717 Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
1718 connection points are ok
1719 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1720 :param descriptor: descriptor to be inserted or edit
1721 :return: None or raises exception
1723 if session
["force"]:
1725 vnfds_index
= self
._get
_descriptor
_constituent
_vnfds
_index
(session
, descriptor
)
1727 # Cross references validation in the descriptor and vnfd connection point validation
1728 for df
in get_iterable(descriptor
.get("df")):
1729 self
.validate_df_vnf_profiles_constituent_connection_points(df
, vnfds_index
)
1731 def _get_descriptor_constituent_vnfds_index(self
, session
, descriptor
):
1733 if descriptor
.get("vnfd-id") and not session
["force"]:
1734 for vnfd_id
in get_iterable(descriptor
.get("vnfd-id")):
1735 query_filter
= self
._get
_project
_filter
(session
)
1736 query_filter
["id"] = vnfd_id
1737 vnf_list
= self
.db
.get_list("vnfds", query_filter
)
1739 raise EngineException(
1740 "Descriptor error at 'vnfd-id'='{}' references a non "
1741 "existing vnfd".format(vnfd_id
),
1742 http_code
=HTTPStatus
.CONFLICT
,
1744 vnfds_index
[vnfd_id
] = vnf_list
[0]
1748 def validate_df_vnf_profiles_constituent_connection_points(df
, vnfds_index
):
1749 for vnf_profile
in get_iterable(df
.get("vnf-profile")):
1750 vnfd
= vnfds_index
.get(vnf_profile
["vnfd-id"])
1751 all_vnfd_ext_cpds
= set()
1752 for ext_cpd
in get_iterable(vnfd
.get("ext-cpd")):
1753 if ext_cpd
.get("id"):
1754 all_vnfd_ext_cpds
.add(ext_cpd
.get("id"))
1756 for virtual_link
in get_iterable(
1757 vnf_profile
.get("virtual-link-connectivity")
1759 for vl_cpd
in get_iterable(virtual_link
.get("constituent-cpd-id")):
1760 vl_cpd_id
= vl_cpd
.get("constituent-cpd-id")
1761 if vl_cpd_id
and vl_cpd_id
not in all_vnfd_ext_cpds
:
1762 raise EngineException(
1763 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1764 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1765 "non existing ext-cpd:id inside vnfd '{}'".format(
1768 virtual_link
["virtual-link-profile-id"],
1772 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1775 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
1776 final_content
= super().check_conflict_on_edit(
1777 session
, final_content
, edit_content
, _id
1780 self
._check
_descriptor
_dependencies
(session
, final_content
)
1782 return final_content
1784 def check_conflict_on_del(self
, session
, _id
, db_content
):
1786 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1787 that NSD can be public and be used by other projects.
1788 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1789 :param _id: nsd internal id
1790 :param db_content: The database content of the _id
1791 :return: None or raises EngineException with the conflict
1793 if session
["force"]:
1795 descriptor
= db_content
1796 descriptor_id
= descriptor
.get("id")
1797 if not descriptor_id
: # empty nsd not uploaded
1800 # check NSD used by NS
1801 _filter
= self
._get
_project
_filter
(session
)
1802 _filter
["nsd-id"] = _id
1803 if self
.db
.get_list("nsrs", _filter
):
1804 raise EngineException(
1805 "There is at least one NS instance using this descriptor",
1806 http_code
=HTTPStatus
.CONFLICT
,
1809 # check NSD referenced by NST
1810 del _filter
["nsd-id"]
1811 _filter
["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1812 if self
.db
.get_list("nsts", _filter
):
1813 raise EngineException(
1814 "There is at least one NetSlice Template referencing this descriptor",
1815 http_code
=HTTPStatus
.CONFLICT
,
1818 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1820 Deletes associate file system storage (via super)
1821 Deletes associated vnfpkgops from database.
1822 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1823 :param _id: server internal id
1824 :param db_content: The database content of the descriptor
1826 :raises: FsException in case of error while deleting associated storage
1828 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1829 self
.db
.del_list(self
.topic
+ "_revisions", {"_id": {"$regex": _id
}})
1832 def extract_day12_primitives(nsd
: dict) -> dict:
1833 """Removes the day12 primitives from the NSD descriptors
1836 nsd (dict): Descriptor as a dictionary
1839 nsd (dict): Cleared NSD
1841 if nsd
.get("ns-configuration"):
1844 "initial-config-primitive",
1845 "terminate-config-primitive",
1847 nsd
["ns-configuration"].pop(key
, None)
1850 def remove_modifiable_items(self
, nsd
: dict) -> dict:
1851 """Removes the modifiable parts from the VNFD descriptors
1853 It calls different extract functions according to different update types
1854 to clear all the modifiable items from NSD
1857 nsd (dict): Descriptor as a dictionary
1860 nsd (dict): Descriptor which does not include modifiable contents
1862 while isinstance(nsd
, dict) and nsd
.get("nsd"):
1864 if isinstance(nsd
, list):
1866 nsd
.pop("_admin", None)
1867 # If the more extractions need to be done from NSD,
1868 # the new extract methods could be appended to below list.
1869 for extract_function
in [self
.extract_day12_primitives
]:
1870 nsd_temp
= extract_function(nsd
)
1874 def _validate_descriptor_changes(
1877 descriptor_file_name
: str,
1878 old_descriptor_directory
: str,
1879 new_descriptor_directory
: str,
1881 """Compares the old and new NSD descriptors and validates the new descriptor
1884 old_descriptor_directory: Directory of descriptor which is in-use
1885 new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)
1891 EngineException: In case of error if the changes are not allowed
1895 # If NSD does not exist in DB, or it is not in use by any NS,
1896 # validation is not required.
1897 nsd
= self
.db
.get_one("nsds", {"_id": descriptor_id
}, fail_on_empty
=False)
1898 if not nsd
or not detect_descriptor_usage(nsd
, "nsds", self
.db
):
1901 # Get the old and new descriptor contents in order to compare them.
1902 with self
.fs
.file_open(
1903 (old_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1904 ) as old_descriptor_file
:
1905 with self
.fs
.file_open(
1906 (new_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1907 ) as new_descriptor_file
:
1908 old_content
= yaml
.safe_load(old_descriptor_file
.read())
1909 new_content
= yaml
.safe_load(new_descriptor_file
.read())
1911 if old_content
and new_content
:
1912 disallowed_change
= DeepDiff(
1913 self
.remove_modifiable_items(old_content
),
1914 self
.remove_modifiable_items(new_content
),
1917 if disallowed_change
:
1918 changed_nodes
= functools
.reduce(
1919 lambda a
, b
: a
+ ", " + b
,
1922 for node
in disallowed_change
.get(
1928 raise EngineException(
1929 f
"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1930 "there are disallowed changes in the ns descriptor. ",
1931 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1941 "NS Descriptor could not be processed with error: {}.".format(e
)
1944 def sol005_projection(self
, data
):
1945 data
["nsdOnboardingState"] = data
["_admin"]["onboardingState"]
1946 data
["nsdOperationalState"] = data
["_admin"]["operationalState"]
1947 data
["nsdUsageState"] = data
["_admin"]["usageState"]
1950 links
["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data
["_id"])}
1951 links
["nsd_content"] = {
1952 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data
["_id"])
1954 data
["_links"] = links
1956 return super().sol005_projection(data
)
1959 class NstTopic(DescriptorTopic
):
1962 quota_name
= "slice_templates"
1964 def __init__(self
, db
, fs
, msg
, auth
):
1965 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
1967 def pyangbind_validation(self
, item
, data
, force
=False):
1970 pybindJSONDecoder
.load_ietf_json(
1978 out
= pybindJSON
.dumps(mynst
, mode
="ietf")
1979 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
1981 except Exception as e
:
1982 raise EngineException(
1983 "Error in pyangbind validation: {}".format(str(e
)),
1984 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1988 def _remove_envelop(indata
=None):
1991 clean_indata
= indata
1993 if clean_indata
.get("nst"):
1995 not isinstance(clean_indata
["nst"], list)
1996 or len(clean_indata
["nst"]) != 1
1998 raise EngineException("'nst' must be a list only one element")
1999 clean_indata
= clean_indata
["nst"][0]
2000 elif clean_indata
.get("nst:nst"):
2002 not isinstance(clean_indata
["nst:nst"], list)
2003 or len(clean_indata
["nst:nst"]) != 1
2005 raise EngineException("'nst:nst' must be a list only one element")
2006 clean_indata
= clean_indata
["nst:nst"][0]
2009 def _validate_input_new(self
, indata
, storage_params
, force
=False):
2010 indata
.pop("onboardingState", None)
2011 indata
.pop("operationalState", None)
2012 indata
.pop("usageState", None)
2013 indata
= self
.pyangbind_validation("nsts", indata
, force
)
2014 return indata
.copy()
2016 def _check_descriptor_dependencies(self
, session
, descriptor
):
2018 Check that the dependent descriptors exist on a new descriptor or edition
2019 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
2020 :param descriptor: descriptor to be inserted or edit
2021 :return: None or raises exception
2023 if not descriptor
.get("netslice-subnet"):
2025 for nsd
in descriptor
["netslice-subnet"]:
2026 nsd_id
= nsd
["nsd-ref"]
2027 filter_q
= self
._get
_project
_filter
(session
)
2028 filter_q
["id"] = nsd_id
2029 if not self
.db
.get_list("nsds", filter_q
):
2030 raise EngineException(
2031 "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
2032 "existing nsd".format(nsd_id
),
2033 http_code
=HTTPStatus
.CONFLICT
,
2036 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
2037 final_content
= super().check_conflict_on_edit(
2038 session
, final_content
, edit_content
, _id
2041 self
._check
_descriptor
_dependencies
(session
, final_content
)
2042 return final_content
2044 def check_conflict_on_del(self
, session
, _id
, db_content
):
2046 Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
2047 that NST can be public and be used by other projects.
2048 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
2049 :param _id: nst internal id
2050 :param db_content: The database content of the _id.
2051 :return: None or raises EngineException with the conflict
2053 # TODO: Check this method
2054 if session
["force"]:
2056 # Get Network Slice Template from Database
2057 _filter
= self
._get
_project
_filter
(session
)
2058 _filter
["_admin.nst-id"] = _id
2059 if self
.db
.get_list("nsis", _filter
):
2060 raise EngineException(
2061 "there is at least one Netslice Instance using this descriptor",
2062 http_code
=HTTPStatus
.CONFLICT
,
2065 def sol005_projection(self
, data
):
2066 data
["onboardingState"] = data
["_admin"]["onboardingState"]
2067 data
["operationalState"] = data
["_admin"]["operationalState"]
2068 data
["usageState"] = data
["_admin"]["usageState"]
2071 links
["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data
["_id"])}
2072 links
["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data
["_id"])}
2073 data
["_links"] = links
2075 return super().sol005_projection(data
)
class PduTopic(BaseTopic):
    """Topic in charge of Physical Deployment Unit descriptors (PDUs)."""

    # NOTE(review): collection/kafka identifiers restored to match the
    # visible quota_name — confirm against upstream.
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    # Fix: this method takes no "self"; it must be a staticmethod to be
    # callable both on the class and on instances.
    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Initialize the "_admin" state fields of a PDU being created.

        Delegates the common "_admin" bookkeeping to BaseTopic and then
        sets the SOL005-style state machine to its initial values.
        """
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "ENABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # Fix: the force flag must skip the conflict check entirely;
        # the early return was missing.
        if session["force"]:
            return
        _filter = self._get_project_filter(session)
        # Any vnfr whose VDUs reference this PDU blocks the deletion.
        _filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
class VnfPkgOpTopic(BaseTopic):
    """Topic handling VNF package operations (vnfpkg_op_occs).

    Only creation ("new") and read are supported; edit/delete/delete_list
    are rejected with METHOD_NOT_ALLOWED.
    """

    # NOTE(review): "new" reads self.topic / self.topic_msg, so these class
    # identifiers are required — values restored; confirm against upstream.
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing a package operation occurrence is not allowed; always raises."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting a package operation occurrence is not allowed; always raises."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of package operation occurrences is not allowed; always raises."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # Fix: restore the for/else lookup — without the break/else the
        # method was syntactically invalid and helm_chart/juju_bundle
        # could be unbound.
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # A KDU references either a helm chart or a juju bundle; the repo
        # name is the part before "/" when the artifact is namespaced.
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            # Re-use the project filter, replacing the _id criterion.
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # Notify LCM through kafka so the operation is actually executed.
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None