1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
26 from deepdiff
import DeepDiff
27 from hashlib
import md5
28 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
29 from http
import HTTPStatus
31 from uuid
import uuid4
32 from re
import fullmatch
33 from zipfile
import ZipFile
34 from osm_nbi
.validation
import (
41 from osm_nbi
.base_topic
import BaseTopic
, EngineException
, get_iterable
42 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
43 from osm_im
.nst
import nst
as nst_im
44 from pyangbind
.lib
.serialise
import pybindJSONDecoder
45 import pyangbind
.lib
.pybindJSON
as pybindJSON
46 from osm_nbi
import utils
48 __author__
= "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
50 valid_helm_chart_re
= re
.compile(
51 r
"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
55 class DescriptorTopic(BaseTopic
):
56 def __init__(self
, db
, fs
, msg
, auth
):
57 super().__init
__(db
, fs
, msg
, auth
)
59 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
60 final_content
= super().check_conflict_on_edit(
61 session
, final_content
, edit_content
, _id
64 def _check_unique_id_name(descriptor
, position
=""):
65 for desc_key
, desc_item
in descriptor
.items():
66 if isinstance(desc_item
, list) and desc_item
:
69 for index
, list_item
in enumerate(desc_item
):
70 if isinstance(list_item
, dict):
71 _check_unique_id_name(
72 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
76 list_item
.get("id") or list_item
.get("name")
78 desc_item_id
= "id" if list_item
.get("id") else "name"
79 if desc_item_id
and list_item
.get(desc_item_id
):
80 if list_item
[desc_item_id
] in used_ids
:
81 position
= "{}.{}[{}]".format(
82 position
, desc_key
, index
84 raise EngineException(
85 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
87 list_item
[desc_item_id
],
90 HTTPStatus
.UNPROCESSABLE_ENTITY
,
92 used_ids
.append(list_item
[desc_item_id
])
94 _check_unique_id_name(final_content
)
95 # 1. validate again with pyangbind
96 # 1.1. remove internal keys
98 for k
in ("_id", "_admin"):
99 if k
in final_content
:
100 internal_keys
[k
] = final_content
.pop(k
)
101 storage_params
= internal_keys
["_admin"].get("storage")
102 serialized
= self
._validate
_input
_new
(
103 final_content
, storage_params
, session
["force"]
106 # 1.2. modify final_content with a serialized version
107 final_content
= copy
.deepcopy(serialized
)
108 # 1.3. restore internal keys
109 for k
, v
in internal_keys
.items():
114 # 2. check that this id is not present
115 if "id" in edit_content
:
116 _filter
= self
._get
_project
_filter
(session
)
118 _filter
["id"] = final_content
["id"]
119 _filter
["_id.neq"] = _id
121 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
122 raise EngineException(
123 "{} with id '{}' already exists for this project".format(
124 self
.topic
[:-1], final_content
["id"]
132 def format_on_new(content
, project_id
=None, make_public
=False):
133 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
134 content
["_admin"]["onboardingState"] = "CREATED"
135 content
["_admin"]["operationalState"] = "DISABLED"
136 content
["_admin"]["usageState"] = "NOT_IN_USE"
138 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
140 Deletes file system storage associated with the descriptor
141 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
142 :param _id: server internal id
143 :param db_content: The database content of the descriptor
144 :param not_send_msg: To not send message (False) or store content (list) instead
145 :return: None if ok or raises EngineException with the problem
147 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
148 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
149 # Remove file revisions
150 if "revision" in db_content
["_admin"]:
151 revision
= db_content
["_admin"]["revision"]
153 self
.fs
.file_delete(_id
+ ":" + str(revision
), ignore_non_exist
=True)
154 revision
= revision
- 1
158 def get_one_by_id(db
, session
, topic
, id):
159 # find owned by this project
160 _filter
= BaseTopic
._get
_project
_filter
(session
)
162 desc_list
= db
.get_list(topic
, _filter
)
163 if len(desc_list
) == 1:
165 elif len(desc_list
) > 1:
167 "Found more than one {} with id='{}' belonging to this project".format(
173 # not found any: try to find public
174 _filter
= BaseTopic
._get
_project
_filter
(session
)
176 desc_list
= db
.get_list(topic
, _filter
)
179 "Not found any {} with id='{}'".format(topic
[:-1], id),
180 HTTPStatus
.NOT_FOUND
,
182 elif len(desc_list
) == 1:
186 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
192 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
194 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
195 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
196 (self.upload_content)
197 :param rollback: list to append created items at database in case a rollback may to be done
198 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
199 :param indata: data to be inserted
200 :param kwargs: used to override the indata descriptor
201 :param headers: http request headers
202 :return: _id, None: identity of the inserted data; and None as there is not any operation
205 # No needed to capture exceptions
207 self
.check_quota(session
)
211 if "userDefinedData" in indata
:
212 indata
= indata
["userDefinedData"]
214 # Override descriptor with query string kwargs
215 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
216 # uncomment when this method is implemented.
217 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
218 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
220 content
= {"_admin": {
221 "userDefinedData": indata
,
226 content
, session
["project_id"], make_public
=session
["public"]
228 _id
= self
.db
.create(self
.topic
, content
)
229 rollback
.append({"topic": self
.topic
, "_id": _id
})
230 self
._send
_msg
("created", {"_id": _id
})
233 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
235 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
236 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
237 :param _id : the nsd,vnfd is already created, this is the id
238 :param indata: http body request
239 :param kwargs: user query string to override parameters. NOT USED
240 :param headers: http request headers
241 :return: True if package is completely uploaded or False if partial content has been uploded
242 Raise exception on error
244 # Check that _id exists and it is valid
245 current_desc
= self
.show(session
, _id
)
247 content_range_text
= headers
.get("Content-Range")
248 expected_md5
= headers
.get("Content-File-MD5")
250 content_type
= headers
.get("Content-Type")
253 and "application/gzip" in content_type
254 or "application/x-gzip" in content_type
259 and "application/zip" in content_type
262 filename
= headers
.get("Content-Filename")
263 if not filename
and compressed
:
264 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
269 if "revision" in current_desc
["_admin"]:
270 revision
= current_desc
["_admin"]["revision"] + 1
272 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
278 if content_range_text
:
280 content_range_text
.replace("-", " ").replace("/", " ").split()
283 content_range
[0] != "bytes"
284 ): # TODO check x<y not negative < total....
286 start
= int(content_range
[1])
287 end
= int(content_range
[2]) + 1
288 total
= int(content_range
[3])
291 # Rather than using a temp folder, we will store the package in a folder based on
292 # the current revision.
293 proposed_revision_path
= (
294 _id
+ ":" + str(revision
)
295 ) # all the content is upload here and if ok, it is rename from id_ to is folder
298 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
299 raise EngineException(
300 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
303 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
304 self
.fs
.mkdir(proposed_revision_path
)
305 fs_rollback
.append(proposed_revision_path
)
307 storage
= self
.fs
.get_params()
308 storage
["folder"] = proposed_revision_path
310 file_path
= (proposed_revision_path
, filename
)
311 if self
.fs
.file_exists(file_path
, "file"):
312 file_size
= self
.fs
.file_size(file_path
)
315 if file_size
!= start
:
316 raise EngineException(
317 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
320 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
322 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
323 if isinstance(indata
, dict):
324 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
325 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
329 indata_text
= indata
.read(4096)
330 indata_len
+= len(indata_text
)
333 file_pkg
.write(indata_text
)
334 if content_range_text
:
335 if indata_len
!= end
- start
:
336 raise EngineException(
337 "Mismatch between Content-Range header {}-{} and body length of {}".format(
338 start
, end
- 1, indata_len
340 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
343 # TODO update to UPLOADING
350 chunk_data
= file_pkg
.read(1024)
352 file_md5
.update(chunk_data
)
353 chunk_data
= file_pkg
.read(1024)
354 if expected_md5
!= file_md5
.hexdigest():
355 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
357 if compressed
== "gzip":
358 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
359 descriptor_file_name
= None
361 tarname
= tarinfo
.name
362 tarname_path
= tarname
.split("/")
364 not tarname_path
[0] or ".." in tarname_path
365 ): # if start with "/" means absolute path
366 raise EngineException(
367 "Absolute path or '..' are not allowed for package descriptor tar.gz"
369 if len(tarname_path
) == 1 and not tarinfo
.isdir():
370 raise EngineException(
371 "All files must be inside a dir for package descriptor tar.gz"
374 tarname
.endswith(".yaml")
375 or tarname
.endswith(".json")
376 or tarname
.endswith(".yml")
378 storage
["pkg-dir"] = tarname_path
[0]
379 if len(tarname_path
) == 2:
380 if descriptor_file_name
:
381 raise EngineException(
382 "Found more than one descriptor file at package descriptor tar.gz"
384 descriptor_file_name
= tarname
385 if not descriptor_file_name
:
386 raise EngineException(
387 "Not found any descriptor file at package descriptor tar.gz"
389 storage
["descriptor"] = descriptor_file_name
390 storage
["zipfile"] = filename
391 self
.fs
.file_extract(tar
, proposed_revision_path
)
392 with self
.fs
.file_open(
393 (proposed_revision_path
, descriptor_file_name
), "r"
394 ) as descriptor_file
:
395 content
= descriptor_file
.read()
396 elif compressed
== "zip":
397 zipfile
= ZipFile(file_pkg
)
398 descriptor_file_name
= None
399 for package_file
in zipfile
.infolist():
400 zipfilename
= package_file
.filename
401 file_path
= zipfilename
.split("/")
403 not file_path
[0] or ".." in zipfilename
404 ): # if start with "/" means absolute path
405 raise EngineException(
406 "Absolute path or '..' are not allowed for package descriptor zip"
411 zipfilename
.endswith(".yaml")
412 or zipfilename
.endswith(".json")
413 or zipfilename
.endswith(".yml")
415 zipfilename
.find("/") < 0
416 or zipfilename
.find("Definitions") >= 0
419 storage
["pkg-dir"] = ""
420 if descriptor_file_name
:
421 raise EngineException(
422 "Found more than one descriptor file at package descriptor zip"
424 descriptor_file_name
= zipfilename
425 if not descriptor_file_name
:
426 raise EngineException(
427 "Not found any descriptor file at package descriptor zip"
429 storage
["descriptor"] = descriptor_file_name
430 storage
["zipfile"] = filename
431 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
433 with self
.fs
.file_open(
434 (proposed_revision_path
, descriptor_file_name
), "r"
435 ) as descriptor_file
:
436 content
= descriptor_file
.read()
438 content
= file_pkg
.read()
439 storage
["descriptor"] = descriptor_file_name
= filename
441 if descriptor_file_name
.endswith(".json"):
442 error_text
= "Invalid json format "
443 indata
= json
.load(content
)
445 error_text
= "Invalid yaml format "
446 indata
= yaml
.load(content
, Loader
=yaml
.SafeLoader
)
448 # Need to close the file package here so it can be copied from the
449 # revision to the current, unrevisioned record
454 # Fetch both the incoming, proposed revision and the original revision so we
455 # can call a validate method to compare them
456 current_revision_path
= _id
+ "/"
457 self
.fs
.sync(from_path
=current_revision_path
)
458 self
.fs
.sync(from_path
=proposed_revision_path
)
462 self
._validate
_descriptor
_changes
(
463 descriptor_file_name
,
464 current_revision_path
,
465 proposed_revision_path
)
466 except Exception as e
:
467 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
468 shutil
.rmtree(self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True)
469 # Only delete the new revision. We need to keep the original version in place
470 # as it has not been changed.
471 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
475 indata
= self
._remove
_envelop
(indata
)
477 # Override descriptor with query string kwargs
479 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
481 current_desc
["_admin"]["storage"] = storage
482 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
483 current_desc
["_admin"]["operationalState"] = "ENABLED"
484 current_desc
["_admin"]["modified"] = time()
485 current_desc
["_admin"]["revision"] = revision
487 deep_update_rfc7396(current_desc
, indata
)
488 current_desc
= self
.check_conflict_on_edit(
489 session
, current_desc
, indata
, _id
=_id
492 # Copy the revision to the active package name by its original id
493 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
494 os
.rename(self
.fs
.path
+ proposed_revision_path
, self
.fs
.path
+ current_revision_path
)
495 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
496 self
.fs
.mkdir(current_revision_path
)
497 self
.fs
.reverse_sync(from_path
=current_revision_path
)
499 shutil
.rmtree(self
.fs
.path
+ _id
)
501 self
.db
.replace(self
.topic
, _id
, current_desc
)
503 # Store a copy of the package as a point in time revision
504 revision_desc
= dict(current_desc
)
505 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
506 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
510 self
._send
_msg
("edited", indata
)
512 # TODO if descriptor has changed because kwargs update content and remove cached zip
513 # TODO if zip is not present creates one
516 except EngineException
:
519 raise EngineException(
520 "invalid Content-Range header format. Expected 'bytes start-end/total'",
521 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
524 raise EngineException(
525 "invalid upload transaction sequence: '{}'".format(e
),
526 HTTPStatus
.BAD_REQUEST
,
528 except tarfile
.ReadError
as e
:
529 raise EngineException(
530 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
532 except (ValueError, yaml
.YAMLError
) as e
:
533 raise EngineException(error_text
+ str(e
))
534 except ValidationError
as e
:
535 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
539 for file in fs_rollback
:
540 self
.fs
.file_delete(file, ignore_non_exist
=True)
542 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
544 Return the file content of a vnfd or nsd
545 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
546 :param _id: Identity of the vnfd, nsd
547 :param path: artifact path or "$DESCRIPTOR" or None
548 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
549 :return: opened file plus Accept format or raises an exception
551 accept_text
= accept_zip
= False
553 if "text/plain" in accept_header
or "*/*" in accept_header
:
555 if "application/zip" in accept_header
or "*/*" in accept_header
:
556 accept_zip
= "application/zip"
557 elif "application/gzip" in accept_header
:
558 accept_zip
= "application/gzip"
560 if not accept_text
and not accept_zip
:
561 raise EngineException(
562 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
563 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
566 content
= self
.show(session
, _id
)
567 if content
["_admin"]["onboardingState"] != "ONBOARDED":
568 raise EngineException(
569 "Cannot get content because this resource is not at 'ONBOARDED' state. "
570 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
571 http_code
=HTTPStatus
.CONFLICT
,
573 storage
= content
["_admin"]["storage"]
574 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
575 if not storage
.get("pkg-dir") and not storage
.get("folder"):
576 raise EngineException(
577 "Packages does not contains artifacts",
578 http_code
=HTTPStatus
.BAD_REQUEST
,
580 if self
.fs
.file_exists(
581 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
583 folder_content
= self
.fs
.dir_ls(
584 (storage
["folder"], storage
["pkg-dir"], *path
)
586 return folder_content
, "text/plain"
587 # TODO manage folders in http
591 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
593 "application/octet-stream",
596 # pkgtype accept ZIP TEXT -> result
597 # manyfiles yes X -> zip
599 # onefile yes no -> zip
601 contain_many_files
= False
602 if storage
.get("pkg-dir"):
603 # check if there are more than one file in the package, ignoring checksums.txt.
604 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
605 if len(pkg_files
) >= 3 or (
606 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
608 contain_many_files
= True
609 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
611 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
614 elif contain_many_files
and not accept_zip
:
615 raise EngineException(
616 "Packages that contains several files need to be retrieved with 'application/zip'"
618 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
621 if not storage
.get("zipfile"):
622 # TODO generate zipfile if not present
623 raise EngineException(
624 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
626 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
629 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
633 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
635 for k
, v
in descriptor
.items():
637 if isinstance(v
, dict):
638 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
639 elif isinstance(v
, list):
642 if isinstance(x
, dict):
643 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
646 new_descriptor
[k
.split(":")[-1]] = new_v
647 return new_descriptor
649 def pyangbind_validation(self
, item
, data
, force
=False):
650 raise EngineException(
651 "Not possible to validate '{}' item".format(item
),
652 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
655 def _validate_input_edit(self
, indata
, content
, force
=False):
656 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
659 if "_admin" not in indata
:
660 indata
["_admin"] = {}
662 if "operationalState" in indata
:
663 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
664 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
666 raise EngineException(
667 "State '{}' is not a valid operational state".format(
668 indata
["operationalState"]
670 http_code
=HTTPStatus
.BAD_REQUEST
,
673 # In the case of user defined data, we need to put the data in the root of the object
674 # to preserve current expected behaviour
675 if "userDefinedData" in indata
:
676 data
= indata
.pop("userDefinedData")
677 if isinstance(data
, dict):
678 indata
["_admin"]["userDefinedData"] = data
680 raise EngineException(
681 "userDefinedData should be an object, but is '{}' instead".format(
684 http_code
=HTTPStatus
.BAD_REQUEST
,
688 "operationalState" in indata
["_admin"]
689 and content
["_admin"]["operationalState"]
690 == indata
["_admin"]["operationalState"]
692 raise EngineException(
693 "operationalState already {}".format(
694 content
["_admin"]["operationalState"]
696 http_code
=HTTPStatus
.CONFLICT
,
701 def _validate_descriptor_changes(self
,
702 descriptor_file_name
,
703 old_descriptor_directory
,
704 new_descriptor_directory
):
705 # Todo: compare changes and throw a meaningful exception for the user to understand
707 # raise EngineException(
708 # "Error in validating new descriptor: <NODE> cannot be modified",
709 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
713 class VnfdTopic(DescriptorTopic
):
717 def __init__(self
, db
, fs
, msg
, auth
):
718 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
720 def pyangbind_validation(self
, item
, data
, force
=False):
721 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
722 raise EngineException(
723 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
724 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
727 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
728 pybindJSONDecoder
.load_ietf_json(
729 {"etsi-nfv-vnfd:vnfd": data
},
736 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
737 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
738 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
739 return utils
.deep_update_dict(data
, desc_out
)
740 except Exception as e
:
741 raise EngineException(
742 "Error in pyangbind validation: {}".format(str(e
)),
743 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
747 def _descriptor_data_is_in_old_format(data
):
748 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
751 def _remove_envelop(indata
=None):
754 clean_indata
= indata
756 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
757 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
758 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
759 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
760 elif clean_indata
.get("vnfd"):
761 if not isinstance(clean_indata
["vnfd"], dict):
762 raise EngineException("'vnfd' must be dict")
763 clean_indata
= clean_indata
["vnfd"]
767 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
768 final_content
= super().check_conflict_on_edit(
769 session
, final_content
, edit_content
, _id
775 for vdu
in get_iterable(final_content
.get("vdu")):
776 if vdu
.get("pdu-type"):
781 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
783 final_content
["_admin"]["type"] = "vnfd"
784 # if neither vud nor pdu do not fill type
787 def check_conflict_on_del(self
, session
, _id
, db_content
):
789 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
790 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
792 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
793 :param _id: vnfd internal id
794 :param db_content: The database content of the _id.
795 :return: None or raises EngineException with the conflict
799 descriptor
= db_content
800 descriptor_id
= descriptor
.get("id")
801 if not descriptor_id
: # empty vnfd not uploaded
804 _filter
= self
._get
_project
_filter
(session
)
806 # check vnfrs using this vnfd
807 _filter
["vnfd-id"] = _id
808 if self
.db
.get_list("vnfrs", _filter
):
809 raise EngineException(
810 "There is at least one VNF instance using this descriptor",
811 http_code
=HTTPStatus
.CONFLICT
,
814 # check NSD referencing this VNFD
815 del _filter
["vnfd-id"]
816 _filter
["vnfd-id"] = descriptor_id
817 if self
.db
.get_list("nsds", _filter
):
818 raise EngineException(
819 "There is at least one NS package referencing this descriptor",
820 http_code
=HTTPStatus
.CONFLICT
,
823 def _validate_input_new(self
, indata
, storage_params
, force
=False):
824 indata
.pop("onboardingState", None)
825 indata
.pop("operationalState", None)
826 indata
.pop("usageState", None)
827 indata
.pop("links", None)
829 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
830 # Cross references validation in the descriptor
832 self
.validate_mgmt_interface_connection_point(indata
)
834 for vdu
in get_iterable(indata
.get("vdu")):
835 self
.validate_vdu_internal_connection_points(vdu
)
836 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
837 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
839 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
841 self
.validate_external_connection_points(indata
)
842 self
.validate_internal_virtual_links(indata
)
843 self
.validate_monitoring_params(indata
)
844 self
.validate_scaling_group_descriptor(indata
)
845 self
.validate_helm_chart(indata
)
850 def validate_helm_chart(indata
):
851 kdus
= indata
.get("kdu", [])
853 helm_chart_value
= kdu
.get("helm-chart")
854 if not helm_chart_value
:
856 if not valid_helm_chart_re
.match(helm_chart_value
):
857 raise EngineException(
858 "helm-chart '{}' is not valid".format(helm_chart_value
),
859 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
863 def validate_mgmt_interface_connection_point(indata
):
864 if not indata
.get("vdu"):
866 if not indata
.get("mgmt-cp"):
867 raise EngineException(
868 "'mgmt-cp' is a mandatory field and it is not defined",
869 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
872 for cp
in get_iterable(indata
.get("ext-cpd")):
873 if cp
["id"] == indata
["mgmt-cp"]:
876 raise EngineException(
877 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
878 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
882 def validate_vdu_internal_connection_points(vdu
):
884 for cpd
in get_iterable(vdu
.get("int-cpd")):
885 cpd_id
= cpd
.get("id")
886 if cpd_id
and cpd_id
in int_cpds
:
887 raise EngineException(
888 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
891 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
896 def validate_external_connection_points(indata
):
897 all_vdus_int_cpds
= set()
898 for vdu
in get_iterable(indata
.get("vdu")):
899 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
900 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
903 for cpd
in get_iterable(indata
.get("ext-cpd")):
904 cpd_id
= cpd
.get("id")
905 if cpd_id
and cpd_id
in ext_cpds
:
906 raise EngineException(
907 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
908 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
912 int_cpd
= cpd
.get("int-cpd")
914 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
915 raise EngineException(
916 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
919 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
921 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
923 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
924 for df
in indata
["df"]:
926 "lcm-operations-configuration" in df
927 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
929 configs
= df
["lcm-operations-configuration"][
930 "operate-vnf-op-config"
932 vdus
= df
.get("vdu-profile", [])
934 for config
in configs
:
935 if config
["id"] == vdu
["id"] and utils
.find_in_list(
936 config
.get("execution-environment-list", []),
937 lambda ee
: "juju" in ee
,
939 if not self
._validate
_package
_folders
(
940 storage_params
, "charms"
941 ) and not self
._validate
_package
_folders
(
942 storage_params
, "Scripts/charms"
944 raise EngineException(
945 "Charm defined in vnf[id={}] but not present in "
946 "package".format(indata
["id"])
949 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
950 if not vdu
.get("cloud-init-file"):
952 if not self
._validate
_package
_folders
(
953 storage_params
, "cloud_init", vdu
["cloud-init-file"]
954 ) and not self
._validate
_package
_folders
(
955 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
957 raise EngineException(
958 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
959 "package".format(indata
["id"], vdu
["id"])
962 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
963 # Get VNF configuration through new container
964 for deployment_flavor
in indata
.get("df", []):
965 if "lcm-operations-configuration" not in deployment_flavor
:
968 "operate-vnf-op-config"
969 not in deployment_flavor
["lcm-operations-configuration"]
972 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
973 "operate-vnf-op-config"
975 if day_1_2_config
["id"] == indata
["id"]:
976 if utils
.find_in_list(
977 day_1_2_config
.get("execution-environment-list", []),
978 lambda ee
: "juju" in ee
,
980 if not self
._validate
_package
_folders
(
981 storage_params
, "charms"
982 ) and not self
._validate
_package
_folders
(
983 storage_params
, "Scripts/charms"
985 raise EngineException(
986 "Charm defined in vnf[id={}] but not present in "
987 "package".format(indata
["id"])
990 def _validate_package_folders(self
, storage_params
, folder
, file=None):
991 if not storage_params
:
993 elif not storage_params
.get("pkg-dir"):
994 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
996 storage_params
["folder"], folder
1000 storage_params
["folder"], folder
1003 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1005 if self
.fs
.file_exists(f
, "dir"):
1006 if self
.fs
.dir_ls(f
):
1010 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1011 f
= "{}_/{}/{}".format(
1012 storage_params
["folder"], storage_params
["pkg-dir"], folder
1015 f
= "{}/{}/{}".format(
1016 storage_params
["folder"], storage_params
["pkg-dir"], folder
1019 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1021 if self
.fs
.file_exists(f
, "dir"):
1022 if self
.fs
.dir_ls(f
):
1027 def validate_internal_virtual_links(indata
):
1028 all_ivld_ids
= set()
1029 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1030 ivld_id
= ivld
.get("id")
1031 if ivld_id
and ivld_id
in all_ivld_ids
:
1032 raise EngineException(
1033 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id
),
1034 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1037 all_ivld_ids
.add(ivld_id
)
1039 for vdu
in get_iterable(indata
.get("vdu")):
1040 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
1041 int_cpd_ivld_id
= int_cpd
.get("int-virtual-link-desc")
1042 if int_cpd_ivld_id
and int_cpd_ivld_id
not in all_ivld_ids
:
1043 raise EngineException(
1044 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1045 "int-virtual-link-desc".format(
1046 vdu
["id"], int_cpd
["id"], int_cpd_ivld_id
1048 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1051 for df
in get_iterable(indata
.get("df")):
1052 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1053 vlp_ivld_id
= vlp
.get("id")
1054 if vlp_ivld_id
and vlp_ivld_id
not in all_ivld_ids
:
1055 raise EngineException(
1056 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1057 "int-virtual-link-desc".format(df
["id"], vlp_ivld_id
),
1058 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1062 def validate_monitoring_params(indata
):
1063 all_monitoring_params
= set()
1064 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1065 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1066 mp_id
= mp
.get("id")
1067 if mp_id
and mp_id
in all_monitoring_params
:
1068 raise EngineException(
1069 "Duplicated monitoring-parameter id in "
1070 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1073 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1076 all_monitoring_params
.add(mp_id
)
1078 for vdu
in get_iterable(indata
.get("vdu")):
1079 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1080 mp_id
= mp
.get("id")
1081 if mp_id
and mp_id
in all_monitoring_params
:
1082 raise EngineException(
1083 "Duplicated monitoring-parameter id in "
1084 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1087 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1090 all_monitoring_params
.add(mp_id
)
1092 for df
in get_iterable(indata
.get("df")):
1093 for mp
in get_iterable(df
.get("monitoring-parameter")):
1094 mp_id
= mp
.get("id")
1095 if mp_id
and mp_id
in all_monitoring_params
:
1096 raise EngineException(
1097 "Duplicated monitoring-parameter id in "
1098 "df[id='{}']:monitoring-parameter[id='{}']".format(
1101 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1104 all_monitoring_params
.add(mp_id
)
1107 def validate_scaling_group_descriptor(indata
):
1108 all_monitoring_params
= set()
1109 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1110 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1111 all_monitoring_params
.add(mp
.get("id"))
1113 for vdu
in get_iterable(indata
.get("vdu")):
1114 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1115 all_monitoring_params
.add(mp
.get("id"))
1117 for df
in get_iterable(indata
.get("df")):
1118 for mp
in get_iterable(df
.get("monitoring-parameter")):
1119 all_monitoring_params
.add(mp
.get("id"))
1121 for df
in get_iterable(indata
.get("df")):
1122 for sa
in get_iterable(df
.get("scaling-aspect")):
1123 for sp
in get_iterable(sa
.get("scaling-policy")):
1124 for sc
in get_iterable(sp
.get("scaling-criteria")):
1125 sc_monitoring_param
= sc
.get("vnf-monitoring-param-ref")
1128 and sc_monitoring_param
not in all_monitoring_params
1130 raise EngineException(
1131 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1132 "[name='{}']:scaling-criteria[name='{}']: "
1133 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1138 sc_monitoring_param
,
1140 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1143 for sca
in get_iterable(sa
.get("scaling-config-action")):
1145 "lcm-operations-configuration" not in df
1146 or "operate-vnf-op-config"
1147 not in df
["lcm-operations-configuration"]
1148 or not utils
.find_in_list(
1149 df
["lcm-operations-configuration"][
1150 "operate-vnf-op-config"
1151 ].get("day1-2", []),
1152 lambda config
: config
["id"] == indata
["id"],
1155 raise EngineException(
1156 "'day1-2 configuration' not defined in the descriptor but it is "
1157 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1160 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1162 for configuration
in get_iterable(
1163 df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1167 for primitive
in get_iterable(
1168 configuration
.get("config-primitive")
1172 == sca
["vnf-config-primitive-name-ref"]
1176 raise EngineException(
1177 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1178 "config-primitive-name-ref='{}' does not match any "
1179 "day1-2 configuration:config-primitive:name".format(
1182 sca
["vnf-config-primitive-name-ref"],
1184 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1187 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1189 Deletes associate file system storage (via super)
1190 Deletes associated vnfpkgops from database.
1191 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1192 :param _id: server internal id
1193 :param db_content: The database content of the descriptor
1195 :raises: FsException in case of error while deleting associated storage
1197 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1198 self
.db
.del_list("vnfpkgops", {"vnfPkgId": _id
})
1199 self
.db
.del_list(self
.topic
+"_revisions", {"_id": {"$regex": _id
}})
1201 def sol005_projection(self
, data
):
1202 data
["onboardingState"] = data
["_admin"]["onboardingState"]
1203 data
["operationalState"] = data
["_admin"]["operationalState"]
1204 data
["usageState"] = data
["_admin"]["usageState"]
1207 links
["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data
["_id"])}
1208 links
["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data
["_id"])}
1209 links
["packageContent"] = {
1210 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data
["_id"])
1212 data
["_links"] = links
1214 return super().sol005_projection(data
)
1217 def find_software_version(vnfd
: dict) -> str:
1218 """Find the sotware version in the VNFD descriptors
1221 vnfd (dict): Descriptor as a dictionary
1224 software-version (str)
1226 default_sw_version
= "1.0"
1227 if vnfd
.get("vnfd"):
1229 if vnfd
.get("software-version"):
1230 return vnfd
["software-version"]
1232 return default_sw_version
1235 def extract_policies(vnfd
: dict) -> dict:
1236 """Removes the policies from the VNFD descriptors
1239 vnfd (dict): Descriptor as a dictionary
1242 vnfd (dict): VNFD which does not include policies
1244 for df
in vnfd
.get("df", {}):
1245 for policy
in ["scaling-aspect", "healing-aspect"]:
1246 if (df
.get(policy
, {})):
1248 for vdu
in vnfd
.get("vdu", {}):
1249 for alarm_policy
in ["alarm", "monitoring-parameter"]:
1250 if (vdu
.get(alarm_policy
, {})):
1251 vdu
.pop(alarm_policy
)
1255 def extract_day12_primitives(vnfd
: dict) -> dict:
1256 """Removes the day12 primitives from the VNFD descriptors
1259 vnfd (dict): Descriptor as a dictionary
1264 for df_id
, df
in enumerate(vnfd
.get("df", {})):
1266 df
.get("lcm-operations-configuration", {})
1267 .get("operate-vnf-op-config", {})
1270 day12
= df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1273 for config_id
, config
in enumerate(day12
):
1275 "initial-config-primitive",
1277 "terminate-config-primitive",
1279 config
.pop(key
, None)
1280 day12
[config_id
] = config
1281 df
["lcm-operations-configuration"]["operate-vnf-op-config"][
1284 vnfd
["df"][df_id
] = df
1287 def remove_modifiable_items(self
, vnfd
: dict) -> dict:
1288 """Removes the modifiable parts from the VNFD descriptors
1290 It calls different extract functions according to different update types
1291 to clear all the modifiable items from VNFD
1294 vnfd (dict): Descriptor as a dictionary
1297 vnfd (dict): Descriptor which does not include modifiable contents
1299 if vnfd
.get("vnfd"):
1301 vnfd
.pop("_admin", None)
1302 # If the other extractions need to be done from VNFD,
1303 # the new extract methods could be appended to below list.
1304 for extract_function
in [self
.extract_day12_primitives
, self
.extract_policies
]:
1305 vnfd_temp
= extract_function(vnfd
)
1309 def _validate_descriptor_changes(
1311 descriptor_file_name
: str,
1312 old_descriptor_directory
: str,
1313 new_descriptor_directory
: str,
1315 """Compares the old and new VNFD descriptors and validates the new descriptor.
1318 old_descriptor_directory (str): Directory of descriptor which is in-use
1319 new_descriptor_directory (str): Directory of directory which is proposed to update (new revision)
1325 EngineException: In case of error when there are unallowed changes
1328 with self
.fs
.file_open(
1329 (old_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1330 ) as old_descriptor_file
:
1331 with self
.fs
.file_open(
1332 (new_descriptor_directory
, descriptor_file_name
), "r"
1333 ) as new_descriptor_file
:
1334 old_content
= yaml
.load(
1335 old_descriptor_file
.read(), Loader
=yaml
.SafeLoader
1337 new_content
= yaml
.load(
1338 new_descriptor_file
.read(), Loader
=yaml
.SafeLoader
1340 if old_content
and new_content
:
1341 if self
.find_software_version(
1343 ) != self
.find_software_version(new_content
):
1345 disallowed_change
= DeepDiff(
1346 self
.remove_modifiable_items(old_content
),
1347 self
.remove_modifiable_items(new_content
),
1349 if disallowed_change
:
1350 changed_nodes
= functools
.reduce(
1351 lambda a
, b
: a
+ " , " + b
,
1354 for node
in disallowed_change
.get(
1359 raise EngineException(
1360 f
"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1361 "there are disallowed changes in the vnf descriptor.",
1362 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1372 "VNF Descriptor could not be processed with error: {}.".format(e
)
1376 class NsdTopic(DescriptorTopic
):
1380 def __init__(self
, db
, fs
, msg
, auth
):
1381 super().__init
__(db
, fs
, msg
, auth
)
1383 def pyangbind_validation(self
, item
, data
, force
=False):
1384 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
1385 raise EngineException(
1386 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
1387 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1390 nsd_vnf_profiles
= data
.get("df", [{}])[0].get("vnf-profile", [])
1391 mynsd
= etsi_nfv_nsd
.etsi_nfv_nsd()
1392 pybindJSONDecoder
.load_ietf_json(
1393 {"nsd": {"nsd": [data
]}},
1400 out
= pybindJSON
.dumps(mynsd
, mode
="ietf")
1401 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
1402 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
1403 if nsd_vnf_profiles
:
1404 desc_out
["df"][0]["vnf-profile"] = nsd_vnf_profiles
1406 except Exception as e
:
1407 raise EngineException(
1408 "Error in pyangbind validation: {}".format(str(e
)),
1409 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1413 def _descriptor_data_is_in_old_format(data
):
1414 return ("nsd-catalog" in data
) or ("nsd:nsd-catalog" in data
)
1417 def _remove_envelop(indata
=None):
1420 clean_indata
= indata
1422 if clean_indata
.get("nsd"):
1423 clean_indata
= clean_indata
["nsd"]
1424 elif clean_indata
.get("etsi-nfv-nsd:nsd"):
1425 clean_indata
= clean_indata
["etsi-nfv-nsd:nsd"]
1426 if clean_indata
.get("nsd"):
1428 not isinstance(clean_indata
["nsd"], list)
1429 or len(clean_indata
["nsd"]) != 1
1431 raise EngineException("'nsd' must be a list of only one element")
1432 clean_indata
= clean_indata
["nsd"][0]
1435 def _validate_input_new(self
, indata
, storage_params
, force
=False):
1436 indata
.pop("nsdOnboardingState", None)
1437 indata
.pop("nsdOperationalState", None)
1438 indata
.pop("nsdUsageState", None)
1440 indata
.pop("links", None)
1442 indata
= self
.pyangbind_validation("nsds", indata
, force
)
1443 # Cross references validation in the descriptor
1444 # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
1445 for vld
in get_iterable(indata
.get("virtual-link-desc")):
1446 self
.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld
, indata
)
1448 self
.validate_vnf_profiles_vnfd_id(indata
)
1453 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld
, indata
):
1454 if not vld
.get("mgmt-network"):
1456 vld_id
= vld
.get("id")
1457 for df
in get_iterable(indata
.get("df")):
1458 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1459 if vld_id
and vld_id
== vlp
.get("virtual-link-desc-id"):
1460 if vlp
.get("virtual-link-protocol-data"):
1461 raise EngineException(
1462 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1463 "protocol-data You cannot set a virtual-link-protocol-data "
1464 "when mgmt-network is True".format(df
["id"], vlp
["id"]),
1465 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1469 def validate_vnf_profiles_vnfd_id(indata
):
1470 all_vnfd_ids
= set(get_iterable(indata
.get("vnfd-id")))
1471 for df
in get_iterable(indata
.get("df")):
1472 for vnf_profile
in get_iterable(df
.get("vnf-profile")):
1473 vnfd_id
= vnf_profile
.get("vnfd-id")
1474 if vnfd_id
and vnfd_id
not in all_vnfd_ids
:
1475 raise EngineException(
1476 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1477 "does not match any vnfd-id".format(
1478 df
["id"], vnf_profile
["id"], vnfd_id
1480 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1483 def _validate_input_edit(self
, indata
, content
, force
=False):
1484 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
1486 indata looks as follows:
1487 - In the new case (conformant)
1488 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1489 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1490 - In the old case (backwards-compatible)
1491 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1493 if "_admin" not in indata
:
1494 indata
["_admin"] = {}
1496 if "nsdOperationalState" in indata
:
1497 if indata
["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1498 indata
["_admin"]["operationalState"] = indata
.pop("nsdOperationalState")
1500 raise EngineException(
1501 "State '{}' is not a valid operational state".format(
1502 indata
["nsdOperationalState"]
1504 http_code
=HTTPStatus
.BAD_REQUEST
,
1507 # In the case of user defined data, we need to put the data in the root of the object
1508 # to preserve current expected behaviour
1509 if "userDefinedData" in indata
:
1510 data
= indata
.pop("userDefinedData")
1511 if isinstance(data
, dict):
1512 indata
["_admin"]["userDefinedData"] = data
1514 raise EngineException(
1515 "userDefinedData should be an object, but is '{}' instead".format(
1518 http_code
=HTTPStatus
.BAD_REQUEST
,
1521 "operationalState" in indata
["_admin"]
1522 and content
["_admin"]["operationalState"]
1523 == indata
["_admin"]["operationalState"]
1525 raise EngineException(
1526 "nsdOperationalState already {}".format(
1527 content
["_admin"]["operationalState"]
1529 http_code
=HTTPStatus
.CONFLICT
,
1533 def _check_descriptor_dependencies(self
, session
, descriptor
):
1535 Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
1536 connection points are ok
1537 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1538 :param descriptor: descriptor to be inserted or edit
1539 :return: None or raises exception
1541 if session
["force"]:
1543 vnfds_index
= self
._get
_descriptor
_constituent
_vnfds
_index
(session
, descriptor
)
1545 # Cross references validation in the descriptor and vnfd connection point validation
1546 for df
in get_iterable(descriptor
.get("df")):
1547 self
.validate_df_vnf_profiles_constituent_connection_points(df
, vnfds_index
)
1549 def _get_descriptor_constituent_vnfds_index(self
, session
, descriptor
):
1551 if descriptor
.get("vnfd-id") and not session
["force"]:
1552 for vnfd_id
in get_iterable(descriptor
.get("vnfd-id")):
1553 query_filter
= self
._get
_project
_filter
(session
)
1554 query_filter
["id"] = vnfd_id
1555 vnf_list
= self
.db
.get_list("vnfds", query_filter
)
1557 raise EngineException(
1558 "Descriptor error at 'vnfd-id'='{}' references a non "
1559 "existing vnfd".format(vnfd_id
),
1560 http_code
=HTTPStatus
.CONFLICT
,
1562 vnfds_index
[vnfd_id
] = vnf_list
[0]
1566 def validate_df_vnf_profiles_constituent_connection_points(df
, vnfds_index
):
1567 for vnf_profile
in get_iterable(df
.get("vnf-profile")):
1568 vnfd
= vnfds_index
.get(vnf_profile
["vnfd-id"])
1569 all_vnfd_ext_cpds
= set()
1570 for ext_cpd
in get_iterable(vnfd
.get("ext-cpd")):
1571 if ext_cpd
.get("id"):
1572 all_vnfd_ext_cpds
.add(ext_cpd
.get("id"))
1574 for virtual_link
in get_iterable(
1575 vnf_profile
.get("virtual-link-connectivity")
1577 for vl_cpd
in get_iterable(virtual_link
.get("constituent-cpd-id")):
1578 vl_cpd_id
= vl_cpd
.get("constituent-cpd-id")
1579 if vl_cpd_id
and vl_cpd_id
not in all_vnfd_ext_cpds
:
1580 raise EngineException(
1581 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1582 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1583 "non existing ext-cpd:id inside vnfd '{}'".format(
1586 virtual_link
["virtual-link-profile-id"],
1590 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1593 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
1594 final_content
= super().check_conflict_on_edit(
1595 session
, final_content
, edit_content
, _id
1598 self
._check
_descriptor
_dependencies
(session
, final_content
)
1600 return final_content
1602 def check_conflict_on_del(self
, session
, _id
, db_content
):
1604 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1605 that NSD can be public and be used by other projects.
1606 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1607 :param _id: nsd internal id
1608 :param db_content: The database content of the _id
1609 :return: None or raises EngineException with the conflict
1611 if session
["force"]:
1613 descriptor
= db_content
1614 descriptor_id
= descriptor
.get("id")
1615 if not descriptor_id
: # empty nsd not uploaded
1618 # check NSD used by NS
1619 _filter
= self
._get
_project
_filter
(session
)
1620 _filter
["nsd-id"] = _id
1621 if self
.db
.get_list("nsrs", _filter
):
1622 raise EngineException(
1623 "There is at least one NS instance using this descriptor",
1624 http_code
=HTTPStatus
.CONFLICT
,
1627 # check NSD referenced by NST
1628 del _filter
["nsd-id"]
1629 _filter
["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1630 if self
.db
.get_list("nsts", _filter
):
1631 raise EngineException(
1632 "There is at least one NetSlice Template referencing this descriptor",
1633 http_code
=HTTPStatus
.CONFLICT
,
1636 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1638 Deletes associate file system storage (via super)
1639 Deletes associated vnfpkgops from database.
1640 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1641 :param _id: server internal id
1642 :param db_content: The database content of the descriptor
1644 :raises: FsException in case of error while deleting associated storage
1646 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1647 self
.db
.del_list(self
.topic
+"_revisions", { "_id": { "$regex": _id
}})
1650 def extract_day12_primitives(nsd
: dict) -> dict:
1651 """Removes the day12 primitives from the NSD descriptors
1654 nsd (dict): Descriptor as a dictionary
1657 nsd (dict): Cleared NSD
1659 if nsd
.get("ns-configuration"):
1662 "initial-config-primitive",
1663 "terminate-config-primitive",
1665 nsd
["ns-configuration"].pop(key
, None)
1668 def remove_modifiable_items(self
, nsd
: dict) -> dict:
1669 """Removes the modifiable parts from the VNFD descriptors
1671 It calls different extract functions according to different update types
1672 to clear all the modifiable items from NSD
1675 nsd (dict): Descriptor as a dictionary
1678 nsd (dict): Descriptor which does not include modifiable contents
1680 while isinstance(nsd
, dict) and nsd
.get("nsd"):
1682 if isinstance(nsd
, list):
1684 nsd
.pop("_admin", None)
1685 # If the more extractions need to be done from NSD,
1686 # the new extract methods could be appended to below list.
1687 for extract_function
in [self
.extract_day12_primitives
]:
1688 nsd_temp
= extract_function(nsd
)
1692 def _validate_descriptor_changes(
1694 descriptor_file_name
: str,
1695 old_descriptor_directory
: str,
1696 new_descriptor_directory
: str,
1698 """Compares the old and new NSD descriptors and validates the new descriptor
1701 old_descriptor_directory: Directory of descriptor which is in-use
1702 new_descriptor_directory: Directory of directory which is proposed to update (new revision)
1708 EngineException: In case of error if the changes are not allowed
1712 with self
.fs
.file_open(
1713 (old_descriptor_directory
, descriptor_file_name
), "r"
1714 ) as old_descriptor_file
:
1715 with self
.fs
.file_open(
1716 (new_descriptor_directory
.rstrip("/"), descriptor_file_name
), "r"
1717 ) as new_descriptor_file
:
1718 old_content
= yaml
.load(
1719 old_descriptor_file
.read(), Loader
=yaml
.SafeLoader
1721 new_content
= yaml
.load(
1722 new_descriptor_file
.read(), Loader
=yaml
.SafeLoader
1724 if old_content
and new_content
:
1725 disallowed_change
= DeepDiff(
1726 self
.remove_modifiable_items(old_content
),
1727 self
.remove_modifiable_items(new_content
),
1729 if disallowed_change
:
1730 changed_nodes
= functools
.reduce(
1731 lambda a
, b
: a
+ ", " + b
,
1734 for node
in disallowed_change
.get(
1739 raise EngineException(
1740 f
"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1741 "there are disallowed changes in the ns descriptor. ",
1742 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1752 "NS Descriptor could not be processed with error: {}.".format(e
)
1755 def sol005_projection(self
, data
):
1756 data
["nsdOnboardingState"] = data
["_admin"]["onboardingState"]
1757 data
["nsdOperationalState"] = data
["_admin"]["operationalState"]
1758 data
["nsdUsageState"] = data
["_admin"]["usageState"]
1761 links
["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data
["_id"])}
1762 links
["nsd_content"] = {
1763 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data
["_id"])
1765 data
["_links"] = links
1767 return super().sol005_projection(data
)
1770 class NstTopic(DescriptorTopic
):
1773 quota_name
= "slice_templates"
1775 def __init__(self
, db
, fs
, msg
, auth
):
1776 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
1778 def pyangbind_validation(self
, item
, data
, force
=False):
1781 pybindJSONDecoder
.load_ietf_json(
1789 out
= pybindJSON
.dumps(mynst
, mode
="ietf")
1790 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
1792 except Exception as e
:
1793 raise EngineException(
1794 "Error in pyangbind validation: {}".format(str(e
)),
1795 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1799 def _remove_envelop(indata
=None):
1802 clean_indata
= indata
1804 if clean_indata
.get("nst"):
1806 not isinstance(clean_indata
["nst"], list)
1807 or len(clean_indata
["nst"]) != 1
1809 raise EngineException("'nst' must be a list only one element")
1810 clean_indata
= clean_indata
["nst"][0]
1811 elif clean_indata
.get("nst:nst"):
1813 not isinstance(clean_indata
["nst:nst"], list)
1814 or len(clean_indata
["nst:nst"]) != 1
1816 raise EngineException("'nst:nst' must be a list only one element")
1817 clean_indata
= clean_indata
["nst:nst"][0]
1820 def _validate_input_new(self
, indata
, storage_params
, force
=False):
1821 indata
.pop("onboardingState", None)
1822 indata
.pop("operationalState", None)
1823 indata
.pop("usageState", None)
1824 indata
= self
.pyangbind_validation("nsts", indata
, force
)
1825 return indata
.copy()
1827 def _check_descriptor_dependencies(self
, session
, descriptor
):
1829 Check that the dependent descriptors exist on a new descriptor or edition
1830 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1831 :param descriptor: descriptor to be inserted or edit
1832 :return: None or raises exception
1834 if not descriptor
.get("netslice-subnet"):
1836 for nsd
in descriptor
["netslice-subnet"]:
1837 nsd_id
= nsd
["nsd-ref"]
1838 filter_q
= self
._get
_project
_filter
(session
)
1839 filter_q
["id"] = nsd_id
1840 if not self
.db
.get_list("nsds", filter_q
):
1841 raise EngineException(
1842 "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
1843 "existing nsd".format(nsd_id
),
1844 http_code
=HTTPStatus
.CONFLICT
,
1847 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
1848 final_content
= super().check_conflict_on_edit(
1849 session
, final_content
, edit_content
, _id
1852 self
._check
_descriptor
_dependencies
(session
, final_content
)
1853 return final_content
1855 def check_conflict_on_del(self
, session
, _id
, db_content
):
1857 Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
1858 that NST can be public and be used by other projects.
1859 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1860 :param _id: nst internal id
1861 :param db_content: The database content of the _id.
1862 :return: None or raises EngineException with the conflict
1864 # TODO: Check this method
1865 if session
["force"]:
1867 # Get Network Slice Template from Database
1868 _filter
= self
._get
_project
_filter
(session
)
1869 _filter
["_admin.nst-id"] = _id
1870 if self
.db
.get_list("nsis", _filter
):
1871 raise EngineException(
1872 "there is at least one Netslice Instance using this descriptor",
1873 http_code
=HTTPStatus
.CONFLICT
,
1876 def sol005_projection(self
, data
):
1877 data
["onboardingState"] = data
["_admin"]["onboardingState"]
1878 data
["operationalState"] = data
["_admin"]["operationalState"]
1879 data
["usageState"] = data
["_admin"]["usageState"]
1882 links
["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data
["_id"])}
1883 links
["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data
["_id"])}
1884 data
["_links"] = links
1886 return super().sol005_projection(data
)
1889 class PduTopic(BaseTopic
):
1892 quota_name
= "pduds"
1893 schema_new
= pdu_new_schema
1894 schema_edit
= pdu_edit_schema
1896 def __init__(self
, db
, fs
, msg
, auth
):
1897 BaseTopic
.__init
__(self
, db
, fs
, msg
, auth
)
1900 def format_on_new(content
, project_id
=None, make_public
=False):
1901 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
1902 content
["_admin"]["onboardingState"] = "CREATED"
1903 content
["_admin"]["operationalState"] = "ENABLED"
1904 content
["_admin"]["usageState"] = "NOT_IN_USE"
1906 def check_conflict_on_del(self
, session
, _id
, db_content
):
1908 Check that there is not any vnfr that uses this PDU
1909 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1910 :param _id: pdu internal id
1911 :param db_content: The database content of the _id.
1912 :return: None or raises EngineException with the conflict
1914 if session
["force"]:
1917 _filter
= self
._get
_project
_filter
(session
)
1918 _filter
["vdur.pdu-id"] = _id
1919 if self
.db
.get_list("vnfrs", _filter
):
1920 raise EngineException(
1921 "There is at least one VNF instance using this PDU",
1922 http_code
=HTTPStatus
.CONFLICT
,
1926 class VnfPkgOpTopic(BaseTopic
):
1929 schema_new
= vnfpkgop_new_schema
1932 def __init__(self
, db
, fs
, msg
, auth
):
1933 BaseTopic
.__init
__(self
, db
, fs
, msg
, auth
)
1935 def edit(self
, session
, _id
, indata
=None, kwargs
=None, content
=None):
1936 raise EngineException(
1937 "Method 'edit' not allowed for topic '{}'".format(self
.topic
),
1938 HTTPStatus
.METHOD_NOT_ALLOWED
,
1941 def delete(self
, session
, _id
, dry_run
=False):
1942 raise EngineException(
1943 "Method 'delete' not allowed for topic '{}'".format(self
.topic
),
1944 HTTPStatus
.METHOD_NOT_ALLOWED
,
1947 def delete_list(self
, session
, filter_q
=None):
1948 raise EngineException(
1949 "Method 'delete_list' not allowed for topic '{}'".format(self
.topic
),
1950 HTTPStatus
.METHOD_NOT_ALLOWED
,
1953 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
1955 Creates a new entry into database.
1956 :param rollback: list to append created items at database in case a rollback may to be done
1957 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1958 :param indata: data to be inserted
1959 :param kwargs: used to override the indata descriptor
1960 :param headers: http request headers
1961 :return: _id, op_id:
1962 _id: identity of the inserted data.
1965 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
1966 validate_input(indata
, self
.schema_new
)
1967 vnfpkg_id
= indata
["vnfPkgId"]
1968 filter_q
= BaseTopic
._get
_project
_filter
(session
)
1969 filter_q
["_id"] = vnfpkg_id
1970 vnfd
= self
.db
.get_one("vnfds", filter_q
)
1971 operation
= indata
["lcmOperationType"]
1972 kdu_name
= indata
["kdu_name"]
1973 for kdu
in vnfd
.get("kdu", []):
1974 if kdu
["name"] == kdu_name
:
1975 helm_chart
= kdu
.get("helm-chart")
1976 juju_bundle
= kdu
.get("juju-bundle")
1979 raise EngineException(
1980 "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id
, kdu_name
)
1983 indata
["helm-chart"] = helm_chart
1984 match
= fullmatch(r
"([^/]*)/([^/]*)", helm_chart
)
1985 repo_name
= match
.group(1) if match
else None
1987 indata
["juju-bundle"] = juju_bundle
1988 match
= fullmatch(r
"([^/]*)/([^/]*)", juju_bundle
)
1989 repo_name
= match
.group(1) if match
else None
1991 raise EngineException(
1992 "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
1998 filter_q
["name"] = repo_name
1999 repo
= self
.db
.get_one("k8srepos", filter_q
)
2000 k8srepo_id
= repo
.get("_id")
2001 k8srepo_url
= repo
.get("url")
2005 indata
["k8srepoId"] = k8srepo_id
2006 indata
["k8srepo_url"] = k8srepo_url
2007 vnfpkgop_id
= str(uuid4())
2010 "operationState": "PROCESSING",
2011 "vnfPkgId": vnfpkg_id
,
2012 "lcmOperationType": operation
,
2013 "isAutomaticInvocation": False,
2014 "isCancelPending": False,
2015 "operationParams": indata
,
2017 "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id
,
2018 "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id
,
2022 vnfpkgop_desc
, session
["project_id"], make_public
=session
["public"]
2024 ctime
= vnfpkgop_desc
["_admin"]["created"]
2025 vnfpkgop_desc
["statusEnteredTime"] = ctime
2026 vnfpkgop_desc
["startTime"] = ctime
2027 self
.db
.create(self
.topic
, vnfpkgop_desc
)
2028 rollback
.append({"topic": self
.topic
, "_id": vnfpkgop_id
})
2029 self
.msg
.write(self
.topic_msg
, operation
, vnfpkgop_desc
)
2030 return vnfpkgop_id
, None