1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
26 from deepdiff
import DeepDiff
27 from hashlib
import md5
28 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
29 from http
import HTTPStatus
31 from uuid
import uuid4
32 from re
import fullmatch
33 from zipfile
import ZipFile
34 from osm_nbi
.validation
import (
41 from osm_nbi
.base_topic
import (
45 detect_descriptor_usage
,
47 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
48 from osm_im
.nst
import nst
as nst_im
49 from pyangbind
.lib
.serialise
import pybindJSONDecoder
50 import pyangbind
.lib
.pybindJSON
as pybindJSON
51 from osm_nbi
import utils
53 __author__
= "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
55 valid_helm_chart_re
= re
.compile(
56 r
"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
60 class DescriptorTopic(BaseTopic
):
    def __init__(self, db, fs, msg, auth):
        """Descriptor topic constructor.

        Delegates all setup to BaseTopic.__init__ (via super()), which stores
        the shared service handles used throughout this class.

        :param db: database driver (used as ``self.db`` for get_one/get_list/create)
        :param fs: file-storage driver (used as ``self.fs`` for package files)
        :param msg: message-bus producer (used by ``self._send_msg``)
        :param auth: auth connector — not used directly in this class; TODO confirm usage in BaseTopic
        """
        super().__init__(db, fs, msg, auth)
64 def _validate_input_new(self
, indata
, storage_params
, force
=False):
67 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
68 final_content
= super().check_conflict_on_edit(
69 session
, final_content
, edit_content
, _id
72 def _check_unique_id_name(descriptor
, position
=""):
73 for desc_key
, desc_item
in descriptor
.items():
74 if isinstance(desc_item
, list) and desc_item
:
77 for index
, list_item
in enumerate(desc_item
):
78 if isinstance(list_item
, dict):
79 _check_unique_id_name(
80 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
84 list_item
.get("id") or list_item
.get("name")
86 desc_item_id
= "id" if list_item
.get("id") else "name"
87 if desc_item_id
and list_item
.get(desc_item_id
):
88 if list_item
[desc_item_id
] in used_ids
:
89 position
= "{}.{}[{}]".format(
90 position
, desc_key
, index
92 raise EngineException(
93 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
95 list_item
[desc_item_id
],
98 HTTPStatus
.UNPROCESSABLE_ENTITY
,
100 used_ids
.append(list_item
[desc_item_id
])
102 _check_unique_id_name(final_content
)
103 # 1. validate again with pyangbind
104 # 1.1. remove internal keys
106 for k
in ("_id", "_admin"):
107 if k
in final_content
:
108 internal_keys
[k
] = final_content
.pop(k
)
109 storage_params
= internal_keys
["_admin"].get("storage")
110 serialized
= self
._validate
_input
_new
(
111 final_content
, storage_params
, session
["force"]
114 # 1.2. modify final_content with a serialized version
115 final_content
= copy
.deepcopy(serialized
)
116 # 1.3. restore internal keys
117 for k
, v
in internal_keys
.items():
122 # 2. check that this id is not present
123 if "id" in edit_content
:
124 _filter
= self
._get
_project
_filter
(session
)
126 _filter
["id"] = final_content
["id"]
127 _filter
["_id.neq"] = _id
129 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
130 raise EngineException(
131 "{} with id '{}' already exists for this project".format(
132 (str(self
.topic
))[:-1], final_content
["id"]
140 def format_on_new(content
, project_id
=None, make_public
=False):
141 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
142 content
["_admin"]["onboardingState"] = "CREATED"
143 content
["_admin"]["operationalState"] = "DISABLED"
144 content
["_admin"]["usageState"] = "NOT_IN_USE"
146 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
148 Deletes file system storage associated with the descriptor
149 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
150 :param _id: server internal id
151 :param db_content: The database content of the descriptor
152 :param not_send_msg: To not send message (False) or store content (list) instead
153 :return: None if ok or raises EngineException with the problem
155 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
156 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
157 # Remove file revisions
158 if "revision" in db_content
["_admin"]:
159 revision
= db_content
["_admin"]["revision"]
161 self
.fs
.file_delete(_id
+ ":" + str(revision
), ignore_non_exist
=True)
162 revision
= revision
- 1
165 def get_one_by_id(db
, session
, topic
, id):
166 # find owned by this project
167 _filter
= BaseTopic
._get
_project
_filter
(session
)
169 desc_list
= db
.get_list(topic
, _filter
)
170 if len(desc_list
) == 1:
172 elif len(desc_list
) > 1:
174 "Found more than one {} with id='{}' belonging to this project".format(
180 # not found any: try to find public
181 _filter
= BaseTopic
._get
_project
_filter
(session
)
183 desc_list
= db
.get_list(topic
, _filter
)
186 "Not found any {} with id='{}'".format(topic
[:-1], id),
187 HTTPStatus
.NOT_FOUND
,
189 elif len(desc_list
) == 1:
193 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
199 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
201 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
202 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
203 (self.upload_content)
204 :param rollback: list to append created items at database in case a rollback may to be done
205 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
206 :param indata: data to be inserted
207 :param kwargs: used to override the indata descriptor
208 :param headers: http request headers
209 :return: _id, None: identity of the inserted data; and None as there is not any operation
212 # No needed to capture exceptions
214 self
.check_quota(session
)
218 if "userDefinedData" in indata
:
219 indata
= indata
["userDefinedData"]
221 # Override descriptor with query string kwargs
222 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
223 # uncomment when this method is implemented.
224 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
225 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
227 content
= {"_admin": {"userDefinedData": indata
, "revision": 0}}
230 content
, session
["project_id"], make_public
=session
["public"]
232 _id
= self
.db
.create(self
.topic
, content
)
233 rollback
.append({"topic": self
.topic
, "_id": _id
})
234 self
._send
_msg
("created", {"_id": _id
})
237 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
239 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
240 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
241 :param _id : the nsd,vnfd is already created, this is the id
242 :param indata: http body request
243 :param kwargs: user query string to override parameters. NOT USED
244 :param headers: http request headers
245 :return: True if package is completely uploaded or False if partial content has been uploded
246 Raise exception on error
248 # Check that _id exists and it is valid
249 current_desc
= self
.show(session
, _id
)
251 content_range_text
= headers
.get("Content-Range")
252 expected_md5
= headers
.get("Content-File-MD5")
254 content_type
= headers
.get("Content-Type")
257 and "application/gzip" in content_type
258 or "application/x-gzip" in content_type
261 if content_type
and "application/zip" in content_type
:
263 filename
= headers
.get("Content-Filename")
264 if not filename
and compressed
:
265 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
270 if "revision" in current_desc
["_admin"]:
271 revision
= current_desc
["_admin"]["revision"] + 1
273 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
279 if content_range_text
:
281 content_range_text
.replace("-", " ").replace("/", " ").split()
284 content_range
[0] != "bytes"
285 ): # TODO check x<y not negative < total....
287 start
= int(content_range
[1])
288 end
= int(content_range
[2]) + 1
289 total
= int(content_range
[3])
292 # Rather than using a temp folder, we will store the package in a folder based on
293 # the current revision.
294 proposed_revision_path
= (
295 _id
+ ":" + str(revision
)
296 ) # all the content is upload here and if ok, it is rename from id_ to is folder
299 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
300 raise EngineException(
301 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
304 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
305 self
.fs
.mkdir(proposed_revision_path
)
306 fs_rollback
.append(proposed_revision_path
)
308 storage
= self
.fs
.get_params()
309 storage
["folder"] = proposed_revision_path
311 file_path
= (proposed_revision_path
, filename
)
312 if self
.fs
.file_exists(file_path
, "file"):
313 file_size
= self
.fs
.file_size(file_path
)
316 if file_size
!= start
:
317 raise EngineException(
318 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
321 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
323 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
324 if isinstance(indata
, dict):
325 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
326 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
330 indata_text
= indata
.read(4096)
331 indata_len
+= len(indata_text
)
334 file_pkg
.write(indata_text
)
335 if content_range_text
:
336 if indata_len
!= end
- start
:
337 raise EngineException(
338 "Mismatch between Content-Range header {}-{} and body length of {}".format(
339 start
, end
- 1, indata_len
341 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
344 # TODO update to UPLOADING
351 chunk_data
= file_pkg
.read(1024)
353 file_md5
.update(chunk_data
)
354 chunk_data
= file_pkg
.read(1024)
355 if expected_md5
!= file_md5
.hexdigest():
356 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
358 if compressed
== "gzip":
359 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
360 descriptor_file_name
= None
362 tarname
= tarinfo
.name
363 tarname_path
= tarname
.split("/")
365 not tarname_path
[0] or ".." in tarname_path
366 ): # if start with "/" means absolute path
367 raise EngineException(
368 "Absolute path or '..' are not allowed for package descriptor tar.gz"
370 if len(tarname_path
) == 1 and not tarinfo
.isdir():
371 raise EngineException(
372 "All files must be inside a dir for package descriptor tar.gz"
375 tarname
.endswith(".yaml")
376 or tarname
.endswith(".json")
377 or tarname
.endswith(".yml")
379 storage
["pkg-dir"] = tarname_path
[0]
380 if len(tarname_path
) == 2:
381 if descriptor_file_name
:
382 raise EngineException(
383 "Found more than one descriptor file at package descriptor tar.gz"
385 descriptor_file_name
= tarname
386 if not descriptor_file_name
:
387 raise EngineException(
388 "Not found any descriptor file at package descriptor tar.gz"
390 storage
["descriptor"] = descriptor_file_name
391 storage
["zipfile"] = filename
392 self
.fs
.file_extract(tar
, proposed_revision_path
)
393 with self
.fs
.file_open(
394 (proposed_revision_path
, descriptor_file_name
), "r"
395 ) as descriptor_file
:
396 content
= descriptor_file
.read()
397 elif compressed
== "zip":
398 zipfile
= ZipFile(file_pkg
)
399 descriptor_file_name
= None
400 for package_file
in zipfile
.infolist():
401 zipfilename
= package_file
.filename
402 file_path
= zipfilename
.split("/")
404 not file_path
[0] or ".." in zipfilename
405 ): # if start with "/" means absolute path
406 raise EngineException(
407 "Absolute path or '..' are not allowed for package descriptor zip"
411 zipfilename
.endswith(".yaml")
412 or zipfilename
.endswith(".json")
413 or zipfilename
.endswith(".yml")
415 zipfilename
.find("/") < 0
416 or zipfilename
.find("Definitions") >= 0
418 storage
["pkg-dir"] = ""
419 if descriptor_file_name
:
420 raise EngineException(
421 "Found more than one descriptor file at package descriptor zip"
423 descriptor_file_name
= zipfilename
424 if not descriptor_file_name
:
425 raise EngineException(
426 "Not found any descriptor file at package descriptor zip"
428 storage
["descriptor"] = descriptor_file_name
429 storage
["zipfile"] = filename
430 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
432 with self
.fs
.file_open(
433 (proposed_revision_path
, descriptor_file_name
), "r"
434 ) as descriptor_file
:
435 content
= descriptor_file
.read()
437 content
= file_pkg
.read()
438 storage
["descriptor"] = descriptor_file_name
= filename
440 if descriptor_file_name
.endswith(".json"):
441 error_text
= "Invalid json format "
442 indata
= json
.load(content
)
444 error_text
= "Invalid yaml format "
445 indata
= yaml
.safe_load(content
)
447 # Need to close the file package here so it can be copied from the
448 # revision to the current, unrevisioned record
453 # Fetch both the incoming, proposed revision and the original revision so we
454 # can call a validate method to compare them
455 current_revision_path
= _id
+ "/"
456 self
.fs
.sync(from_path
=current_revision_path
)
457 self
.fs
.sync(from_path
=proposed_revision_path
)
461 self
._validate
_descriptor
_changes
(
463 descriptor_file_name
,
464 current_revision_path
,
465 proposed_revision_path
,
467 except Exception as e
:
469 self
.fs
.path
+ current_revision_path
, ignore_errors
=True
472 self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True
474 # Only delete the new revision. We need to keep the original version in place
475 # as it has not been changed.
476 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
479 indata
= self
._remove
_envelop
(indata
)
481 # Override descriptor with query string kwargs
483 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
485 current_desc
["_admin"]["storage"] = storage
486 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
487 current_desc
["_admin"]["operationalState"] = "ENABLED"
488 current_desc
["_admin"]["modified"] = time()
489 current_desc
["_admin"]["revision"] = revision
491 deep_update_rfc7396(current_desc
, indata
)
492 current_desc
= self
.check_conflict_on_edit(
493 session
, current_desc
, indata
, _id
=_id
496 # Copy the revision to the active package name by its original id
497 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
499 self
.fs
.path
+ proposed_revision_path
,
500 self
.fs
.path
+ current_revision_path
,
502 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
503 self
.fs
.mkdir(current_revision_path
)
504 self
.fs
.reverse_sync(from_path
=current_revision_path
)
506 shutil
.rmtree(self
.fs
.path
+ _id
)
508 self
.db
.replace(self
.topic
, _id
, current_desc
)
510 # Store a copy of the package as a point in time revision
511 revision_desc
= dict(current_desc
)
512 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
513 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
517 self
._send
_msg
("edited", indata
)
519 # TODO if descriptor has changed because kwargs update content and remove cached zip
520 # TODO if zip is not present creates one
523 except EngineException
:
526 raise EngineException(
527 "invalid Content-Range header format. Expected 'bytes start-end/total'",
528 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
531 raise EngineException(
532 "invalid upload transaction sequence: '{}'".format(e
),
533 HTTPStatus
.BAD_REQUEST
,
535 except tarfile
.ReadError
as e
:
536 raise EngineException(
537 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
539 except (ValueError, yaml
.YAMLError
) as e
:
540 raise EngineException(error_text
+ str(e
))
541 except ValidationError
as e
:
542 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
546 for file in fs_rollback
:
547 self
.fs
.file_delete(file, ignore_non_exist
=True)
549 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
551 Return the file content of a vnfd or nsd
552 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
553 :param _id: Identity of the vnfd, nsd
554 :param path: artifact path or "$DESCRIPTOR" or None
555 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
556 :return: opened file plus Accept format or raises an exception
558 accept_text
= accept_zip
= False
560 if "text/plain" in accept_header
or "*/*" in accept_header
:
562 if "application/zip" in accept_header
or "*/*" in accept_header
:
563 accept_zip
= "application/zip"
564 elif "application/gzip" in accept_header
:
565 accept_zip
= "application/gzip"
567 if not accept_text
and not accept_zip
:
568 raise EngineException(
569 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
570 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
573 content
= self
.show(session
, _id
)
574 if content
["_admin"]["onboardingState"] != "ONBOARDED":
575 raise EngineException(
576 "Cannot get content because this resource is not at 'ONBOARDED' state. "
577 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
578 http_code
=HTTPStatus
.CONFLICT
,
580 storage
= content
["_admin"]["storage"]
581 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
582 if not storage
.get("pkg-dir") and not storage
.get("folder"):
583 raise EngineException(
584 "Packages does not contains artifacts",
585 http_code
=HTTPStatus
.BAD_REQUEST
,
587 if self
.fs
.file_exists(
588 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
590 folder_content
= self
.fs
.dir_ls(
591 (storage
["folder"], storage
["pkg-dir"], *path
)
593 return folder_content
, "text/plain"
594 # TODO manage folders in http
598 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
600 "application/octet-stream",
603 # pkgtype accept ZIP TEXT -> result
604 # manyfiles yes X -> zip
606 # onefile yes no -> zip
608 contain_many_files
= False
609 if storage
.get("pkg-dir"):
610 # check if there are more than one file in the package, ignoring checksums.txt.
611 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
612 if len(pkg_files
) >= 3 or (
613 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
615 contain_many_files
= True
616 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
618 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
621 elif contain_many_files
and not accept_zip
:
622 raise EngineException(
623 "Packages that contains several files need to be retrieved with 'application/zip'"
625 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
628 if not storage
.get("zipfile"):
629 # TODO generate zipfile if not present
630 raise EngineException(
631 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
633 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
636 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
640 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
642 for k
, v
in descriptor
.items():
644 if isinstance(v
, dict):
645 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
646 elif isinstance(v
, list):
649 if isinstance(x
, dict):
650 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
653 new_descriptor
[k
.split(":")[-1]] = new_v
654 return new_descriptor
656 def pyangbind_validation(self
, item
, data
, force
=False):
657 raise EngineException(
658 "Not possible to validate '{}' item".format(item
),
659 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
662 def _validate_input_edit(self
, indata
, content
, force
=False):
663 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
666 if "_admin" not in indata
:
667 indata
["_admin"] = {}
669 if "operationalState" in indata
:
670 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
671 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
673 raise EngineException(
674 "State '{}' is not a valid operational state".format(
675 indata
["operationalState"]
677 http_code
=HTTPStatus
.BAD_REQUEST
,
680 # In the case of user defined data, we need to put the data in the root of the object
681 # to preserve current expected behaviour
682 if "userDefinedData" in indata
:
683 data
= indata
.pop("userDefinedData")
684 if type(data
) == dict:
685 indata
["_admin"]["userDefinedData"] = data
687 raise EngineException(
688 "userDefinedData should be an object, but is '{}' instead".format(
691 http_code
=HTTPStatus
.BAD_REQUEST
,
695 "operationalState" in indata
["_admin"]
696 and content
["_admin"]["operationalState"]
697 == indata
["_admin"]["operationalState"]
699 raise EngineException(
700 "operationalState already {}".format(
701 content
["_admin"]["operationalState"]
703 http_code
=HTTPStatus
.CONFLICT
,
708 def _validate_descriptor_changes(
711 descriptor_file_name
,
712 old_descriptor_directory
,
713 new_descriptor_directory
,
716 # raise EngineException(
717 # "Error in validating new descriptor: <NODE> cannot be modified",
718 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
723 class VnfdTopic(DescriptorTopic
):
727 def __init__(self
, db
, fs
, msg
, auth
):
728 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
730 def pyangbind_validation(self
, item
, data
, force
=False):
731 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
732 raise EngineException(
733 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
734 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
737 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
738 pybindJSONDecoder
.load_ietf_json(
739 {"etsi-nfv-vnfd:vnfd": data
},
746 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
747 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
748 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
749 return utils
.deep_update_dict(data
, desc_out
)
750 except Exception as e
:
751 raise EngineException(
752 "Error in pyangbind validation: {}".format(str(e
)),
753 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
757 def _descriptor_data_is_in_old_format(data
):
758 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
761 def _remove_envelop(indata
=None):
764 clean_indata
= indata
766 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
767 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
768 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
769 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
770 elif clean_indata
.get("vnfd"):
771 if not isinstance(clean_indata
["vnfd"], dict):
772 raise EngineException("'vnfd' must be dict")
773 clean_indata
= clean_indata
["vnfd"]
777 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
778 final_content
= super().check_conflict_on_edit(
779 session
, final_content
, edit_content
, _id
785 for vdu
in get_iterable(final_content
.get("vdu")):
786 if vdu
.get("pdu-type"):
791 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
793 final_content
["_admin"]["type"] = "vnfd"
794 # if neither vud nor pdu do not fill type
797 def check_conflict_on_del(self
, session
, _id
, db_content
):
799 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
800 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
802 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
803 :param _id: vnfd internal id
804 :param db_content: The database content of the _id.
805 :return: None or raises EngineException with the conflict
809 descriptor
= db_content
810 descriptor_id
= descriptor
.get("id")
811 if not descriptor_id
: # empty vnfd not uploaded
814 _filter
= self
._get
_project
_filter
(session
)
816 # check vnfrs using this vnfd
817 _filter
["vnfd-id"] = _id
818 if self
.db
.get_list("vnfrs", _filter
):
819 raise EngineException(
820 "There is at least one VNF instance using this descriptor",
821 http_code
=HTTPStatus
.CONFLICT
,
824 # check NSD referencing this VNFD
825 del _filter
["vnfd-id"]
826 _filter
["vnfd-id"] = descriptor_id
827 if self
.db
.get_list("nsds", _filter
):
828 raise EngineException(
829 "There is at least one NS package referencing this descriptor",
830 http_code
=HTTPStatus
.CONFLICT
,
833 def _validate_input_new(self
, indata
, storage_params
, force
=False):
834 indata
.pop("onboardingState", None)
835 indata
.pop("operationalState", None)
836 indata
.pop("usageState", None)
837 indata
.pop("links", None)
839 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
840 # Cross references validation in the descriptor
842 self
.validate_mgmt_interface_connection_point(indata
)
844 for vdu
in get_iterable(indata
.get("vdu")):
845 self
.validate_vdu_internal_connection_points(vdu
)
846 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
847 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
849 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
851 self
.validate_external_connection_points(indata
)
852 self
.validate_internal_virtual_links(indata
)
853 self
.validate_monitoring_params(indata
)
854 self
.validate_scaling_group_descriptor(indata
)
855 self
.validate_helm_chart(indata
)
860 def validate_helm_chart(indata
):
861 kdus
= indata
.get("kdu", [])
863 helm_chart_value
= kdu
.get("helm-chart")
864 if not helm_chart_value
:
866 if not valid_helm_chart_re
.match(helm_chart_value
):
867 raise EngineException(
868 "helm-chart '{}' is not valid".format(helm_chart_value
),
869 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
873 def validate_mgmt_interface_connection_point(indata
):
874 if not indata
.get("vdu"):
876 if not indata
.get("mgmt-cp"):
877 raise EngineException(
878 "'mgmt-cp' is a mandatory field and it is not defined",
879 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
882 for cp
in get_iterable(indata
.get("ext-cpd")):
883 if cp
["id"] == indata
["mgmt-cp"]:
886 raise EngineException(
887 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
888 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
892 def validate_vdu_internal_connection_points(vdu
):
894 for cpd
in get_iterable(vdu
.get("int-cpd")):
895 cpd_id
= cpd
.get("id")
896 if cpd_id
and cpd_id
in int_cpds
:
897 raise EngineException(
898 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
901 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
906 def validate_external_connection_points(indata
):
907 all_vdus_int_cpds
= set()
908 for vdu
in get_iterable(indata
.get("vdu")):
909 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
910 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
913 for cpd
in get_iterable(indata
.get("ext-cpd")):
914 cpd_id
= cpd
.get("id")
915 if cpd_id
and cpd_id
in ext_cpds
:
916 raise EngineException(
917 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
918 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
922 int_cpd
= cpd
.get("int-cpd")
924 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
925 raise EngineException(
926 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
929 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
931 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
933 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
934 for df
in indata
["df"]:
936 "lcm-operations-configuration" in df
937 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
939 configs
= df
["lcm-operations-configuration"][
940 "operate-vnf-op-config"
942 vdus
= df
.get("vdu-profile", [])
944 for config
in configs
:
945 if config
["id"] == vdu
["id"] and utils
.find_in_list(
946 config
.get("execution-environment-list", []),
947 lambda ee
: "juju" in ee
,
949 if not self
._validate
_package
_folders
(
950 storage_params
, "charms"
951 ) and not self
._validate
_package
_folders
(
952 storage_params
, "Scripts/charms"
954 raise EngineException(
955 "Charm defined in vnf[id={}] but not present in "
956 "package".format(indata
["id"])
959 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
960 if not vdu
.get("cloud-init-file"):
962 if not self
._validate
_package
_folders
(
963 storage_params
, "cloud_init", vdu
["cloud-init-file"]
964 ) and not self
._validate
_package
_folders
(
965 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
967 raise EngineException(
968 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
969 "package".format(indata
["id"], vdu
["id"])
972 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
973 # Get VNF configuration through new container
974 for deployment_flavor
in indata
.get("df", []):
975 if "lcm-operations-configuration" not in deployment_flavor
:
978 "operate-vnf-op-config"
979 not in deployment_flavor
["lcm-operations-configuration"]
982 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
983 "operate-vnf-op-config"
985 if day_1_2_config
["id"] == indata
["id"]:
986 if utils
.find_in_list(
987 day_1_2_config
.get("execution-environment-list", []),
988 lambda ee
: "juju" in ee
,
990 if not self
._validate
_package
_folders
(
991 storage_params
, "charms"
992 ) and not self
._validate
_package
_folders
(
993 storage_params
, "Scripts/charms"
995 raise EngineException(
996 "Charm defined in vnf[id={}] but not present in "
997 "package".format(indata
["id"])
1000 def _validate_package_folders(self
, storage_params
, folder
, file=None):
1001 if not storage_params
:
1003 elif not storage_params
.get("pkg-dir"):
1004 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1005 f
= "{}_/{}".format(storage_params
["folder"], folder
)
1007 f
= "{}/{}".format(storage_params
["folder"], folder
)
1009 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1011 if self
.fs
.file_exists(f
, "dir"):
1012 if self
.fs
.dir_ls(f
):
1016 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1017 f
= "{}_/{}/{}".format(
1018 storage_params
["folder"], storage_params
["pkg-dir"], folder
1021 f
= "{}/{}/{}".format(
1022 storage_params
["folder"], storage_params
["pkg-dir"], folder
1025 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1027 if self
.fs
.file_exists(f
, "dir"):
1028 if self
.fs
.dir_ls(f
):
def validate_internal_virtual_links(indata):
    """Cross-check the VNFD's internal virtual links.

    Verifies that int-virtual-link-desc ids are unique and that every
    vdu:int-cpd and df:virtual-link-profile reference points to one of them.
    Raises EngineException (422) on the first inconsistency.
    """
    seen_vld_ids = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        current_id = ivld.get("id")
        if current_id and current_id in seen_vld_ids:
            raise EngineException(
                "Duplicated VLD id in int-virtual-link-desc[id={}]".format(current_id),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        seen_vld_ids.add(current_id)

    for vdu in get_iterable(indata.get("vdu")):
        for int_cpd in get_iterable(vdu.get("int-cpd")):
            referenced_vld = int_cpd.get("int-virtual-link-desc")
            if referenced_vld and referenced_vld not in seen_vld_ids:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                    "int-virtual-link-desc".format(
                        vdu["id"], int_cpd["id"], referenced_vld
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )

    for df in get_iterable(indata.get("df")):
        for vlp in get_iterable(df.get("virtual-link-profile")):
            profile_vld_id = vlp.get("id")
            if profile_vld_id and profile_vld_id not in seen_vld_ids:
                raise EngineException(
                    "df[id='{}']:virtual-link-profile='{}' must match an existing "
                    "int-virtual-link-desc".format(df["id"], profile_vld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def validate_monitoring_params(indata):
    """Ensure monitoring-parameter ids are unique across the whole VNFD.

    Scans the int-virtual-link-desc, vdu and df scopes; raises
    EngineException (422) on the first duplicated id.
    NOTE(review): source was garbled; the .format() arguments of the error
    messages were reconstructed — confirm against version control.
    """
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                        ivld["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                        vdu["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "df[id='{}']:monitoring-parameter[id='{}']".format(
                        df["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)
def validate_scaling_group_descriptor(indata):
    """Validate df:scaling-aspect references of the VNFD.

    Checks that each scaling-criteria:vnf-monitoring-param-ref matches a
    declared monitoring parameter, and that each scaling-config-action
    refers to an existing day1-2 config-primitive. Raises EngineException
    (422) otherwise.
    NOTE(review): source was garbled; several condition and format-argument
    lines were reconstructed — confirm against version control.
    """
    # Collect every monitoring-parameter id declared anywhere in the VNFD.
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            all_monitoring_params.add(mp.get("id"))

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for sa in get_iterable(df.get("scaling-aspect")):
            for sp in get_iterable(sa.get("scaling-policy")):
                for sc in get_iterable(sp.get("scaling-criteria")):
                    sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                    if (
                        sc_monitoring_param
                        and sc_monitoring_param not in all_monitoring_params
                    ):
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                            "[name='{}']:scaling-criteria[name='{}']: "
                            "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                df["id"],
                                sa["id"],
                                sp["name"],
                                sc["name"],
                                sc_monitoring_param,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

            for sca in get_iterable(sa.get("scaling-config-action")):
                # A scaling-config-action requires a day1-2 configuration
                # entry whose id equals the VNFD id.
                if (
                    "lcm-operations-configuration" not in df
                    or "operate-vnf-op-config"
                    not in df["lcm-operations-configuration"]
                    or not utils.find_in_list(
                        df["lcm-operations-configuration"][
                            "operate-vnf-op-config"
                        ].get("day1-2", []),
                        lambda config: config["id"] == indata["id"],
                    )
                ):
                    raise EngineException(
                        "'day1-2 configuration' not defined in the descriptor but it is "
                        "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                            df["id"], sa["id"]
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                for configuration in get_iterable(
                    df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                        "day1-2", []
                    )
                ):
                    for primitive in get_iterable(
                        configuration.get("config-primitive")
                    ):
                        if (
                            primitive["name"]
                            == sca["vnf-config-primitive-name-ref"]
                        ):
                            break
                    else:
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                            "config-primitive-name-ref='{}' does not match any "
                            "day1-2 configuration:config-primitive:name".format(
                                df["id"],
                                sa["id"],
                                sca["vnf-config-primitive-name-ref"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes associate file system storage (via super)
    Deletes associated vnfpkgops from database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: list to append the messages not to be sent, or None
    :return: None
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove package-operation records linked to this VNF package.
    self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
    # Remove stored revisions; revision _ids embed the package _id, hence
    # the regex match. NOTE(review): the $regex is unanchored — assumes _id
    # is a UUID that cannot be a substring of another _id; confirm.
    self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
def sol005_projection(self, data):
    # Expose internal _admin state under the SOL005 field names and attach
    # the HATEOAS _links section expected by the VNF package API.
    data["onboardingState"] = data["_admin"]["onboardingState"]
    data["operationalState"] = data["_admin"]["operationalState"]
    data["usageState"] = data["_admin"]["usageState"]

    links = {}  # NOTE(review): line reconstructed from garbled source — confirm.
    links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
    links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
    links["packageContent"] = {
        "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
    }
    data["_links"] = links

    return super().sol005_projection(data)
def find_software_version(vnfd: dict) -> str:
    """Return the software-version declared in a VNFD.

    Accepts either a bare descriptor or one wrapped in a "vnfd" envelope.
    Falls back to "1.0" when no software-version is present.
    """
    descriptor = vnfd["vnfd"] if vnfd.get("vnfd") else vnfd
    return descriptor.get("software-version") or "1.0"
def extract_policies(vnfd: dict) -> dict:
    """Strip policy-related sections from a VNFD, in place.

    Removes scaling-aspect/healing-aspect from every df and
    alarm/monitoring-parameter from every vdu, then returns the same dict.
    """
    for deployment_flavour in vnfd.get("df", {}):
        for aspect_key in ("scaling-aspect", "healing-aspect"):
            if deployment_flavour.get(aspect_key, {}):
                deployment_flavour.pop(aspect_key)
    for vdu in vnfd.get("vdu", {}):
        for policy_key in ("alarm", "monitoring-parameter"):
            if vdu.get(policy_key, {}):
                vdu.pop(policy_key)
    return vnfd
def extract_day12_primitives(vnfd: dict) -> dict:
    """Removes the day12 primitives from the VNFD descriptors, in place.

    For each df that carries operate-vnf-op-config:day1-2 entries, drops
    their initial/terminate/config primitive lists and writes the cleaned
    structures back into the descriptor.
    NOTE(review): source was garbled; the condition lines and the middle key
    of the pop loop were reconstructed — confirm against version control.
    """
    for df_id, df in enumerate(vnfd.get("df", {})):
        if (
            df.get("lcm-operations-configuration", {})
            .get("operate-vnf-op-config", {})
            .get("day1-2", {})
        ):
            day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                "day1-2"
            )
            for config_id, config in enumerate(day12):
                # Remove every kind of day1-2 primitive list.
                for key in (
                    "initial-config-primitive",
                    "config-primitive",
                    "terminate-config-primitive",
                ):
                    config.pop(key, None)
                day12[config_id] = config
            df["lcm-operations-configuration"]["operate-vnf-op-config"][
                "day1-2"
            ] = day12
        vnfd["df"][df_id] = df
    return vnfd
def remove_modifiable_items(self, vnfd: dict) -> dict:
    """Removes the modifiable parts from the VNFD descriptors

    It calls different extract functions according to different update types
    to clear all the modifiable items from VNFD

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): Descriptor which does not include modifiable contents
    """
    if vnfd.get("vnfd"):
        # Unwrap the "vnfd" envelope before cleaning.
        # NOTE(review): line reconstructed from garbled source — confirm.
        vnfd = vnfd["vnfd"]
    vnfd.pop("_admin", None)
    # If the other extractions need to be done from VNFD,
    # the new extract methods could be appended to below list.
    for extract_function in [self.extract_day12_primitives, self.extract_policies]:
        vnfd_temp = extract_function(vnfd)
        vnfd = vnfd_temp
    return vnfd
def _validate_descriptor_changes(
    self,
    descriptor_id: str,
    descriptor_file_name: str,
    old_descriptor_directory: str,
    new_descriptor_directory: str,
):
    """Compares the old and new VNFD descriptors and validates the new descriptor.

    Args:
        descriptor_id: internal id of the descriptor being updated
        descriptor_file_name: descriptor file name inside the package
        old_descriptor_directory (str): Directory of descriptor which is in-use
        new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)

    Raises:
        EngineException: In case of error when there are unallowed changes

    NOTE(review): source was garbled; try/except structure, early returns and
    the DeepDiff post-processing were reconstructed — confirm against VCS.
    """
    try:
        # If VNFD does not exist in DB or it is not in use by any NS,
        # validation is not required.
        vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
        if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
            return

        # Get the old and new descriptor contents in order to compare them.
        with self.fs.file_open(
            (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
        ) as old_descriptor_file:
            with self.fs.file_open(
                (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as new_descriptor_file:
                old_content = yaml.safe_load(old_descriptor_file.read())
                new_content = yaml.safe_load(new_descriptor_file.read())

                # If software version has changed, we do not need to validate
                # the differences anymore.
                if old_content and new_content:
                    if self.find_software_version(
                        old_content
                    ) != self.find_software_version(new_content):
                        return

                    disallowed_change = DeepDiff(
                        self.remove_modifiable_items(old_content),
                        self.remove_modifiable_items(new_content),
                    )

                    if disallowed_change:
                        changed_nodes = functools.reduce(
                            lambda a, b: a + " , " + b,
                            (
                                node.lstrip("root")
                                for node in disallowed_change.get(
                                    "values_changed"
                                ).keys()
                            ),
                        )

                        raise EngineException(
                            f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                            "there are disallowed changes in the vnf descriptor.",
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
    except (
        DbException,
        AttributeError,
        IndexError,
        KeyError,
        ValueError,
    ) as e:
        # Re-raise as the same exception type with a descriptive message.
        raise type(e)(
            "VNF Descriptor could not be processed with error: {}.".format(e)
        )
1392 class NsdTopic(DescriptorTopic
):
def __init__(self, db, fs, msg, auth):
    """Delegate construction to DescriptorTopic (db, filesystem, message bus, auth)."""
    super().__init__(db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate an NSD against the ETSI SOL006 yang model via pyangbind.

    Returns the normalized descriptor dict; raises EngineException (422) on
    old-format input or any validation failure.
    NOTE(review): source was garbled; load_ietf_json arguments and the return
    statement were reconstructed — confirm against version control.
    """
    if self._descriptor_data_is_in_old_format(data):
        raise EngineException(
            "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
    try:
        # vnf-profile entries are not part of the yang model projection, so
        # they are preserved aside and restored after the round-trip.
        nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
        mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
        pybindJSONDecoder.load_ietf_json(
            {"nsd": {"nsd": [data]}},
            None,
            None,
            obj=mynsd,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(mynsd, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
        if nsd_vnf_profiles:
            desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
        return desc_out
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
1429 def _descriptor_data_is_in_old_format(data
):
1430 return ("nsd-catalog" in data
) or ("nsd:nsd-catalog" in data
)
1433 def _remove_envelop(indata
=None):
1436 clean_indata
= indata
1438 if clean_indata
.get("nsd"):
1439 clean_indata
= clean_indata
["nsd"]
1440 elif clean_indata
.get("etsi-nfv-nsd:nsd"):
1441 clean_indata
= clean_indata
["etsi-nfv-nsd:nsd"]
1442 if clean_indata
.get("nsd"):
1444 not isinstance(clean_indata
["nsd"], list)
1445 or len(clean_indata
["nsd"]) != 1
1447 raise EngineException("'nsd' must be a list of only one element")
1448 clean_indata
= clean_indata
["nsd"][0]
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a new NSD: strip read-only SOL005 fields, run pyangbind
    validation and cross-reference checks. Returns the validated descriptor."""
    indata.pop("nsdOnboardingState", None)
    indata.pop("nsdOperationalState", None)
    indata.pop("nsdUsageState", None)

    indata.pop("links", None)

    indata = self.pyangbind_validation("nsds", indata, force)
    # Cross references validation in the descriptor
    # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
    for vld in get_iterable(indata.get("virtual-link-desc")):
        self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

    self.validate_vnf_profiles_vnfd_id(indata)

    # NOTE(review): return reconstructed from garbled source — confirm.
    return indata
def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
    """Reject virtual-link-protocol-data on profiles of a mgmt-network VLD.

    Raises EngineException (422) when any df:virtual-link-profile attached to
    this management VLD carries protocol data.
    """
    if not vld.get("mgmt-network"):
        return
    vld_id = vld.get("id")
    for flavour in get_iterable(indata.get("df")):
        for profile in get_iterable(flavour.get("virtual-link-profile")):
            if not (vld_id and vld_id == profile.get("virtual-link-desc-id")):
                continue
            if profile.get("virtual-link-protocol-data"):
                raise EngineException(
                    "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                    "protocol-data You cannot set a virtual-link-protocol-data "
                    "when mgmt-network is True".format(flavour["id"], profile["id"]),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def validate_vnf_profiles_vnfd_id(indata):
    """Check every df:vnf-profile:vnfd-id against the NSD's declared vnfd-id list.

    Raises EngineException (422) on the first dangling reference.
    """
    declared_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
    for flavour in get_iterable(indata.get("df")):
        for profile in get_iterable(flavour.get("vnf-profile")):
            referenced = profile.get("vnfd-id")
            if referenced and referenced not in declared_vnfd_ids:
                raise EngineException(
                    "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                    "does not match any vnfd-id".format(
                        flavour["id"], profile["id"], referenced
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def _validate_input_edit(self, indata, content, force=False):
    # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
    """
    indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}

    NOTE(review): source was garbled; else branches and the final return were
    reconstructed — confirm against version control.
    """
    if "_admin" not in indata:
        indata["_admin"] = {}

    if "nsdOperationalState" in indata:
        if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
            indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
        else:
            raise EngineException(
                "State '{}' is not a valid operational state".format(
                    indata["nsdOperationalState"]
                ),
                http_code=HTTPStatus.BAD_REQUEST,
            )

    # In the case of user defined data, we need to put the data in the root of the object
    # to preserve current expected behaviour
    if "userDefinedData" in indata:
        data = indata.pop("userDefinedData")
        if type(data) == dict:
            indata["_admin"]["userDefinedData"] = data
        else:
            raise EngineException(
                "userDefinedData should be an object, but is '{}' instead".format(
                    type(data)
                ),
                http_code=HTTPStatus.BAD_REQUEST,
            )
    if (
        "operationalState" in indata["_admin"]
        and content["_admin"]["operationalState"]
        == indata["_admin"]["operationalState"]
    ):
        raise EngineException(
            "nsdOperationalState already {}".format(
                content["_admin"]["operationalState"]
            ),
            http_code=HTTPStatus.CONFLICT,
        )
    return indata
def _check_descriptor_dependencies(self, session, descriptor):
    """
    Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
    connection points are ok
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: descriptor to be inserted or edit
    :return: None or raises exception
    """
    # Forced operations skip dependency validation.
    if session["force"]:
        return
    vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

    # Cross references validation in the descriptor and vnfd connection point validation
    for df in get_iterable(descriptor.get("df")):
        self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
    """Build a {vnfd-id: vnfd} index for the descriptor's constituent VNFDs.

    Raises EngineException (409) when a referenced vnfd does not exist in the
    project scope.
    NOTE(review): source was garbled; the index initialization, the emptiness
    check and the return were reconstructed — confirm against VCS.
    """
    vnfds_index = {}
    if descriptor.get("vnfd-id") and not session["force"]:
        for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
            query_filter = self._get_project_filter(session)
            query_filter["id"] = vnfd_id
            vnf_list = self.db.get_list("vnfds", query_filter)
            if not vnf_list:
                raise EngineException(
                    "Descriptor error at 'vnfd-id'='{}' references a non "
                    "existing vnfd".format(vnfd_id),
                    http_code=HTTPStatus.CONFLICT,
                )
            # Keep the first (project-visible) match for each id.
            vnfds_index[vnfd_id] = vnf_list[0]
    return vnfds_index
def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
    """Check each constituent-cpd-id in a df's vnf-profiles against the
    ext-cpd ids of the corresponding VNFD; raises EngineException (422) on a
    dangling reference.
    NOTE(review): source was garbled; the .format() arguments of the error
    message were reconstructed — confirm against version control.
    """
    for vnf_profile in get_iterable(df.get("vnf-profile")):
        vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
        all_vnfd_ext_cpds = set()
        for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
            if ext_cpd.get("id"):
                all_vnfd_ext_cpds.add(ext_cpd.get("id"))

        for virtual_link in get_iterable(
            vnf_profile.get("virtual-link-connectivity")
        ):
            for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                    raise EngineException(
                        "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                        "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                        "non existing ext-cpd:id inside vnfd '{}'".format(
                            df["id"],
                            vnf_profile["id"],
                            virtual_link["virtual-link-profile-id"],
                            vl_cpd_id,
                            vnfd["id"],
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the generic edit-conflict checks, then validate that all
    descriptors this NSD depends on exist. Returns the final content."""
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    self._check_descriptor_dependencies(session, final_content)

    return final_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
    that NSD can be public and be used by other projects.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: nsd internal id
    :param db_content: The database content of the _id
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return
    descriptor = db_content
    descriptor_id = descriptor.get("id")
    if not descriptor_id:  # empty nsd not uploaded
        return

    # check NSD used by NS
    _filter = self._get_project_filter(session)
    _filter["nsd-id"] = _id
    if self.db.get_list("nsrs", _filter):
        raise EngineException(
            "There is at least one NS instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )

    # check NSD referenced by NST
    del _filter["nsd-id"]
    _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
    if self.db.get_list("nsts", _filter):
        raise EngineException(
            "There is at least one NetSlice Template referencing this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes associate file system storage (via super)
    Deletes associated vnfpkgops from database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: list to append the messages not to be sent, or None
    :return: None
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove stored NSD revisions; revision _ids embed the descriptor _id,
    # hence the regex match.
    self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
def extract_day12_primitives(nsd: dict) -> dict:
    """Drop day-1/day-2 primitive lists from ns-configuration, in place.

    Returns the same descriptor dict.
    NOTE(review): the "config-primitive" key was reconstructed from garbled
    source — confirm against version control.
    """
    ns_configuration = nsd.get("ns-configuration")
    if ns_configuration:
        for primitive_key in (
            "config-primitive",
            "initial-config-primitive",
            "terminate-config-primitive",
        ):
            ns_configuration.pop(primitive_key, None)
    return nsd
def remove_modifiable_items(self, nsd: dict) -> dict:
    """Removes the modifiable parts from the VNFD descriptors

    It calls different extract functions according to different update types
    to clear all the modifiable items from NSD

    Args:
        nsd (dict): Descriptor as a dictionary

    Returns:
        nsd (dict): Descriptor which does not include modifiable contents

    NOTE(review): source was garbled; the unwrapping assignments and the
    return were reconstructed — confirm against version control.
    """
    # Peel off nested "nsd" envelopes until the bare content remains.
    while isinstance(nsd, dict) and nsd.get("nsd"):
        nsd = nsd["nsd"]
    if isinstance(nsd, list):
        nsd = nsd[0]
    nsd.pop("_admin", None)
    # If the more extractions need to be done from NSD,
    # the new extract methods could be appended to below list.
    for extract_function in [self.extract_day12_primitives]:
        nsd_temp = extract_function(nsd)
        nsd = nsd_temp
    return nsd
def _validate_descriptor_changes(
    self,
    descriptor_id: str,
    descriptor_file_name: str,
    old_descriptor_directory: str,
    new_descriptor_directory: str,
):
    """Compares the old and new NSD descriptors and validates the new descriptor

    Args:
        descriptor_id: internal id of the descriptor being updated
        descriptor_file_name: descriptor file name inside the package
        old_descriptor_directory: Directory of descriptor which is in-use
        new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

    Raises:
        EngineException: In case of error if the changes are not allowed

    NOTE(review): source was garbled; try/except structure, early returns and
    the DeepDiff post-processing were reconstructed — confirm against VCS.
    """
    try:
        # If NSD does not exist in DB, or it is not in use by any NS,
        # validation is not required.
        nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
        if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
            return

        # Get the old and new descriptor contents in order to compare them.
        with self.fs.file_open(
            (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
        ) as old_descriptor_file:
            with self.fs.file_open(
                (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as new_descriptor_file:
                old_content = yaml.safe_load(old_descriptor_file.read())
                new_content = yaml.safe_load(new_descriptor_file.read())

                if old_content and new_content:
                    disallowed_change = DeepDiff(
                        self.remove_modifiable_items(old_content),
                        self.remove_modifiable_items(new_content),
                    )

                    if disallowed_change:
                        changed_nodes = functools.reduce(
                            lambda a, b: a + ", " + b,
                            (
                                node.lstrip("root")
                                for node in disallowed_change.get(
                                    "values_changed"
                                ).keys()
                            ),
                        )

                        raise EngineException(
                            f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                            "there are disallowed changes in the ns descriptor. ",
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
    except (
        DbException,
        AttributeError,
        IndexError,
        KeyError,
        ValueError,
    ) as e:
        # Re-raise as the same exception type with a descriptive message.
        raise type(e)(
            "NS Descriptor could not be processed with error: {}.".format(e)
        )
def sol005_projection(self, data):
    # Expose internal _admin state under the SOL005 NSD field names and
    # attach the HATEOAS _links section expected by the NSD management API.
    data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
    data["nsdOperationalState"] = data["_admin"]["operationalState"]
    data["nsdUsageState"] = data["_admin"]["usageState"]

    links = {}  # NOTE(review): line reconstructed from garbled source — confirm.
    links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
    links["nsd_content"] = {
        "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
    }
    data["_links"] = links

    return super().sol005_projection(data)
1793 class NstTopic(DescriptorTopic
):
# Slice templates count against the project's "slice_templates" quota.
quota_name = "slice_templates"
def __init__(self, db, fs, msg, auth):
    """Delegate construction to DescriptorTopic (explicit base call, same effect as super())."""
    DescriptorTopic.__init__(self, db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate an NST against the NST yang model via pyangbind.

    Returns the normalized descriptor dict; raises EngineException (422) on
    any validation failure.
    NOTE(review): source was garbled; the try block, load_ietf_json arguments
    and the return were reconstructed — confirm against version control.
    """
    try:
        mynst = nst_im()
        pybindJSONDecoder.load_ietf_json(
            {"nst": [data]},
            None,
            None,
            obj=mynst,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(mynst, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        return desc_out
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
1822 def _remove_envelop(indata
=None):
1825 clean_indata
= indata
1827 if clean_indata
.get("nst"):
1829 not isinstance(clean_indata
["nst"], list)
1830 or len(clean_indata
["nst"]) != 1
1832 raise EngineException("'nst' must be a list only one element")
1833 clean_indata
= clean_indata
["nst"][0]
1834 elif clean_indata
.get("nst:nst"):
1836 not isinstance(clean_indata
["nst:nst"], list)
1837 or len(clean_indata
["nst:nst"]) != 1
1839 raise EngineException("'nst:nst' must be a list only one element")
1840 clean_indata
= clean_indata
["nst:nst"][0]
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a new NST: strip read-only state fields, run pyangbind
    validation and return a copy of the validated descriptor."""
    indata.pop("onboardingState", None)
    indata.pop("operationalState", None)
    indata.pop("usageState", None)
    indata = self.pyangbind_validation("nsts", indata, force)
    return indata.copy()
def _check_descriptor_dependencies(self, session, descriptor):
    """
    Check that the dependent descriptors exist on a new descriptor or edition
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: descriptor to be inserted or edit
    :return: None or raises exception
    """
    if not descriptor.get("netslice-subnet"):
        return
    for nsd in descriptor["netslice-subnet"]:
        nsd_id = nsd["nsd-ref"]
        filter_q = self._get_project_filter(session)
        filter_q["id"] = nsd_id
        if not self.db.get_list("nsds", filter_q):
            raise EngineException(
                "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                "existing nsd".format(nsd_id),
                http_code=HTTPStatus.CONFLICT,
            )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the generic edit-conflict checks, then validate that all NSDs
    referenced by this NST exist. Returns the final content."""
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    self._check_descriptor_dependencies(session, final_content)
    return final_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
    that NST can be public and be used by other projects.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: nst internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    # TODO: Check this method
    if session["force"]:
        return
    # Get Network Slice Template from Database
    _filter = self._get_project_filter(session)
    _filter["_admin.nst-id"] = _id
    if self.db.get_list("nsis", _filter):
        raise EngineException(
            "there is at least one Netslice Instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def sol005_projection(self, data):
    # Expose internal _admin state under the SOL005 field names and attach
    # the HATEOAS _links section expected by the netslice-template API.
    data["onboardingState"] = data["_admin"]["onboardingState"]
    data["operationalState"] = data["_admin"]["operationalState"]
    data["usageState"] = data["_admin"]["usageState"]

    links = {}  # NOTE(review): line reconstructed from garbled source — confirm.
    links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
    links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
    data["_links"] = links

    return super().sol005_projection(data)
1912 class PduTopic(BaseTopic
):
# PDU descriptors count against the project's "pduds" quota.
quota_name = "pduds"
# JSON schemas used to validate creation and edition payloads.
schema_new = pdu_new_schema
schema_edit = pdu_edit_schema
def __init__(self, db, fs, msg, auth):
    """Delegate construction to BaseTopic (explicit base call, same effect as super())."""
    BaseTopic.__init__(self, db, fs, msg, auth)
def format_on_new(content, project_id=None, make_public=False):
    """Run the generic _admin initialization, then set the PDU-specific
    initial lifecycle states."""
    BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
    content["_admin"]["onboardingState"] = "CREATED"
    content["_admin"]["operationalState"] = "ENABLED"
    content["_admin"]["usageState"] = "NOT_IN_USE"
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any vnfr that uses this PDU
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: pdu internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return

    _filter = self._get_project_filter(session)
    _filter["vdur.pdu-id"] = _id
    if self.db.get_list("vnfrs", _filter):
        raise EngineException(
            "There is at least one VNF instance using this PDU",
            http_code=HTTPStatus.CONFLICT,
        )
1949 class VnfPkgOpTopic(BaseTopic
):
# JSON schema used to validate new VNF-package-operation requests.
schema_new = vnfpkgop_new_schema
def __init__(self, db, fs, msg, auth):
    """Delegate construction to BaseTopic (explicit base call, same effect as super())."""
    BaseTopic.__init__(self, db, fs, msg, auth)
def edit(self, session, _id, indata=None, kwargs=None, content=None):
    """VnfPkgOp occurrences are immutable: editing is rejected with 405."""
    raise EngineException(
        "Method 'edit' not allowed for topic '{}'".format(self.topic),
        HTTPStatus.METHOD_NOT_ALLOWED,
    )
def delete(self, session, _id, dry_run=False):
    """VnfPkgOp occurrences are immutable: deletion is rejected with 405."""
    raise EngineException(
        "Method 'delete' not allowed for topic '{}'".format(self.topic),
        HTTPStatus.METHOD_NOT_ALLOWED,
    )
def delete_list(self, session, filter_q=None):
    """VnfPkgOp occurrences are immutable: bulk deletion is rejected with 405."""
    raise EngineException(
        "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
        HTTPStatus.METHOD_NOT_ALLOWED,
    )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
    """
    Creates a new entry into database.
    :param rollback: list to append created items at database in case a rollback may to be done
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param indata: data to be inserted
    :param kwargs: used to override the indata descriptor
    :param headers: http request headers
    :return: _id, op_id:
        _id: identity of the inserted data.
        op_id: None

    NOTE(review): source was garbled; the for/else KDU lookup, the helm/juju
    branching and the vnfpkgop_desc literal were reconstructed — confirm
    against version control.
    """
    self._update_input_with_kwargs(indata, kwargs)
    validate_input(indata, self.schema_new)
    vnfpkg_id = indata["vnfPkgId"]
    filter_q = BaseTopic._get_project_filter(session)
    filter_q["_id"] = vnfpkg_id
    vnfd = self.db.get_one("vnfds", filter_q)
    operation = indata["lcmOperationType"]
    kdu_name = indata["kdu_name"]
    # Locate the KDU inside the VNFD; for/else raises when not found.
    for kdu in vnfd.get("kdu", []):
        if kdu["name"] == kdu_name:
            helm_chart = kdu.get("helm-chart")
            juju_bundle = kdu.get("juju-bundle")
            break
    else:
        raise EngineException(
            "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
        )
    if helm_chart:
        indata["helm-chart"] = helm_chart
        # A "repo/chart" reference carries the repo name before the slash.
        match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
        repo_name = match.group(1) if match else None
    elif juju_bundle:
        indata["juju-bundle"] = juju_bundle
        match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
        repo_name = match.group(1) if match else None
    else:
        raise EngineException(
            "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                vnfpkg_id, kdu_name
            )
        )
    if repo_name:
        del filter_q["_id"]
        filter_q["name"] = repo_name
        repo = self.db.get_one("k8srepos", filter_q)
        k8srepo_id = repo.get("_id")
        k8srepo_url = repo.get("url")
    else:
        k8srepo_id = None
        k8srepo_url = None
    indata["k8srepoId"] = k8srepo_id
    indata["k8srepo_url"] = k8srepo_url
    vnfpkgop_id = str(uuid4())
    vnfpkgop_desc = {
        "_id": vnfpkgop_id,
        "operationState": "PROCESSING",
        "vnfPkgId": vnfpkg_id,
        "lcmOperationType": operation,
        "isAutomaticInvocation": False,
        "isCancelPending": False,
        "operationParams": indata,
        "links": {
            "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
            "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
        },
    }
    self.format_on_new(
        vnfpkgop_desc, session["project_id"], make_public=session["public"]
    )
    ctime = vnfpkgop_desc["_admin"]["created"]
    vnfpkgop_desc["statusEnteredTime"] = ctime
    vnfpkgop_desc["startTime"] = ctime
    self.db.create(self.topic, vnfpkgop_desc)
    rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
    self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
    return vnfpkgop_id, None