1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
25 from deepdiff
import DeepDiff
26 from hashlib
import md5
27 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
28 from http
import HTTPStatus
30 from uuid
import uuid4
31 from re
import fullmatch
32 from zipfile
import ZipFile
33 from osm_nbi
.validation
import (
40 from osm_nbi
.base_topic
import (
44 detect_descriptor_usage
,
46 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
47 from osm_im
.nst
import nst
as nst_im
48 from pyangbind
.lib
.serialise
import pybindJSONDecoder
49 import pyangbind
.lib
.pybindJSON
as pybindJSON
50 from osm_nbi
import utils
# Module authorship metadata (PEP 8 dunder); the assignment had been split
# across two lines, which is a syntax error.
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor topics (VNFD, NSD, ...).

    Provides shared CRUD, package upload and validation plumbing on top of
    BaseTopic; concrete topics override the *_validate_* hooks.
    """

    def __init__(self, db, fs, msg, auth):
        # Delegate database/filesystem/messaging/auth wiring to the base class.
        BaseTopic.__init__(self, db, fs, msg, auth)
59 def _validate_input_new(self
, indata
, storage_params
, force
=False):
62 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
63 final_content
= super().check_conflict_on_edit(
64 session
, final_content
, edit_content
, _id
67 def _check_unique_id_name(descriptor
, position
=""):
68 for desc_key
, desc_item
in descriptor
.items():
69 if isinstance(desc_item
, list) and desc_item
:
72 for index
, list_item
in enumerate(desc_item
):
73 if isinstance(list_item
, dict):
74 _check_unique_id_name(
75 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
79 list_item
.get("id") or list_item
.get("name")
81 desc_item_id
= "id" if list_item
.get("id") else "name"
82 if desc_item_id
and list_item
.get(desc_item_id
):
83 if list_item
[desc_item_id
] in used_ids
:
84 position
= "{}.{}[{}]".format(
85 position
, desc_key
, index
87 raise EngineException(
88 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
90 list_item
[desc_item_id
],
93 HTTPStatus
.UNPROCESSABLE_ENTITY
,
95 used_ids
.append(list_item
[desc_item_id
])
97 _check_unique_id_name(final_content
)
98 # 1. validate again with pyangbind
99 # 1.1. remove internal keys
101 for k
in ("_id", "_admin"):
102 if k
in final_content
:
103 internal_keys
[k
] = final_content
.pop(k
)
104 storage_params
= internal_keys
["_admin"].get("storage")
105 serialized
= self
._validate
_input
_new
(
106 final_content
, storage_params
, session
["force"]
109 # 1.2. modify final_content with a serialized version
110 final_content
= copy
.deepcopy(serialized
)
111 # 1.3. restore internal keys
112 for k
, v
in internal_keys
.items():
117 # 2. check that this id is not present
118 if "id" in edit_content
:
119 _filter
= self
._get
_project
_filter
(session
)
121 _filter
["id"] = final_content
["id"]
122 _filter
["_id.neq"] = _id
124 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
125 raise EngineException(
126 "{} with id '{}' already exists for this project".format(
127 (str(self
.topic
))[:-1], final_content
["id"]
135 def format_on_new(content
, project_id
=None, make_public
=False):
136 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
137 content
["_admin"]["onboardingState"] = "CREATED"
138 content
["_admin"]["operationalState"] = "DISABLED"
139 content
["_admin"]["usageState"] = "NOT_IN_USE"
141 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
143 Deletes file system storage associated with the descriptor
144 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
145 :param _id: server internal id
146 :param db_content: The database content of the descriptor
147 :param not_send_msg: To not send message (False) or store content (list) instead
148 :return: None if ok or raises EngineException with the problem
150 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
151 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
152 # Remove file revisions
153 if "revision" in db_content
["_admin"]:
154 revision
= db_content
["_admin"]["revision"]
156 self
.fs
.file_delete(_id
+ ":" + str(revision
), ignore_non_exist
=True)
157 revision
= revision
- 1
160 def get_one_by_id(db
, session
, topic
, id):
161 # find owned by this project
162 _filter
= BaseTopic
._get
_project
_filter
(session
)
164 desc_list
= db
.get_list(topic
, _filter
)
165 if len(desc_list
) == 1:
167 elif len(desc_list
) > 1:
169 "Found more than one {} with id='{}' belonging to this project".format(
175 # not found any: try to find public
176 _filter
= BaseTopic
._get
_project
_filter
(session
)
178 desc_list
= db
.get_list(topic
, _filter
)
181 "Not found any {} with id='{}'".format(topic
[:-1], id),
182 HTTPStatus
.NOT_FOUND
,
184 elif len(desc_list
) == 1:
188 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
194 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
196 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
197 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
198 (self.upload_content)
199 :param rollback: list to append created items at database in case a rollback may to be done
200 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
201 :param indata: data to be inserted
202 :param kwargs: used to override the indata descriptor
203 :param headers: http request headers
204 :return: _id, None: identity of the inserted data; and None as there is not any operation
207 # No needed to capture exceptions
209 self
.check_quota(session
)
213 if "userDefinedData" in indata
:
214 indata
= indata
["userDefinedData"]
216 # Override descriptor with query string kwargs
217 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
218 # uncomment when this method is implemented.
219 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
220 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
222 content
= {"_admin": {"userDefinedData": indata
, "revision": 0}}
225 content
, session
["project_id"], make_public
=session
["public"]
227 _id
= self
.db
.create(self
.topic
, content
)
228 rollback
.append({"topic": self
.topic
, "_id": _id
})
229 self
._send
_msg
("created", {"_id": _id
})
232 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
234 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
235 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
236 :param _id : the nsd,vnfd is already created, this is the id
237 :param indata: http body request
238 :param kwargs: user query string to override parameters. NOT USED
239 :param headers: http request headers
240 :return: True if package is completely uploaded or False if partial content has been uploded
241 Raise exception on error
243 # Check that _id exists and it is valid
244 current_desc
= self
.show(session
, _id
)
246 content_range_text
= headers
.get("Content-Range")
247 expected_md5
= headers
.get("Content-File-MD5")
249 content_type
= headers
.get("Content-Type")
252 and "application/gzip" in content_type
253 or "application/x-gzip" in content_type
256 if content_type
and "application/zip" in content_type
:
258 filename
= headers
.get("Content-Filename")
259 if not filename
and compressed
:
260 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
265 if "revision" in current_desc
["_admin"]:
266 revision
= current_desc
["_admin"]["revision"] + 1
268 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
274 if content_range_text
:
276 content_range_text
.replace("-", " ").replace("/", " ").split()
279 content_range
[0] != "bytes"
280 ): # TODO check x<y not negative < total....
282 start
= int(content_range
[1])
283 end
= int(content_range
[2]) + 1
284 total
= int(content_range
[3])
287 # Rather than using a temp folder, we will store the package in a folder based on
288 # the current revision.
289 proposed_revision_path
= (
290 _id
+ ":" + str(revision
)
291 ) # all the content is upload here and if ok, it is rename from id_ to is folder
294 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
295 raise EngineException(
296 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
299 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
300 self
.fs
.mkdir(proposed_revision_path
)
301 fs_rollback
.append(proposed_revision_path
)
303 storage
= self
.fs
.get_params()
304 storage
["folder"] = proposed_revision_path
306 file_path
= (proposed_revision_path
, filename
)
307 if self
.fs
.file_exists(file_path
, "file"):
308 file_size
= self
.fs
.file_size(file_path
)
311 if file_size
!= start
:
312 raise EngineException(
313 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
316 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
318 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
319 if isinstance(indata
, dict):
320 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
321 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
325 indata_text
= indata
.read(4096)
326 indata_len
+= len(indata_text
)
329 file_pkg
.write(indata_text
)
330 if content_range_text
:
331 if indata_len
!= end
- start
:
332 raise EngineException(
333 "Mismatch between Content-Range header {}-{} and body length of {}".format(
334 start
, end
- 1, indata_len
336 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
339 # TODO update to UPLOADING
346 chunk_data
= file_pkg
.read(1024)
348 file_md5
.update(chunk_data
)
349 chunk_data
= file_pkg
.read(1024)
350 if expected_md5
!= file_md5
.hexdigest():
351 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
353 if compressed
== "gzip":
354 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
355 descriptor_file_name
= None
357 tarname
= tarinfo
.name
358 tarname_path
= tarname
.split("/")
360 not tarname_path
[0] or ".." in tarname_path
361 ): # if start with "/" means absolute path
362 raise EngineException(
363 "Absolute path or '..' are not allowed for package descriptor tar.gz"
365 if len(tarname_path
) == 1 and not tarinfo
.isdir():
366 raise EngineException(
367 "All files must be inside a dir for package descriptor tar.gz"
370 tarname
.endswith(".yaml")
371 or tarname
.endswith(".json")
372 or tarname
.endswith(".yml")
374 storage
["pkg-dir"] = tarname_path
[0]
375 if len(tarname_path
) == 2:
376 if descriptor_file_name
:
377 raise EngineException(
378 "Found more than one descriptor file at package descriptor tar.gz"
380 descriptor_file_name
= tarname
381 if not descriptor_file_name
:
382 raise EngineException(
383 "Not found any descriptor file at package descriptor tar.gz"
385 storage
["descriptor"] = descriptor_file_name
386 storage
["zipfile"] = filename
387 self
.fs
.file_extract(tar
, proposed_revision_path
)
388 with self
.fs
.file_open(
389 (proposed_revision_path
, descriptor_file_name
), "r"
390 ) as descriptor_file
:
391 content
= descriptor_file
.read()
392 elif compressed
== "zip":
393 zipfile
= ZipFile(file_pkg
)
394 descriptor_file_name
= None
395 for package_file
in zipfile
.infolist():
396 zipfilename
= package_file
.filename
397 file_path
= zipfilename
.split("/")
399 not file_path
[0] or ".." in zipfilename
400 ): # if start with "/" means absolute path
401 raise EngineException(
402 "Absolute path or '..' are not allowed for package descriptor zip"
406 zipfilename
.endswith(".yaml")
407 or zipfilename
.endswith(".json")
408 or zipfilename
.endswith(".yml")
410 zipfilename
.find("/") < 0
411 or zipfilename
.find("Definitions") >= 0
413 storage
["pkg-dir"] = ""
414 if descriptor_file_name
:
415 raise EngineException(
416 "Found more than one descriptor file at package descriptor zip"
418 descriptor_file_name
= zipfilename
419 if not descriptor_file_name
:
420 raise EngineException(
421 "Not found any descriptor file at package descriptor zip"
423 storage
["descriptor"] = descriptor_file_name
424 storage
["zipfile"] = filename
425 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
427 with self
.fs
.file_open(
428 (proposed_revision_path
, descriptor_file_name
), "r"
429 ) as descriptor_file
:
430 content
= descriptor_file
.read()
432 content
= file_pkg
.read()
433 storage
["descriptor"] = descriptor_file_name
= filename
435 if descriptor_file_name
.endswith(".json"):
436 error_text
= "Invalid json format "
437 indata
= json
.load(content
)
439 error_text
= "Invalid yaml format "
440 indata
= yaml
.load(content
, Loader
=yaml
.SafeLoader
)
442 # Need to close the file package here so it can be copied from the
443 # revision to the current, unrevisioned record
448 # Fetch both the incoming, proposed revision and the original revision so we
449 # can call a validate method to compare them
450 current_revision_path
= _id
+ "/"
451 self
.fs
.sync(from_path
=current_revision_path
)
452 self
.fs
.sync(from_path
=proposed_revision_path
)
456 self
._validate
_descriptor
_changes
(
458 descriptor_file_name
,
459 current_revision_path
,
460 proposed_revision_path
,
462 except Exception as e
:
464 self
.fs
.path
+ current_revision_path
, ignore_errors
=True
467 self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True
469 # Only delete the new revision. We need to keep the original version in place
470 # as it has not been changed.
471 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
474 indata
= self
._remove
_envelop
(indata
)
476 # Override descriptor with query string kwargs
478 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
480 current_desc
["_admin"]["storage"] = storage
481 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
482 current_desc
["_admin"]["operationalState"] = "ENABLED"
483 current_desc
["_admin"]["modified"] = time()
484 current_desc
["_admin"]["revision"] = revision
486 deep_update_rfc7396(current_desc
, indata
)
487 current_desc
= self
.check_conflict_on_edit(
488 session
, current_desc
, indata
, _id
=_id
491 # Copy the revision to the active package name by its original id
492 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
494 self
.fs
.path
+ proposed_revision_path
,
495 self
.fs
.path
+ current_revision_path
,
497 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
498 self
.fs
.mkdir(current_revision_path
)
499 self
.fs
.reverse_sync(from_path
=current_revision_path
)
501 shutil
.rmtree(self
.fs
.path
+ _id
)
503 self
.db
.replace(self
.topic
, _id
, current_desc
)
505 # Store a copy of the package as a point in time revision
506 revision_desc
= dict(current_desc
)
507 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
508 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
512 self
._send
_msg
("edited", indata
)
514 # TODO if descriptor has changed because kwargs update content and remove cached zip
515 # TODO if zip is not present creates one
518 except EngineException
:
521 raise EngineException(
522 "invalid Content-Range header format. Expected 'bytes start-end/total'",
523 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
526 raise EngineException(
527 "invalid upload transaction sequence: '{}'".format(e
),
528 HTTPStatus
.BAD_REQUEST
,
530 except tarfile
.ReadError
as e
:
531 raise EngineException(
532 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
534 except (ValueError, yaml
.YAMLError
) as e
:
535 raise EngineException(error_text
+ str(e
))
536 except ValidationError
as e
:
537 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
541 for file in fs_rollback
:
542 self
.fs
.file_delete(file, ignore_non_exist
=True)
544 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
546 Return the file content of a vnfd or nsd
547 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
548 :param _id: Identity of the vnfd, nsd
549 :param path: artifact path or "$DESCRIPTOR" or None
550 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
551 :return: opened file plus Accept format or raises an exception
553 accept_text
= accept_zip
= False
555 if "text/plain" in accept_header
or "*/*" in accept_header
:
557 if "application/zip" in accept_header
or "*/*" in accept_header
:
558 accept_zip
= "application/zip"
559 elif "application/gzip" in accept_header
:
560 accept_zip
= "application/gzip"
562 if not accept_text
and not accept_zip
:
563 raise EngineException(
564 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
565 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
568 content
= self
.show(session
, _id
)
569 if content
["_admin"]["onboardingState"] != "ONBOARDED":
570 raise EngineException(
571 "Cannot get content because this resource is not at 'ONBOARDED' state. "
572 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
573 http_code
=HTTPStatus
.CONFLICT
,
575 storage
= content
["_admin"]["storage"]
576 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
577 if not storage
.get("pkg-dir") and not storage
.get("folder"):
578 raise EngineException(
579 "Packages does not contains artifacts",
580 http_code
=HTTPStatus
.BAD_REQUEST
,
582 if self
.fs
.file_exists(
583 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
585 folder_content
= self
.fs
.dir_ls(
586 (storage
["folder"], storage
["pkg-dir"], *path
)
588 return folder_content
, "text/plain"
589 # TODO manage folders in http
593 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
595 "application/octet-stream",
598 # pkgtype accept ZIP TEXT -> result
599 # manyfiles yes X -> zip
601 # onefile yes no -> zip
603 contain_many_files
= False
604 if storage
.get("pkg-dir"):
605 # check if there are more than one file in the package, ignoring checksums.txt.
606 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
607 if len(pkg_files
) >= 3 or (
608 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
610 contain_many_files
= True
611 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
613 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
616 elif contain_many_files
and not accept_zip
:
617 raise EngineException(
618 "Packages that contains several files need to be retrieved with 'application/zip'"
620 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
623 if not storage
.get("zipfile"):
624 # TODO generate zipfile if not present
625 raise EngineException(
626 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
628 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
631 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
635 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
637 for k
, v
in descriptor
.items():
639 if isinstance(v
, dict):
640 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
641 elif isinstance(v
, list):
644 if isinstance(x
, dict):
645 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
648 new_descriptor
[k
.split(":")[-1]] = new_v
649 return new_descriptor
651 def pyangbind_validation(self
, item
, data
, force
=False):
652 raise EngineException(
653 "Not possible to validate '{}' item".format(item
),
654 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
657 def _validate_input_edit(self
, indata
, content
, force
=False):
658 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
661 if "_admin" not in indata
:
662 indata
["_admin"] = {}
664 if "operationalState" in indata
:
665 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
666 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
668 raise EngineException(
669 "State '{}' is not a valid operational state".format(
670 indata
["operationalState"]
672 http_code
=HTTPStatus
.BAD_REQUEST
,
675 # In the case of user defined data, we need to put the data in the root of the object
676 # to preserve current expected behaviour
677 if "userDefinedData" in indata
:
678 data
= indata
.pop("userDefinedData")
679 if type(data
) == dict:
680 indata
["_admin"]["userDefinedData"] = data
682 raise EngineException(
683 "userDefinedData should be an object, but is '{}' instead".format(
686 http_code
=HTTPStatus
.BAD_REQUEST
,
690 "operationalState" in indata
["_admin"]
691 and content
["_admin"]["operationalState"]
692 == indata
["_admin"]["operationalState"]
694 raise EngineException(
695 "operationalState already {}".format(
696 content
["_admin"]["operationalState"]
698 http_code
=HTTPStatus
.CONFLICT
,
703 def _validate_descriptor_changes(
706 descriptor_file_name
,
707 old_descriptor_directory
,
708 new_descriptor_directory
,
711 # raise EngineException(
712 # "Error in validating new descriptor: <NODE> cannot be modified",
713 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptors (collection "vnfds")."""

    # NOTE(review): class attribute lines were missing from this chunk;
    # restored from the upstream implementation — verify.
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)
725 def pyangbind_validation(self
, item
, data
, force
=False):
726 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
727 raise EngineException(
728 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
729 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
732 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
733 pybindJSONDecoder
.load_ietf_json(
734 {"etsi-nfv-vnfd:vnfd": data
},
741 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
742 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
743 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
744 return utils
.deep_update_dict(data
, desc_out
)
745 except Exception as e
:
746 raise EngineException(
747 "Error in pyangbind validation: {}".format(str(e
)),
748 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
752 def _descriptor_data_is_in_old_format(data
):
753 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
756 def _remove_envelop(indata
=None):
759 clean_indata
= indata
761 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
762 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
763 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
764 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
765 elif clean_indata
.get("vnfd"):
766 if not isinstance(clean_indata
["vnfd"], dict):
767 raise EngineException("'vnfd' must be dict")
768 clean_indata
= clean_indata
["vnfd"]
772 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
773 final_content
= super().check_conflict_on_edit(
774 session
, final_content
, edit_content
, _id
780 for vdu
in get_iterable(final_content
.get("vdu")):
781 if vdu
.get("pdu-type"):
786 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
788 final_content
["_admin"]["type"] = "vnfd"
789 # if neither vud nor pdu do not fill type
792 def check_conflict_on_del(self
, session
, _id
, db_content
):
794 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
795 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
797 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
798 :param _id: vnfd internal id
799 :param db_content: The database content of the _id.
800 :return: None or raises EngineException with the conflict
804 descriptor
= db_content
805 descriptor_id
= descriptor
.get("id")
806 if not descriptor_id
: # empty vnfd not uploaded
809 _filter
= self
._get
_project
_filter
(session
)
811 # check vnfrs using this vnfd
812 _filter
["vnfd-id"] = _id
813 if self
.db
.get_list("vnfrs", _filter
):
814 raise EngineException(
815 "There is at least one VNF instance using this descriptor",
816 http_code
=HTTPStatus
.CONFLICT
,
819 # check NSD referencing this VNFD
820 del _filter
["vnfd-id"]
821 _filter
["vnfd-id"] = descriptor_id
822 if self
.db
.get_list("nsds", _filter
):
823 raise EngineException(
824 "There is at least one NS package referencing this descriptor",
825 http_code
=HTTPStatus
.CONFLICT
,
828 def _validate_input_new(self
, indata
, storage_params
, force
=False):
829 indata
.pop("onboardingState", None)
830 indata
.pop("operationalState", None)
831 indata
.pop("usageState", None)
832 indata
.pop("links", None)
834 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
835 # Cross references validation in the descriptor
837 self
.validate_mgmt_interface_connection_point(indata
)
839 for vdu
in get_iterable(indata
.get("vdu")):
840 self
.validate_vdu_internal_connection_points(vdu
)
841 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
842 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
844 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
846 self
.validate_external_connection_points(indata
)
847 self
.validate_internal_virtual_links(indata
)
848 self
.validate_monitoring_params(indata
)
849 self
.validate_scaling_group_descriptor(indata
)
854 def validate_mgmt_interface_connection_point(indata
):
855 if not indata
.get("vdu"):
857 if not indata
.get("mgmt-cp"):
858 raise EngineException(
859 "'mgmt-cp' is a mandatory field and it is not defined",
860 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
863 for cp
in get_iterable(indata
.get("ext-cpd")):
864 if cp
["id"] == indata
["mgmt-cp"]:
867 raise EngineException(
868 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
869 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
873 def validate_vdu_internal_connection_points(vdu
):
875 for cpd
in get_iterable(vdu
.get("int-cpd")):
876 cpd_id
= cpd
.get("id")
877 if cpd_id
and cpd_id
in int_cpds
:
878 raise EngineException(
879 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
882 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
887 def validate_external_connection_points(indata
):
888 all_vdus_int_cpds
= set()
889 for vdu
in get_iterable(indata
.get("vdu")):
890 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
891 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
894 for cpd
in get_iterable(indata
.get("ext-cpd")):
895 cpd_id
= cpd
.get("id")
896 if cpd_id
and cpd_id
in ext_cpds
:
897 raise EngineException(
898 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
899 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
903 int_cpd
= cpd
.get("int-cpd")
905 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
906 raise EngineException(
907 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
910 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
912 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
914 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
915 for df
in indata
["df"]:
917 "lcm-operations-configuration" in df
918 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
920 configs
= df
["lcm-operations-configuration"][
921 "operate-vnf-op-config"
923 vdus
= df
.get("vdu-profile", [])
925 for config
in configs
:
926 if config
["id"] == vdu
["id"] and utils
.find_in_list(
927 config
.get("execution-environment-list", []),
928 lambda ee
: "juju" in ee
,
930 if not self
._validate
_package
_folders
(
931 storage_params
, "charms"
932 ) and not self
._validate
_package
_folders
(
933 storage_params
, "Scripts/charms"
935 raise EngineException(
936 "Charm defined in vnf[id={}] but not present in "
937 "package".format(indata
["id"])
940 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
941 if not vdu
.get("cloud-init-file"):
943 if not self
._validate
_package
_folders
(
944 storage_params
, "cloud_init", vdu
["cloud-init-file"]
945 ) and not self
._validate
_package
_folders
(
946 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
948 raise EngineException(
949 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
950 "package".format(indata
["id"], vdu
["id"])
953 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
954 # Get VNF configuration through new container
955 for deployment_flavor
in indata
.get("df", []):
956 if "lcm-operations-configuration" not in deployment_flavor
:
959 "operate-vnf-op-config"
960 not in deployment_flavor
["lcm-operations-configuration"]
963 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
964 "operate-vnf-op-config"
966 if day_1_2_config
["id"] == indata
["id"]:
967 if utils
.find_in_list(
968 day_1_2_config
.get("execution-environment-list", []),
969 lambda ee
: "juju" in ee
,
971 if not self
._validate
_package
_folders
(
972 storage_params
, "charms"
973 ) and not self
._validate
_package
_folders
(
974 storage_params
, "Scripts/charms"
976 raise EngineException(
977 "Charm defined in vnf[id={}] but not present in "
978 "package".format(indata
["id"])
981 def _validate_package_folders(self
, storage_params
, folder
, file=None):
982 if not storage_params
:
984 elif not storage_params
.get("pkg-dir"):
985 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
986 f
= "{}_/{}".format(storage_params
["folder"], folder
)
988 f
= "{}/{}".format(storage_params
["folder"], folder
)
990 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
992 if self
.fs
.file_exists(f
, "dir"):
993 if self
.fs
.dir_ls(f
):
997 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
998 f
= "{}_/{}/{}".format(
999 storage_params
["folder"], storage_params
["pkg-dir"], folder
1002 f
= "{}/{}/{}".format(
1003 storage_params
["folder"], storage_params
["pkg-dir"], folder
1006 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1008 if self
.fs
.file_exists(f
, "dir"):
1009 if self
.fs
.dir_ls(f
):
1014 def validate_internal_virtual_links(indata
):
1015 all_ivld_ids
= set()
1016 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1017 ivld_id
= ivld
.get("id")
1018 if ivld_id
and ivld_id
in all_ivld_ids
:
1019 raise EngineException(
1020 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id
),
1021 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1024 all_ivld_ids
.add(ivld_id
)
1026 for vdu
in get_iterable(indata
.get("vdu")):
1027 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
1028 int_cpd_ivld_id
= int_cpd
.get("int-virtual-link-desc")
1029 if int_cpd_ivld_id
and int_cpd_ivld_id
not in all_ivld_ids
:
1030 raise EngineException(
1031 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1032 "int-virtual-link-desc".format(
1033 vdu
["id"], int_cpd
["id"], int_cpd_ivld_id
1035 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1038 for df
in get_iterable(indata
.get("df")):
1039 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1040 vlp_ivld_id
= vlp
.get("id")
1041 if vlp_ivld_id
and vlp_ivld_id
not in all_ivld_ids
:
1042 raise EngineException(
1043 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1044 "int-virtual-link-desc".format(df
["id"], vlp_ivld_id
),
1045 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
@staticmethod
def validate_monitoring_params(indata):
    """Validate that monitoring-parameter ids are unique across the VNFD.

    Scans int-virtual-link-desc, vdu and df monitoring parameters, raising
    if the same id is declared more than once anywhere in the descriptor.

    :param indata: VNFD descriptor as a dictionary
    :raises EngineException: with UNPROCESSABLE_ENTITY on duplicated ids
    """
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                        ivld["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                        vdu["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "df[id='{}']:monitoring-parameter[id='{}']".format(
                        df["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)
@staticmethod
def validate_scaling_group_descriptor(indata):
    """Validate scaling-aspect references inside a VNFD.

    Checks that every scaling-criteria "vnf-monitoring-param-ref" matches a
    declared monitoring parameter, and that every scaling-config-action
    references an existing day1-2 configuration and config-primitive.

    :param indata: VNFD descriptor as a dictionary
    :raises EngineException: with UNPROCESSABLE_ENTITY on dangling references
    """
    # Collect every monitoring-parameter id declared anywhere in the VNFD.
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            all_monitoring_params.add(mp.get("id"))

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for sa in get_iterable(df.get("scaling-aspect")):
            for sp in get_iterable(sa.get("scaling-policy")):
                for sc in get_iterable(sp.get("scaling-criteria")):
                    sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                    if (
                        sc_monitoring_param
                        and sc_monitoring_param not in all_monitoring_params
                    ):
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                            "[name='{}']:scaling-criteria[name='{}']: "
                            "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                df["id"],
                                sa["id"],
                                sp["name"],
                                sc["name"],
                                sc_monitoring_param,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

            for sca in get_iterable(sa.get("scaling-config-action")):
                if (
                    "lcm-operations-configuration" not in df
                    or "operate-vnf-op-config"
                    not in df["lcm-operations-configuration"]
                    or not utils.find_in_list(
                        df["lcm-operations-configuration"][
                            "operate-vnf-op-config"
                        ].get("day1-2", []),
                        lambda config: config["id"] == indata["id"],
                    )
                ):
                    raise EngineException(
                        "'day1-2 configuration' not defined in the descriptor but it is "
                        "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                            df["id"], sa["id"]
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                for configuration in get_iterable(
                    df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                        "day1-2", []
                    )
                ):
                    for primitive in get_iterable(
                        configuration.get("config-primitive")
                    ):
                        if (
                            primitive["name"]
                            == sca["vnf-config-primitive-name-ref"]
                        ):
                            break
                    else:
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                            "config-primitive-name-ref='{}' does not match any "
                            "day1-2 configuration:config-primitive:name".format(
                                df["id"],
                                sa["id"],
                                sca["vnf-config-primitive-name-ref"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes associate file system storage (via super)
    Deletes associated vnfpkgops from database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: optional list to accumulate messages instead of sending them
    :return: None
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove package operations linked to this package.
    self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
    # Remove stored revisions of this descriptor.
    self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
def sol005_projection(self, data):
    """Add SOL005-mandated state fields and _links to a vnf package record.

    :param data: descriptor record from the database (mutated in place)
    :return: the projected record, delegating final shaping to super()
    """
    data["onboardingState"] = data["_admin"]["onboardingState"]
    data["operationalState"] = data["_admin"]["operationalState"]
    data["usageState"] = data["_admin"]["usageState"]

    links = {}
    links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
    links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
    links["packageContent"] = {
        "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
    }
    data["_links"] = links

    return super().sol005_projection(data)
@staticmethod
def find_software_version(vnfd: dict) -> str:
    """Find the software version in the VNFD descriptors

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        software-version (str): value found, or "1.0" as default
    """
    default_sw_version = "1.0"
    # The descriptor may still carry the outer {"vnfd": {...}} envelope.
    if vnfd.get("vnfd"):
        vnfd = vnfd["vnfd"]
    if vnfd.get("software-version"):
        return vnfd["software-version"]
    else:
        return default_sw_version
@staticmethod
def extract_policies(vnfd: dict) -> dict:
    """Removes the policies from the VNFD descriptors

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): VNFD which does not include policies
    """
    for df in vnfd.get("df", {}):
        for policy in ["scaling-aspect", "healing-aspect"]:
            if df.get(policy, {}):
                df.pop(policy)
    for vdu in vnfd.get("vdu", {}):
        for alarm_policy in ["alarm", "monitoring-parameter"]:
            if vdu.get(alarm_policy, {}):
                vdu.pop(alarm_policy)
    return vnfd
@staticmethod
def extract_day12_primitives(vnfd: dict) -> dict:
    """Removes the day12 primitives from the VNFD descriptors

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): VNFD without day1-2 config primitives
    """
    for df_id, df in enumerate(vnfd.get("df", {})):
        if (
            df.get("lcm-operations-configuration", {})
            .get("operate-vnf-op-config", {})
            .get("day1-2", {})
        ):
            day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                "day1-2"
            )
            for config_id, config in enumerate(day12):
                # Drop each primitive list; pop(..., None) tolerates absence.
                for key in [
                    "initial-config-primitive",
                    "config-primitive",
                    "terminate-config-primitive",
                ]:
                    config.pop(key, None)
                day12[config_id] = config
            df["lcm-operations-configuration"]["operate-vnf-op-config"][
                "day1-2"
            ] = day12
            vnfd["df"][df_id] = df
    return vnfd
def remove_modifiable_items(self, vnfd: dict) -> dict:
    """Removes the modifiable parts from the VNFD descriptors

    It calls different extract functions according to different update types
    to clear all the modifiable items from VNFD

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): Descriptor which does not include modifiable contents
    """
    # Unwrap the optional {"vnfd": {...}} envelope before cleaning.
    if vnfd.get("vnfd"):
        vnfd = vnfd["vnfd"]
    vnfd.pop("_admin", None)
    # If the other extractions need to be done from VNFD,
    # the new extract methods could be appended to below list.
    for extract_function in [self.extract_day12_primitives, self.extract_policies]:
        vnfd_temp = extract_function(vnfd)
        vnfd = vnfd_temp
    return vnfd
def _validate_descriptor_changes(
    self,
    descriptor_id: str,
    descriptor_file_name: str,
    old_descriptor_directory: str,
    new_descriptor_directory: str,
):
    """Compares the old and new VNFD descriptors and validates the new descriptor.

    Args:
        descriptor_id: internal id of the VNFD under update
        descriptor_file_name: name of the descriptor file inside each directory
        old_descriptor_directory (str): Directory of descriptor which is in-use
        new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)

    Raises:
        EngineException: In case of error when there are unallowed changes
    """
    try:
        # If VNFD does not exist in DB or it is not in use by any NS,
        # validation is not required.
        vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
        if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
            return

        # Get the old and new descriptor contents in order to compare them.
        with self.fs.file_open(
            (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
        ) as old_descriptor_file:
            with self.fs.file_open(
                (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as new_descriptor_file:
                old_content = yaml.safe_load(old_descriptor_file.read())
                new_content = yaml.safe_load(new_descriptor_file.read())

                # If software version has changed, we do not need to validate
                # the differences anymore.
                if old_content and new_content:
                    if self.find_software_version(
                        old_content
                    ) != self.find_software_version(new_content):
                        return

                    disallowed_change = DeepDiff(
                        self.remove_modifiable_items(old_content),
                        self.remove_modifiable_items(new_content),
                    )

                    if disallowed_change:
                        changed_nodes = functools.reduce(
                            lambda a, b: a + " , " + b,
                            (
                                node.lstrip("root")
                                for node in disallowed_change.get(
                                    "values_changed"
                                ).keys()
                            ),
                        )

                        raise EngineException(
                            f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                            "there are disallowed changes in the vnf descriptor.",
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
    except (
        DbException,
        AttributeError,
        IndexError,
        KeyError,
        ValueError,
    ) as e:
        raise type(e)(
            "VNF Descriptor could not be processed with error: {}.".format(e)
        )
1373 class NsdTopic(DescriptorTopic
):
def __init__(self, db, fs, msg, auth):
    """Initialize, delegating db/fs/msg/auth wiring to DescriptorTopic."""
    DescriptorTopic.__init__(self, db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate an NSD against the ETSI SOL006 pyangbind model.

    :param item: topic name (kept for interface homogeneity)
    :param data: NSD descriptor as a dictionary
    :param force: when True, unknown leaves are skipped instead of failing
    :return: the validated, cleaned descriptor dictionary
    :raises EngineException: UNPROCESSABLE_ENTITY on old format or model errors
    """
    if self._descriptor_data_is_in_old_format(data):
        raise EngineException(
            "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
    try:
        # pyangbind does not round-trip vnf-profile faithfully; keep a copy
        # and restore it after serialization.
        nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
        mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
        pybindJSONDecoder.load_ietf_json(
            {"nsd": {"nsd": [data]}},
            None,
            None,
            obj=mynsd,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(mynsd, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
        if nsd_vnf_profiles:
            desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
        return desc_out
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
1410 def _descriptor_data_is_in_old_format(data
):
1411 return ("nsd-catalog" in data
) or ("nsd:nsd-catalog" in data
)
1414 def _remove_envelop(indata
=None):
1417 clean_indata
= indata
1419 if clean_indata
.get("nsd"):
1420 clean_indata
= clean_indata
["nsd"]
1421 elif clean_indata
.get("etsi-nfv-nsd:nsd"):
1422 clean_indata
= clean_indata
["etsi-nfv-nsd:nsd"]
1423 if clean_indata
.get("nsd"):
1425 not isinstance(clean_indata
["nsd"], list)
1426 or len(clean_indata
["nsd"]) != 1
1428 raise EngineException("'nsd' must be a list of only one element")
1429 clean_indata
= clean_indata
["nsd"][0]
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a new NSD: strip read-only fields, run pyangbind and cross checks.

    :param indata: NSD descriptor as a dictionary (mutated/replaced)
    :param storage_params: storage information (unused here)
    :param force: forwarded to pyangbind validation
    :return: the validated descriptor
    """
    # Read-only SOL005 projection fields must not be stored.
    indata.pop("nsdOnboardingState", None)
    indata.pop("nsdOperationalState", None)
    indata.pop("nsdUsageState", None)

    indata.pop("links", None)

    indata = self.pyangbind_validation("nsds", indata, force)
    # Cross references validation in the descriptor
    # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
    for vld in get_iterable(indata.get("virtual-link-desc")):
        self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

    self.validate_vnf_profiles_vnfd_id(indata)

    return indata
@staticmethod
def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
    """Reject virtual-link-protocol-data on management networks.

    :param vld: a virtual-link-desc entry of the NSD
    :param indata: the whole NSD descriptor
    :raises EngineException: UNPROCESSABLE_ENTITY when a mgmt-network vld has
        virtual-link-protocol-data set in any df virtual-link-profile
    """
    if not vld.get("mgmt-network"):
        return
    vld_id = vld.get("id")
    for df in get_iterable(indata.get("df")):
        for vlp in get_iterable(df.get("virtual-link-profile")):
            if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                if vlp.get("virtual-link-protocol-data"):
                    raise EngineException(
                        "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                        "protocol-data You cannot set a virtual-link-protocol-data "
                        "when mgmt-network is True".format(df["id"], vlp["id"]),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
@staticmethod
def validate_vnf_profiles_vnfd_id(indata):
    """Check every df:vnf-profile:vnfd-id exists in the NSD 'vnfd-id' list.

    :param indata: NSD descriptor as a dictionary
    :raises EngineException: UNPROCESSABLE_ENTITY on a dangling vnfd-id
    """
    all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
    for df in get_iterable(indata.get("df")):
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd_id = vnf_profile.get("vnfd-id")
            if vnfd_id and vnfd_id not in all_vnfd_ids:
                raise EngineException(
                    "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                    "does not match any vnfd-id".format(
                        df["id"], vnf_profile["id"], vnfd_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def _validate_input_edit(self, indata, content, force=False):
    # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
    """
    indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
    """
    if "_admin" not in indata:
        indata["_admin"] = {}

    if "nsdOperationalState" in indata:
        if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
            indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
        else:
            raise EngineException(
                "State '{}' is not a valid operational state".format(
                    indata["nsdOperationalState"]
                ),
                http_code=HTTPStatus.BAD_REQUEST,
            )

    # In the case of user defined data, we need to put the data in the root of the object
    # to preserve current expected behaviour
    if "userDefinedData" in indata:
        data = indata.pop("userDefinedData")
        if type(data) == dict:
            indata["_admin"]["userDefinedData"] = data
        else:
            raise EngineException(
                "userDefinedData should be an object, but is '{}' instead".format(
                    type(data)
                ),
                http_code=HTTPStatus.BAD_REQUEST,
            )

    if (
        "operationalState" in indata["_admin"]
        and content["_admin"]["operationalState"]
        == indata["_admin"]["operationalState"]
    ):
        raise EngineException(
            "nsdOperationalState already {}".format(
                content["_admin"]["operationalState"]
            ),
            http_code=HTTPStatus.CONFLICT,
        )
    return indata
def _check_descriptor_dependencies(self, session, descriptor):
    """
    Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
    connection points are ok
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: descriptor to be inserted or edit
    :return: None or raises exception
    """
    if session["force"]:
        return
    vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

    # Cross references validation in the descriptor and vnfd connection point validation
    for df in get_iterable(descriptor.get("df")):
        self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
    """Build a map vnfd-id -> vnfd record for every constituent VNFD.

    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: NSD descriptor referencing the vnfds
    :return: dict indexed by vnfd id
    :raises EngineException: CONFLICT when a referenced vnfd does not exist
    """
    vnfds_index = {}
    if descriptor.get("vnfd-id") and not session["force"]:
        for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
            query_filter = self._get_project_filter(session)
            query_filter["id"] = vnfd_id
            vnf_list = self.db.get_list("vnfds", query_filter)
            if not vnf_list:
                raise EngineException(
                    "Descriptor error at 'vnfd-id'='{}' references a non "
                    "existing vnfd".format(vnfd_id),
                    http_code=HTTPStatus.CONFLICT,
                )
            vnfds_index[vnfd_id] = vnf_list[0]
    return vnfds_index
@staticmethod
def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
    """Check that constituent-cpd-id references match the vnfd's ext-cpd ids.

    :param df: a deployment flavour of the NSD
    :param vnfds_index: map vnfd-id -> vnfd record (see _get_descriptor_constituent_vnfds_index)
    :raises EngineException: UNPROCESSABLE_ENTITY on a dangling reference
    """
    for vnf_profile in get_iterable(df.get("vnf-profile")):
        vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
        all_vnfd_ext_cpds = set()
        for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
            if ext_cpd.get("id"):
                all_vnfd_ext_cpds.add(ext_cpd.get("id"))

        for virtual_link in get_iterable(
            vnf_profile.get("virtual-link-connectivity")
        ):
            for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                    raise EngineException(
                        "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                        "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                        "non existing ext-cpd:id inside vnfd '{}'".format(
                            df["id"],
                            vnf_profile["id"],
                            virtual_link["virtual-link-profile-id"],
                            vl_cpd_id,
                            vnfd["id"],
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the generic edit conflict checks, then NSD dependency validation.

    :return: the (possibly normalized) final content
    """
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    self._check_descriptor_dependencies(session, final_content)

    return final_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
    that NSD can be public and be used by other projects.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: nsd internal id
    :param db_content: The database content of the _id
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return
    descriptor = db_content
    descriptor_id = descriptor.get("id")
    if not descriptor_id:  # empty nsd not uploaded
        return

    # check NSD used by NS
    _filter = self._get_project_filter(session)
    _filter["nsd-id"] = _id
    if self.db.get_list("nsrs", _filter):
        raise EngineException(
            "There is at least one NS instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )

    # check NSD referenced by NST
    del _filter["nsd-id"]
    _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
    if self.db.get_list("nsts", _filter):
        raise EngineException(
            "There is at least one NetSlice Template referencing this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes associate file system storage (via super)
    Deletes associated vnfpkgops from database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: optional list to accumulate messages instead of sending them
    :return: None
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove stored revisions of this descriptor.
    self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
@staticmethod
def extract_day12_primitives(nsd: dict) -> dict:
    """Removes the day12 primitives from the NSD descriptors

    Args:
        nsd (dict): Descriptor as a dictionary

    Returns:
        nsd (dict): Cleared NSD
    """
    if nsd.get("ns-configuration"):
        for key in [
            "config-primitive",
            "initial-config-primitive",
            "terminate-config-primitive",
        ]:
            nsd["ns-configuration"].pop(key, None)
    return nsd
def remove_modifiable_items(self, nsd: dict) -> dict:
    """Removes the modifiable parts from the NSD descriptors

    It calls different extract functions according to different update types
    to clear all the modifiable items from NSD

    Args:
        nsd (dict): Descriptor as a dictionary

    Returns:
        nsd (dict): Descriptor which does not include modifiable contents
    """
    # Unwrap nested {"nsd": ...} envelopes; the innermost may be a list.
    while isinstance(nsd, dict) and nsd.get("nsd"):
        nsd = nsd["nsd"]
    if isinstance(nsd, list):
        nsd = nsd[0]
    nsd.pop("_admin", None)
    # If the more extractions need to be done from NSD,
    # the new extract methods could be appended to below list.
    for extract_function in [self.extract_day12_primitives]:
        nsd_temp = extract_function(nsd)
        nsd = nsd_temp
    return nsd
def _validate_descriptor_changes(
    self,
    descriptor_id: str,
    descriptor_file_name: str,
    old_descriptor_directory: str,
    new_descriptor_directory: str,
):
    """Compares the old and new NSD descriptors and validates the new descriptor

    Args:
        descriptor_id: internal id of the NSD under update
        descriptor_file_name: name of the descriptor file inside each directory
        old_descriptor_directory: Directory of descriptor which is in-use
        new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

    Raises:
        EngineException: In case of error if the changes are not allowed
    """
    try:
        # If NSD does not exist in DB, or it is not in use by any NS,
        # validation is not required.
        nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
        if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
            return

        # Get the old and new descriptor contents in order to compare them.
        with self.fs.file_open(
            (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
        ) as old_descriptor_file:
            with self.fs.file_open(
                (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as new_descriptor_file:
                old_content = yaml.safe_load(old_descriptor_file.read())
                new_content = yaml.safe_load(new_descriptor_file.read())

                if old_content and new_content:
                    disallowed_change = DeepDiff(
                        self.remove_modifiable_items(old_content),
                        self.remove_modifiable_items(new_content),
                    )

                    if disallowed_change:
                        changed_nodes = functools.reduce(
                            lambda a, b: a + ", " + b,
                            (
                                node.lstrip("root")
                                for node in disallowed_change.get(
                                    "values_changed"
                                ).keys()
                            ),
                        )

                        raise EngineException(
                            f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                            "there are disallowed changes in the ns descriptor. ",
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
    except (
        DbException,
        AttributeError,
        IndexError,
        KeyError,
        ValueError,
    ) as e:
        raise type(e)(
            "NS Descriptor could not be processed with error: {}.".format(e)
        )
def sol005_projection(self, data):
    """Add SOL005-mandated state fields and _links to an ns descriptor record.

    :param data: descriptor record from the database (mutated in place)
    :return: the projected record, delegating final shaping to super()
    """
    data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
    data["nsdOperationalState"] = data["_admin"]["operationalState"]
    data["nsdUsageState"] = data["_admin"]["usageState"]

    links = {}
    links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
    links["nsd_content"] = {
        "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
    }
    data["_links"] = links

    return super().sol005_projection(data)
1774 class NstTopic(DescriptorTopic
):
1777 quota_name
= "slice_templates"
def __init__(self, db, fs, msg, auth):
    """Initialize, delegating db/fs/msg/auth wiring to DescriptorTopic."""
    DescriptorTopic.__init__(self, db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate an NST against the nst pyangbind information model.

    :param item: topic name (kept for interface homogeneity)
    :param data: NST descriptor as a dictionary
    :param force: when True, unknown leaves are skipped instead of failing
    :return: the validated, cleaned descriptor dictionary
    :raises EngineException: UNPROCESSABLE_ENTITY on model errors
    """
    try:
        mynst = nst_im()
        pybindJSONDecoder.load_ietf_json(
            {"nst": [data]},
            None,
            None,
            obj=mynst,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(mynst, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        return desc_out
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
1803 def _remove_envelop(indata
=None):
1806 clean_indata
= indata
1808 if clean_indata
.get("nst"):
1810 not isinstance(clean_indata
["nst"], list)
1811 or len(clean_indata
["nst"]) != 1
1813 raise EngineException("'nst' must be a list only one element")
1814 clean_indata
= clean_indata
["nst"][0]
1815 elif clean_indata
.get("nst:nst"):
1817 not isinstance(clean_indata
["nst:nst"], list)
1818 or len(clean_indata
["nst:nst"]) != 1
1820 raise EngineException("'nst:nst' must be a list only one element")
1821 clean_indata
= clean_indata
["nst:nst"][0]
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a new NST: strip read-only fields and run pyangbind validation.

    :param indata: NST descriptor as a dictionary
    :param storage_params: storage information (unused here)
    :param force: forwarded to pyangbind validation
    :return: a copy of the validated descriptor
    """
    # Read-only SOL005 projection fields must not be stored.
    indata.pop("onboardingState", None)
    indata.pop("operationalState", None)
    indata.pop("usageState", None)
    indata = self.pyangbind_validation("nsts", indata, force)
    return indata.copy()
def _check_descriptor_dependencies(self, session, descriptor):
    """
    Check that the dependent descriptors exist on a new descriptor or edition
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: descriptor to be inserted or edit
    :return: None or raises exception
    """
    if not descriptor.get("netslice-subnet"):
        return
    for nsd in descriptor["netslice-subnet"]:
        nsd_id = nsd["nsd-ref"]
        filter_q = self._get_project_filter(session)
        filter_q["id"] = nsd_id
        if not self.db.get_list("nsds", filter_q):
            raise EngineException(
                "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                "existing nsd".format(nsd_id),
                http_code=HTTPStatus.CONFLICT,
            )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the generic edit conflict checks, then NST dependency validation.

    :return: the (possibly normalized) final content
    """
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    self._check_descriptor_dependencies(session, final_content)
    return final_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
    that NST can be public and be used by other projects.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: nst internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    # TODO: Check this method
    if session["force"]:
        return
    # Get Network Slice Template from Database
    _filter = self._get_project_filter(session)
    _filter["_admin.nst-id"] = _id
    if self.db.get_list("nsis", _filter):
        raise EngineException(
            "there is at least one Netslice Instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def sol005_projection(self, data):
    """Add SOL005-mandated state fields and _links to a netslice template record.

    :param data: descriptor record from the database (mutated in place)
    :return: the projected record, delegating final shaping to super()
    """
    data["onboardingState"] = data["_admin"]["onboardingState"]
    data["operationalState"] = data["_admin"]["operationalState"]
    data["usageState"] = data["_admin"]["usageState"]

    links = {}
    links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
    links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
    data["_links"] = links

    return super().sol005_projection(data)
1893 class PduTopic(BaseTopic
):
1896 quota_name
= "pduds"
1897 schema_new
= pdu_new_schema
1898 schema_edit
= pdu_edit_schema
def __init__(self, db, fs, msg, auth):
    """Initialize, delegating db/fs/msg/auth wiring to BaseTopic."""
    BaseTopic.__init__(self, db, fs, msg, auth)
@staticmethod
def format_on_new(content, project_id=None, make_public=False):
    """Apply the generic _admin formatting, then set PDU lifecycle states.

    :param content: record being created (mutated in place)
    :param project_id: owning project id
    :param make_public: whether the record is public
    :return: None
    """
    BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
    content["_admin"]["onboardingState"] = "CREATED"
    content["_admin"]["operationalState"] = "ENABLED"
    content["_admin"]["usageState"] = "NOT_IN_USE"
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any vnfr that uses this PDU
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: pdu internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return

    _filter = self._get_project_filter(session)
    _filter["vdur.pdu-id"] = _id
    if self.db.get_list("vnfrs", _filter):
        raise EngineException(
            "There is at least one VNF instance using this PDU",
            http_code=HTTPStatus.CONFLICT,
        )
1930 class VnfPkgOpTopic(BaseTopic
):
1933 schema_new
= vnfpkgop_new_schema
def __init__(self, db, fs, msg, auth):
    """Initialize, delegating db/fs/msg/auth wiring to BaseTopic."""
    BaseTopic.__init__(self, db, fs, msg, auth)
def edit(self, session, _id, indata=None, kwargs=None, content=None):
    """Editing vnf package operations is not supported.

    :raises EngineException: always, with METHOD_NOT_ALLOWED
    """
    raise EngineException(
        "Method 'edit' not allowed for topic '{}'".format(self.topic),
        HTTPStatus.METHOD_NOT_ALLOWED,
    )
def delete(self, session, _id, dry_run=False):
    """Deleting vnf package operations is not supported.

    :raises EngineException: always, with METHOD_NOT_ALLOWED
    """
    raise EngineException(
        "Method 'delete' not allowed for topic '{}'".format(self.topic),
        HTTPStatus.METHOD_NOT_ALLOWED,
    )
def delete_list(self, session, filter_q=None):
    """Bulk deletion of vnf package operations is not supported.

    :raises EngineException: always, with METHOD_NOT_ALLOWED
    """
    raise EngineException(
        "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
        HTTPStatus.METHOD_NOT_ALLOWED,
    )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
    """
    Creates a new entry into database.
    :param rollback: list to append created items at database in case a rollback may to be done
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param indata: data to be inserted
    :param kwargs: used to override the indata descriptor
    :param headers: http request headers
    :return: _id, op_id:
        _id: identity of the inserted data.
        op_id: None
    """
    self._update_input_with_kwargs(indata, kwargs)
    validate_input(indata, self.schema_new)
    vnfpkg_id = indata["vnfPkgId"]
    filter_q = BaseTopic._get_project_filter(session)
    filter_q["_id"] = vnfpkg_id
    vnfd = self.db.get_one("vnfds", filter_q)
    operation = indata["lcmOperationType"]
    kdu_name = indata["kdu_name"]
    # Locate the referenced KDU inside the vnfd.
    for kdu in vnfd.get("kdu", []):
        if kdu["name"] == kdu_name:
            helm_chart = kdu.get("helm-chart")
            juju_bundle = kdu.get("juju-bundle")
            break
    else:
        raise EngineException(
            "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
        )
    if helm_chart:
        indata["helm-chart"] = helm_chart
        # "repo/chart" form carries an explicit repository name.
        match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
        repo_name = match.group(1) if match else None
    elif juju_bundle:
        indata["juju-bundle"] = juju_bundle
        match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
        repo_name = match.group(1) if match else None
    else:
        raise EngineException(
            "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                vnfpkg_id, kdu_name
            )
        )
    if repo_name:
        del filter_q["_id"]
        filter_q["name"] = repo_name
        repo = self.db.get_one("k8srepos", filter_q)
        k8srepo_id = repo.get("_id")
        k8srepo_url = repo.get("url")
    else:
        k8srepo_id = None
        k8srepo_url = None
    indata["k8srepoId"] = k8srepo_id
    indata["k8srepo_url"] = k8srepo_url
    vnfpkgop_id = str(uuid4())
    vnfpkgop_desc = {
        "_id": vnfpkgop_id,
        "operationState": "PROCESSING",
        "vnfPkgId": vnfpkg_id,
        "lcmOperationType": operation,
        "isAutomaticInvocation": False,
        "isCancelPending": False,
        "operationParams": indata,
        "links": {
            "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
            "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
        },
    }
    self.format_on_new(
        vnfpkgop_desc, session["project_id"], make_public=session["public"]
    )
    ctime = vnfpkgop_desc["_admin"]["created"]
    vnfpkgop_desc["statusEnteredTime"] = ctime
    vnfpkgop_desc["startTime"] = ctime
    self.db.create(self.topic, vnfpkgop_desc)
    rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
    self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
    return vnfpkgop_id, None