1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
25 from deepdiff
import DeepDiff
26 from hashlib
import md5
27 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
28 from http
import HTTPStatus
30 from uuid
import uuid4
31 from re
import fullmatch
32 from zipfile
import ZipFile
33 from osm_nbi
.validation
import (
40 from osm_nbi
.base_topic
import BaseTopic
, EngineException
, get_iterable
41 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
42 from osm_im
.nst
import nst
as nst_im
43 from pyangbind
.lib
.serialise
import pybindJSONDecoder
44 import pyangbind
.lib
.pybindJSON
as pybindJSON
45 from osm_nbi
import utils
# Module author attribution (statement was split across lines by extraction damage).
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
50 class DescriptorTopic(BaseTopic
):
def __init__(self, db, fs, msg, auth):
    """Initialize the descriptor topic with database, file-system, message bus and auth backends.

    :param db: database driver (osm_common dbbase-compatible)
    :param fs: file-system storage driver
    :param msg: message bus driver
    :param auth: authentication/authorization connector
    """
    # Use super() instead of the explicit BaseTopic.__init__(self, ...) call;
    # equivalent for this single-inheritance hierarchy and the modern idiom.
    super().__init__(db, fs, msg, auth)
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Validate an edited descriptor before it is written back.

    Checks id/name uniqueness inside every list of the descriptor, re-validates
    the result with pyangbind, and ensures the descriptor "id" is not already
    used by another entry of the same project.

    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param final_content: descriptor content merged with the edit
    :param edit_content: the edit payload itself
    :param _id: internal database id of the descriptor being edited
    :return: the serialized, validated final content
    :raises EngineException: on duplicated identifiers or id collision
    """
    # NOTE(review): reconstructed from a damaged source; several interior lines
    # were missing and were restored following the visible fragments — confirm
    # against the upstream OSM NBI descriptor_topics module.
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    def _check_unique_id_name(descriptor, position=""):
        # Recursively walk the descriptor and reject repeated "id"/"name"
        # values inside the same list.
        for desc_key, desc_item in descriptor.items():
            if isinstance(desc_item, list) and desc_item:
                used_ids = []
                desc_item_id = None
                for index, list_item in enumerate(desc_item):
                    if isinstance(list_item, dict):
                        _check_unique_id_name(
                            list_item, "{}.{}[{}]".format(position, desc_key, index)
                        )
                        # Decide once (first element) whether this list is keyed
                        # by "id" or by "name".
                        if index == 0 and (
                            list_item.get("id") or list_item.get("name")
                        ):
                            desc_item_id = "id" if list_item.get("id") else "name"
                        if desc_item_id and list_item.get(desc_item_id):
                            if list_item[desc_item_id] in used_ids:
                                position = "{}.{}[{}]".format(
                                    position, desc_key, index
                                )
                                raise EngineException(
                                    "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                        desc_item_id,
                                        list_item[desc_item_id],
                                        position,
                                    ),
                                    HTTPStatus.UNPROCESSABLE_ENTITY,
                                )
                            used_ids.append(list_item[desc_item_id])

    _check_unique_id_name(final_content)
    # 1. validate again with pyangbind
    # 1.1. remove internal keys
    internal_keys = {}
    for k in ("_id", "_admin"):
        if k in final_content:
            internal_keys[k] = final_content.pop(k)
    storage_params = internal_keys["_admin"].get("storage")
    serialized = self._validate_input_new(
        final_content, storage_params, session["force"]
    )

    # 1.2. modify final_content with a serialized version
    final_content = copy.deepcopy(serialized)
    # 1.3. restore internal keys
    for k, v in internal_keys.items():
        final_content[k] = v
    if session["force"]:
        return final_content

    # 2. check that this id is not present
    if "id" in edit_content:
        _filter = self._get_project_filter(session)

        _filter["id"] = final_content["id"]
        _filter["_id.neq"] = _id

        if self.db.get_one(self.topic, _filter, fail_on_empty=False):
            raise EngineException(
                "{} with id '{}' already exists for this project".format(
                    self.topic[:-1], final_content["id"]
                ),
                HTTPStatus.CONFLICT,
            )

    return final_content
def format_on_new(content, project_id=None, make_public=False):
    """Fill the "_admin" bookkeeping fields of a freshly created descriptor.

    Delegates common fields to BaseTopic.format_on_new and then sets the
    SOL005 onboarding/operational/usage states for a just-created, empty entry.

    :param content: descriptor dict, mutated in place
    :param project_id: owning project, passed through to BaseTopic
    :param make_public: whether the entry is visible to other projects
    """
    BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
    content["_admin"]["onboardingState"] = "CREATED"
    content["_admin"]["operationalState"] = "DISABLED"
    content["_admin"]["usageState"] = "NOT_IN_USE"
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes file system storage associated with the descriptor
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: To not send message (False) or store content (list) instead
    :return: None if ok or raises EngineException with the problem
    """
    self.fs.file_delete(_id, ignore_non_exist=True)
    self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
    # Remove file revisions
    if "revision" in db_content["_admin"]:
        revision = db_content["_admin"]["revision"]
        # NOTE(review): loop header reconstructed from the visible decrement —
        # deletes every stored revision folder "<id>:<n>" down to 1.
        while revision > 0:
            self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
            revision = revision - 1
def get_one_by_id(db, session, topic, id):
    """Return the single descriptor with this SOL006 "id" visible to the session.

    First looks for a descriptor owned by the session project; if none is
    found, retries with the project filter to pick up public entries.

    :param db: database driver
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param topic: collection name (e.g. "vnfds", "nsds")
    :param id: descriptor "id" field (not the internal _id)
    :return: the descriptor dict
    :raises DbException: NOT_FOUND when absent, CONFLICT when ambiguous
    """
    # NOTE(review): reconstructed from a damaged source (missing return/raise
    # lines restored) — confirm against upstream OSM NBI.
    # find owned by this project
    _filter = BaseTopic._get_project_filter(session)
    _filter["id"] = id
    desc_list = db.get_list(topic, _filter)
    if len(desc_list) == 1:
        return desc_list[0]
    elif len(desc_list) > 1:
        raise DbException(
            "Found more than one {} with id='{}' belonging to this project".format(
                topic[:-1], id
            ),
            HTTPStatus.CONFLICT,
        )

    # not found any: try to find public
    _filter = BaseTopic._get_project_filter(session)
    _filter["id"] = id
    desc_list = db.get_list(topic, _filter)
    if not desc_list:
        raise DbException(
            "Not found any {} with id='{}'".format(topic[:-1], id),
            HTTPStatus.NOT_FOUND,
        )
    elif len(desc_list) == 1:
        return desc_list[0]
    else:
        raise DbException(
            "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                topic[:-1], id
            ),
            HTTPStatus.CONFLICT,
        )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
    """
    Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
    Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
    (self.upload_content)
    :param rollback: list to append created items at database in case a rollback may to be done
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param indata: data to be inserted
    :param kwargs: used to override the indata descriptor
    :param headers: http request headers
    :return: _id, None: identity of the inserted data; and None as there is not any operation
    """
    # NOTE(review): reconstructed from a damaged source — confirm against
    # upstream OSM NBI before relying on exact content layout.
    # No needed to capture exceptions
    # Check Quota
    self.check_quota(session)

    # _remove_envelop
    if indata:
        if "userDefinedData" in indata:
            indata = indata["userDefinedData"]

    # Override descriptor with query string kwargs
    self._update_input_with_kwargs(indata, kwargs)
    # uncomment when this method is implemented.
    # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
    # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

    content = {"_admin": {
        "userDefinedData": indata,
        "revision": 0,
    }}

    self.format_on_new(
        content, session["project_id"], make_public=session["public"]
    )
    _id = self.db.create(self.topic, content)
    rollback.append({"topic": self.topic, "_id": _id})
    self._send_msg("created", {"_id": _id})
    return _id, None
def upload_content(self, session, _id, indata, kwargs, headers):
    """
    Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id : the nsd,vnfd is already created, this is the id
    :param indata: http body request
    :param kwargs: user query string to override parameters. NOT USED
    :param headers: http request headers
    :return: True if package is completely uploaded or False if partial content has been uploded
    Raise exception on error
    """
    # NOTE(review): heavily reconstructed from a damaged source; interior lines
    # restored from the visible fragments. Confirm against upstream OSM NBI.
    # Check that _id exists and it is valid
    current_desc = self.show(session, _id)

    content_range_text = headers.get("Content-Range")
    expected_md5 = headers.get("Content-File-MD5")
    compressed = None
    content_type = headers.get("Content-Type")
    # Fix: parenthesize the gzip alternatives so a missing Content-Type cannot
    # trigger `"..." in None` (TypeError).
    if content_type and (
        "application/gzip" in content_type
        or "application/x-gzip" in content_type
    ):
        compressed = "gzip"
    if content_type and "application/zip" in content_type:
        compressed = "zip"
    filename = headers.get("Content-Filename")
    if not filename and compressed:
        filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
    elif not filename:
        filename = "package"

    revision = 1
    if "revision" in current_desc["_admin"]:
        revision = current_desc["_admin"]["revision"] + 1

    # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
    file_pkg = None
    fs_rollback = []
    try:
        start = 0
        if content_range_text:
            content_range = (
                content_range_text.replace("-", " ").replace("/", " ").split()
            )
            if (
                len(content_range) != 4
                or content_range[0] != "bytes"
            ):  # TODO check x<y not negative < total....
                raise IndexError()
            start = int(content_range[1])
            end = int(content_range[2]) + 1
            total = int(content_range[3])

        # Rather than using a temp folder, we will store the package in a folder based on
        # the current revision.
        proposed_revision_path = (
            _id + ":" + str(revision)
        )  # all the content is upload here and if ok, it is rename from id_ to is folder

        if start:
            if not self.fs.file_exists(proposed_revision_path, "dir"):
                raise EngineException(
                    "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                )
        else:
            self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
            self.fs.mkdir(proposed_revision_path)
            fs_rollback.append(proposed_revision_path)

        storage = self.fs.get_params()
        storage["folder"] = proposed_revision_path

        file_path = (proposed_revision_path, filename)
        if self.fs.file_exists(file_path, "file"):
            file_size = self.fs.file_size(file_path)
        else:
            file_size = 0
        if file_size != start:
            raise EngineException(
                "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                    file_size, start
                ),
                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
            )
        file_pkg = self.fs.file_open(file_path, "a+b")
        if isinstance(indata, dict):
            indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
            file_pkg.write(indata_text.encode(encoding="utf-8"))
        else:
            indata_len = 0
            while True:
                indata_text = indata.read(4096)
                indata_len += len(indata_text)
                if not indata_text:
                    break
                file_pkg.write(indata_text)
        if content_range_text:
            if indata_len != end - start:
                raise EngineException(
                    "Mismatch between Content-Range header {}-{} and body length of {}".format(
                        start, end - 1, indata_len
                    ),
                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                )
            if end != total:
                # TODO update to UPLOADING
                # Partial upload: keep the revision folder for the next chunk.
                fs_rollback = []
                return False

        # PACKAGE UPLOADED
        if expected_md5:
            file_pkg.seek(0, 0)
            file_md5 = md5()
            chunk_data = file_pkg.read(1024)
            while chunk_data:
                file_md5.update(chunk_data)
                chunk_data = file_pkg.read(1024)
            if expected_md5 != file_md5.hexdigest():
                raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
        file_pkg.seek(0, 0)
        if compressed == "gzip":
            tar = tarfile.open(mode="r", fileobj=file_pkg)
            descriptor_file_name = None
            for tarinfo in tar:
                tarname = tarinfo.name
                tarname_path = tarname.split("/")
                if (
                    not tarname_path[0] or ".." in tarname_path
                ):  # if start with "/" means absolute path
                    raise EngineException(
                        "Absolute path or '..' are not allowed for package descriptor tar.gz"
                    )
                if len(tarname_path) == 1 and not tarinfo.isdir():
                    raise EngineException(
                        "All files must be inside a dir for package descriptor tar.gz"
                    )
                if (
                    tarname.endswith(".yaml")
                    or tarname.endswith(".json")
                    or tarname.endswith(".yml")
                ):
                    storage["pkg-dir"] = tarname_path[0]
                    if len(tarname_path) == 2:
                        if descriptor_file_name:
                            raise EngineException(
                                "Found more than one descriptor file at package descriptor tar.gz"
                            )
                        descriptor_file_name = tarname
            if not descriptor_file_name:
                raise EngineException(
                    "Not found any descriptor file at package descriptor tar.gz"
                )
            storage["descriptor"] = descriptor_file_name
            storage["zipfile"] = filename
            self.fs.file_extract(tar, proposed_revision_path)
            with self.fs.file_open(
                (proposed_revision_path, descriptor_file_name), "r"
            ) as descriptor_file:
                content = descriptor_file.read()
        elif compressed == "zip":
            zipfile = ZipFile(file_pkg)
            descriptor_file_name = None
            for package_file in zipfile.infolist():
                zipfilename = package_file.filename
                file_path = zipfilename.split("/")
                if (
                    not file_path[0] or ".." in zipfilename
                ):  # if start with "/" means absolute path
                    raise EngineException(
                        "Absolute path or '..' are not allowed for package descriptor zip"
                    )

                if (
                    zipfilename.endswith(".yaml")
                    or zipfilename.endswith(".json")
                    or zipfilename.endswith(".yml")
                ) and (
                    zipfilename.find("/") < 0
                    or zipfilename.find("Definitions") >= 0
                ):
                    storage["pkg-dir"] = ""
                    if descriptor_file_name:
                        raise EngineException(
                            "Found more than one descriptor file at package descriptor zip"
                        )
                    descriptor_file_name = zipfilename
            if not descriptor_file_name:
                raise EngineException(
                    "Not found any descriptor file at package descriptor zip"
                )
            storage["descriptor"] = descriptor_file_name
            storage["zipfile"] = filename
            self.fs.file_extract(zipfile, proposed_revision_path)

            with self.fs.file_open(
                (proposed_revision_path, descriptor_file_name), "r"
            ) as descriptor_file:
                content = descriptor_file.read()
        else:
            content = file_pkg.read()
            storage["descriptor"] = descriptor_file_name = filename

        if descriptor_file_name.endswith(".json"):
            error_text = "Invalid json format "
            # Fix: `content` is the already-read text, so use json.loads();
            # json.load() expects a file object and would raise AttributeError.
            indata = json.loads(content)
        else:
            error_text = "Invalid yaml format "
            indata = yaml.load(content, Loader=yaml.SafeLoader)

        # Need to close the file package here so it can be copied from the
        # revision to the current, unrevisioned record
        if file_pkg:
            file_pkg.close()
        file_pkg = None

        # Fetch both the incoming, proposed revision and the original revision so we
        # can call a validate method to compare them
        current_revision_path = _id + "/"
        self.fs.sync(from_path=current_revision_path)
        self.fs.sync(from_path=proposed_revision_path)

        try:
            self._validate_descriptor_changes(
                descriptor_file_name,
                current_revision_path,
                proposed_revision_path,
            )
        except Exception as e:
            shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
            shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
            # Only delete the new revision. We need to keep the original version in place
            # as it has not been changed.
            self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
            raise e

        indata = self._remove_envelop(indata)

        # Override descriptor with query string kwargs
        if kwargs:
            self._update_input_with_kwargs(indata, kwargs)

        current_desc["_admin"]["storage"] = storage
        current_desc["_admin"]["onboardingState"] = "ONBOARDED"
        current_desc["_admin"]["operationalState"] = "ENABLED"
        current_desc["_admin"]["modified"] = time()
        current_desc["_admin"]["revision"] = revision

        deep_update_rfc7396(current_desc, indata)
        current_desc = self.check_conflict_on_edit(
            session, current_desc, indata, _id=_id
        )

        # Copy the revision to the active package name by its original id
        shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
        os.rename(
            self.fs.path + proposed_revision_path,
            self.fs.path + current_revision_path,
        )
        self.fs.file_delete(current_revision_path, ignore_non_exist=True)
        self.fs.mkdir(current_revision_path)
        self.fs.reverse_sync(from_path=current_revision_path)

        shutil.rmtree(self.fs.path + _id)

        self.db.replace(self.topic, _id, current_desc)

        # Store a copy of the package as a point in time revision
        revision_desc = dict(current_desc)
        revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
        self.db.create(self.topic + "_revisions", revision_desc)
        # Success: nothing to roll back any more.
        fs_rollback = []

        indata["_id"] = _id
        self._send_msg("edited", indata)

        # TODO if descriptor has changed because kwargs update content and remove cached zip
        # TODO if zip is not present creates one
        return True

    except EngineException:
        raise
    except IndexError:
        raise EngineException(
            "invalid Content-Range header format. Expected 'bytes start-end/total'",
            HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
        )
    except IOError as e:
        raise EngineException(
            "invalid upload transaction sequence: '{}'".format(e),
            HTTPStatus.BAD_REQUEST,
        )
    except tarfile.ReadError as e:
        raise EngineException(
            "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
        )
    except (ValueError, yaml.YAMLError) as e:
        raise EngineException(error_text + str(e))
    except ValidationError as e:
        raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
    finally:
        if file_pkg:
            file_pkg.close()
        for file in fs_rollback:
            self.fs.file_delete(file, ignore_non_exist=True)
def get_file(self, session, _id, path=None, accept_header=None):
    """
    Return the file content of a vnfd or nsd
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: Identity of the vnfd, nsd
    :param path: artifact path or "$DESCRIPTOR" or None
    :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
    :return: opened file plus Accept format or raises an exception
    """
    # NOTE(review): reconstructed from a damaged source; return tuples for the
    # artifact and zip branches restored from the visible fragments.
    accept_text = accept_zip = False
    if accept_header:
        if "text/plain" in accept_header or "*/*" in accept_header:
            accept_text = True
        if "application/zip" in accept_header or "*/*" in accept_header:
            accept_zip = "application/zip"
        elif "application/gzip" in accept_header:
            accept_zip = "application/gzip"

    if not accept_text and not accept_zip:
        raise EngineException(
            "provide request header 'Accept' with 'application/zip' or 'text/plain'",
            http_code=HTTPStatus.NOT_ACCEPTABLE,
        )

    content = self.show(session, _id)
    if content["_admin"]["onboardingState"] != "ONBOARDED":
        raise EngineException(
            "Cannot get content because this resource is not at 'ONBOARDED' state. "
            "onboardingState is {}".format(content["_admin"]["onboardingState"]),
            http_code=HTTPStatus.CONFLICT,
        )
    storage = content["_admin"]["storage"]
    if path is not None and path != "$DESCRIPTOR":  # artifacts
        if not storage.get("pkg-dir"):
            raise EngineException(
                "Packages does not contains artifacts",
                http_code=HTTPStatus.BAD_REQUEST,
            )
        if self.fs.file_exists(
            (storage["folder"], storage["pkg-dir"], *path), "dir"
        ):
            folder_content = self.fs.dir_ls(
                (storage["folder"], storage["pkg-dir"], *path)
            )
            return folder_content, "text/plain"
            # TODO manage folders in http
        else:
            return (
                self.fs.file_open(
                    (storage["folder"], storage["pkg-dir"], *path), "rb"
                ),
                "application/octet-stream",
            )

    # pkgtype   accept  ZIP  TEXT    -> result
    # manyfiles         yes  X       -> zip
    # onefile           yes  no      -> zip
    contain_many_files = False
    if storage.get("pkg-dir"):
        # check if there are more than one file in the package, ignoring checksums.txt.
        pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
        if len(pkg_files) >= 3 or (
            len(pkg_files) == 2 and "checksums.txt" not in pkg_files
        ):
            contain_many_files = True
    if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
        return (
            self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
            "text/plain",
        )
    elif contain_many_files and not accept_zip:
        raise EngineException(
            "Packages that contains several files need to be retrieved with 'application/zip'"
            "Accept header",
            http_code=HTTPStatus.NOT_ACCEPTABLE,
        )
    else:
        if not storage.get("zipfile"):
            # TODO generate zipfile if not present
            raise EngineException(
                "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                "future versions",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        return (
            self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
            accept_zip,
        )
def _remove_yang_prefixes_from_descriptor(self, descriptor):
    """Return a copy of *descriptor* with YANG module prefixes stripped from keys.

    Keys like "etsi-nfv-vnfd:vnfd" become "vnfd"; the transformation is applied
    recursively to nested dicts and to dicts inside lists.

    :param descriptor: descriptor dict (not mutated)
    :return: new dict with prefix-free keys
    """
    new_descriptor = {}
    for k, v in descriptor.items():
        new_v = v
        if isinstance(v, dict):
            new_v = self._remove_yang_prefixes_from_descriptor(v)
        elif isinstance(v, list):
            new_v = list()
            for x in v:
                if isinstance(x, dict):
                    new_v.append(self._remove_yang_prefixes_from_descriptor(x))
                else:
                    new_v.append(x)
        # Keep only the local part of a possibly prefixed "module:key" name.
        new_descriptor[k.split(":")[-1]] = new_v
    return new_descriptor
def pyangbind_validation(self, item, data, force=False):
    """Base implementation: pyangbind validation must be provided by subclasses.

    :param item: topic name being validated (used only in the error message)
    :param data: descriptor content (unused here)
    :param force: unused here
    :raises EngineException: always, with INTERNAL_SERVER_ERROR
    """
    raise EngineException(
        "Not possible to validate '{}' item".format(item),
        http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
    )
def _validate_input_edit(self, indata, content, force=False):
    """Normalize and validate an edit payload for a descriptor.

    Moves "operationalState" and "userDefinedData" under "_admin" and rejects
    invalid states, non-dict user data, or a no-op state change.

    :param indata: edit payload, mutated in place
    :param content: current database content of the descriptor
    :param force: unused here
    :return: the normalized indata
    :raises EngineException: on invalid input (BAD_REQUEST / CONFLICT)
    """
    # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
    if "_id" in indata:
        indata.pop("_id")
    if "_admin" not in indata:
        indata["_admin"] = {}

    if "operationalState" in indata:
        if indata["operationalState"] in ("ENABLED", "DISABLED"):
            indata["_admin"]["operationalState"] = indata.pop("operationalState")
        else:
            raise EngineException(
                "State '{}' is not a valid operational state".format(
                    indata["operationalState"]
                ),
                http_code=HTTPStatus.BAD_REQUEST,
            )

    # In the case of user defined data, we need to put the data in the root of the object
    # to preserve current expected behaviour
    if "userDefinedData" in indata:
        data = indata.pop("userDefinedData")
        # isinstance instead of `type(data) == dict`: accepts dict subclasses too.
        if isinstance(data, dict):
            indata["_admin"]["userDefinedData"] = data
        else:
            raise EngineException(
                "userDefinedData should be an object, but is '{}' instead".format(
                    type(data)
                ),
                http_code=HTTPStatus.BAD_REQUEST,
            )

    if (
        "operationalState" in indata["_admin"]
        and content["_admin"]["operationalState"]
        == indata["_admin"]["operationalState"]
    ):
        raise EngineException(
            "operationalState already {}".format(
                content["_admin"]["operationalState"]
            ),
            http_code=HTTPStatus.CONFLICT,
        )

    return indata
def _validate_descriptor_changes(
    self,
    descriptor_file_name,
    old_descriptor_directory,
    new_descriptor_directory,
):
    """Compare the stored and the proposed descriptor revisions.

    Base implementation is a no-op; subclasses may reject forbidden changes.

    :param descriptor_file_name: descriptor file name inside the package
    :param old_descriptor_directory: fs path of the current revision
    :param new_descriptor_directory: fs path of the proposed revision
    """
    # Todo: compare changes and throw a meaningful exception for the user to understand
    # Example:
    # raise EngineException(
    #     "Error in validating new descriptor: <NODE> cannot be modified",
    #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
    # )
    pass
709 class VnfdTopic(DescriptorTopic
):
def __init__(self, db, fs, msg, auth):
    """Initialize the VNFD topic; all wiring is done by DescriptorTopic.

    :param db: database driver
    :param fs: file-system storage driver
    :param msg: message bus driver
    :param auth: authentication/authorization connector
    """
    # super() instead of the explicit DescriptorTopic.__init__(self, ...);
    # equivalent in this single-inheritance chain and the modern idiom.
    super().__init__(db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate a SOL006 VNFD against the ETSI NFV YANG model via pyangbind.

    :param item: topic name ("vnfds"), used only for context
    :param data: VNFD content (SOL006 format required)
    :param force: when True, unknown leaves are skipped instead of rejected
    :return: the input data deep-updated with the serialized (normalized) model
    :raises EngineException: on old-format input or any pyangbind failure
    """
    if self._descriptor_data_is_in_old_format(data):
        raise EngineException(
            "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
    try:
        myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
        # NOTE(review): positional None/None and keyword args reconstructed from
        # damaged source — confirm against pybindJSONDecoder.load_ietf_json usage.
        pybindJSONDecoder.load_ietf_json(
            {"etsi-nfv-vnfd:vnfd": data},
            None,
            None,
            obj=myvnfd,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(myvnfd, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
        return utils.deep_update_dict(data, desc_out)
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
def _descriptor_data_is_in_old_format(data):
    """Return True when *data* uses the pre-SOL006 envelope keys (old OSM VNFD format)."""
    return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
def _remove_envelop(indata=None):
    """Strip the outer "etsi-nfv-vnfd:vnfd" / "vnfd" envelope from a VNFD payload.

    :param indata: raw payload; may be None/empty
    :return: the inner VNFD dict, or {} when input is empty
    :raises EngineException: when the envelope value is not a dict
    """
    if not indata:
        return {}
    clean_indata = indata

    if clean_indata.get("etsi-nfv-vnfd:vnfd"):
        if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
            raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
        clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
    elif clean_indata.get("vnfd"):
        if not isinstance(clean_indata["vnfd"], dict):
            raise EngineException("'vnfd' must be dict")
        clean_indata = clean_indata["vnfd"]

    return clean_indata
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the common descriptor checks, then classify the VNFD type.

    Sets _admin.type to "pnfd" (only PDUs), "hnfd" (mixed) or "vnfd" (only
    VDUs); leaves it unset when neither VDUs nor PDUs are present.

    :return: the validated final content
    """
    final_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )

    # set type of vnfd
    contains_pdu = False
    contains_vdu = False
    for vdu in get_iterable(final_content.get("vdu")):
        if vdu.get("pdu-type"):
            contains_pdu = True
        else:
            contains_vdu = True
    if contains_pdu:
        final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
    elif contains_vdu:
        final_content["_admin"]["type"] = "vnfd"
    # if neither vud nor pdu do not fill type
    return final_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
    that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
    that uses this vnfd
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: vnfd internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return
    descriptor = db_content
    descriptor_id = descriptor.get("id")
    if not descriptor_id:  # empty vnfd not uploaded
        return

    _filter = self._get_project_filter(session)

    # check vnfrs using this vnfd
    _filter["vnfd-id"] = _id
    if self.db.get_list("vnfrs", _filter):
        raise EngineException(
            "There is at least one VNF instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )

    # check NSD referencing this VNFD
    del _filter["vnfd-id"]
    _filter["vnfd-id"] = descriptor_id
    if self.db.get_list("nsds", _filter):
        raise EngineException(
            "There is at least one NS package referencing this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a new/edited VNFD: pyangbind model check plus cross references.

    :param indata: VNFD content, mutated (read-only SOL005 fields dropped)
    :param storage_params: _admin.storage of the package (for file checks)
    :param force: skip unknown leaves in pyangbind validation
    :return: the serialized, validated descriptor
    :raises EngineException: on any validation failure
    """
    # Drop read-only fields that SOL005 clients may echo back.
    indata.pop("onboardingState", None)
    indata.pop("operationalState", None)
    indata.pop("usageState", None)
    indata.pop("links", None)

    indata = self.pyangbind_validation("vnfds", indata, force)
    # Cross references validation in the descriptor
    self.validate_mgmt_interface_connection_point(indata)

    for vdu in get_iterable(indata.get("vdu")):
        self.validate_vdu_internal_connection_points(vdu)
        self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
    self._validate_vdu_charms_in_package(storage_params, indata)

    self._validate_vnf_charms_in_package(storage_params, indata)

    self.validate_external_connection_points(indata)
    self.validate_internal_virtual_links(indata)
    self.validate_monitoring_params(indata)
    self.validate_scaling_group_descriptor(indata)

    return indata
def validate_mgmt_interface_connection_point(indata):
    """Check that 'mgmt-cp' is defined and references an existing ext-cpd.

    Descriptors without VDUs (pure PNFDs) are exempt.

    :param indata: VNFD content
    :raises EngineException: when mgmt-cp is missing or dangling
    """
    if not indata.get("vdu"):
        return
    if not indata.get("mgmt-cp"):
        raise EngineException(
            "'mgmt-cp' is a mandatory field and it is not defined",
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )

    for cp in get_iterable(indata.get("ext-cpd")):
        if cp["id"] == indata["mgmt-cp"]:
            break
    else:
        raise EngineException(
            "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
def validate_vdu_internal_connection_points(vdu):
    """Reject duplicated int-cpd ids inside a single VDU.

    :param vdu: one vdu entry of the VNFD
    :raises EngineException: on a repeated int-cpd id
    """
    int_cpds = set()
    for cpd in get_iterable(vdu.get("int-cpd")):
        cpd_id = cpd.get("id")
        if cpd_id and cpd_id in int_cpds:
            raise EngineException(
                "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
                    vdu["id"], cpd_id
                ),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        int_cpds.add(cpd_id)
def validate_external_connection_points(indata):
    """Reject duplicated ext-cpd ids and ext-cpd references to missing vdu int-cpds.

    :param indata: VNFD content
    :raises EngineException: on duplicated ids or dangling int-cpd references
    """
    all_vdus_int_cpds = set()
    for vdu in get_iterable(indata.get("vdu")):
        for int_cpd in get_iterable(vdu.get("int-cpd")):
            all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

    ext_cpds = set()
    for cpd in get_iterable(indata.get("ext-cpd")):
        cpd_id = cpd.get("id")
        if cpd_id and cpd_id in ext_cpds:
            raise EngineException(
                "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        ext_cpds.add(cpd_id)

        int_cpd = cpd.get("int-cpd")
        if int_cpd:
            if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                raise EngineException(
                    "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                        cpd_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
    # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
def _validate_vdu_charms_in_package(self, storage_params, indata):
    """Check that every VDU day-1/2 config that declares a juju EE has a charm folder.

    :param storage_params: _admin.storage of the package
    :param indata: VNFD content
    :raises EngineException: when a declared charm is absent from the package
    """
    # NOTE(review): reconstructed from a damaged source; the day1-2 container
    # access was restored from visible fragments — confirm against upstream.
    for df in indata["df"]:
        if (
            "lcm-operations-configuration" in df
            and "operate-vnf-op-config" in df["lcm-operations-configuration"]
        ):
            configs = df["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ].get("day1-2", [])
            vdus = df.get("vdu-profile", [])
            for vdu in vdus:
                for config in configs:
                    if config["id"] == vdu["id"] and utils.find_in_list(
                        config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
    """Check that a VDU's declared cloud-init file exists in the package.

    :param storage_params: _admin.storage of the package
    :param vdu: vdu entry that may declare "cloud-init-file"
    :param indata: VNFD content (for the error message)
    :raises EngineException: when the cloud-init file is missing
    """
    if not vdu.get("cloud-init-file"):
        return
    if not self._validate_package_folders(
        storage_params, "cloud_init", vdu["cloud-init-file"]
    ) and not self._validate_package_folders(
        storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
    ):
        raise EngineException(
            "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
            "package".format(indata["id"], vdu["id"])
        )
def _validate_vnf_charms_in_package(self, storage_params, indata):
    """Check that a VNF-level day-1/2 config declaring a juju EE has a charm folder.

    :param storage_params: _admin.storage of the package
    :param indata: VNFD content
    :raises EngineException: when a declared charm is absent from the package
    """
    # NOTE(review): reconstructed from a damaged source; the "day1-2" list
    # access restored from upstream layout — confirm.
    # Get VNF configuration through new container
    for deployment_flavor in indata.get("df", []):
        if "lcm-operations-configuration" not in deployment_flavor:
            return
        if (
            "operate-vnf-op-config"
            not in deployment_flavor["lcm-operations-configuration"]
        ):
            return
        for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
            "operate-vnf-op-config"
        ]["day1-2"]:
            if day_1_2_config["id"] == indata["id"]:
                if utils.find_in_list(
                    day_1_2_config.get("execution-environment-list", []),
                    lambda ee: "juju" in ee,
                ):
                    if not self._validate_package_folders(
                        storage_params, "charms"
                    ) and not self._validate_package_folders(
                        storage_params, "Scripts/charms"
                    ):
                        raise EngineException(
                            "Charm defined in vnf[id={}] but not present in "
                            "package".format(indata["id"])
                        )
def _validate_package_folders(self, storage_params, folder, file=None):
    """Return True when *folder* (optionally containing *file*) exists in the package.

    Handles both single-dir packages (no "pkg-dir") and packages with a
    top-level package directory, and both the final and the temporary
    ("<folder>_") storage locations.

    :param storage_params: _admin.storage of the package (may be falsy)
    :param folder: relative folder to look for (e.g. "charms")
    :param file: optional file name that must exist inside the folder
    :return: bool
    """
    # NOTE(review): branch structure reconstructed from damaged source —
    # confirm against upstream OSM NBI.
    if not storage_params:
        return False
    elif not storage_params.get("pkg-dir"):
        if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
            f = "{}_/{}".format(
                storage_params["folder"], folder
            )
        else:
            f = "{}/{}".format(
                storage_params["folder"], folder
            )
        if file:
            return self.fs.file_exists("{}/{}".format(f, file), "file")
        else:
            if self.fs.file_exists(f, "dir"):
                if self.fs.dir_ls(f):
                    return True
        return False
    else:
        if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
            f = "{}_/{}/{}".format(
                storage_params["folder"], storage_params["pkg-dir"], folder
            )
        else:
            f = "{}/{}/{}".format(
                storage_params["folder"], storage_params["pkg-dir"], folder
            )
        if file:
            return self.fs.file_exists("{}/{}".format(f, file), "file")
        else:
            if self.fs.file_exists(f, "dir"):
                if self.fs.dir_ls(f):
                    return True
        return False
def validate_internal_virtual_links(indata):
    """Validate int-virtual-link-desc ids and every reference to them.

    Rejects duplicated VLD ids, vdu int-cpd references to unknown VLDs, and
    df virtual-link-profile references to unknown VLDs.

    :param indata: VNFD content
    :raises EngineException: on any inconsistency (UNPROCESSABLE_ENTITY)
    """
    all_ivld_ids = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        ivld_id = ivld.get("id")
        if ivld_id and ivld_id in all_ivld_ids:
            raise EngineException(
                "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        else:
            all_ivld_ids.add(ivld_id)

    for vdu in get_iterable(indata.get("vdu")):
        for int_cpd in get_iterable(vdu.get("int-cpd")):
            int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
            if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                    "int-virtual-link-desc".format(
                        vdu["id"], int_cpd["id"], int_cpd_ivld_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )

    for df in get_iterable(indata.get("df")):
        for vlp in get_iterable(df.get("virtual-link-profile")):
            vlp_ivld_id = vlp.get("id")
            if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
                raise EngineException(
                    "df[id='{}']:virtual-link-profile='{}' must match an existing "
                    "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def validate_monitoring_params(indata):
    """Reject duplicated monitoring-parameter ids across VLDs, VDUs and DFs.

    :param indata: VNFD content
    :raises EngineException: on a repeated monitoring-parameter id
    """
    # NOTE(review): the format() arguments of the error messages were missing
    # in the damaged source and were restored from context — confirm upstream.
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                        ivld["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                        vdu["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "df[id='{}']:monitoring-parameter[id='{}']".format(
                        df["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)
def validate_scaling_group_descriptor(indata):
    """Validate the scaling-aspect section of every df in a VNFD.

    Checks that each scaling-criteria references an existing monitoring
    parameter, and that each scaling-config-action references an existing
    day1-2 configuration and one of its config-primitives.

    :param indata: VNFD descriptor as a dictionary
    :raises EngineException: (422) on any dangling reference
    """
    # Collect every monitoring parameter id declared anywhere in the VNFD.
    monitoring_param_ids = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            monitoring_param_ids.add(mp.get("id"))
    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            monitoring_param_ids.add(mp.get("id"))
    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            monitoring_param_ids.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for sa in get_iterable(df.get("scaling-aspect")):
            for sp in get_iterable(sa.get("scaling-policy")):
                for sc in get_iterable(sp.get("scaling-criteria")):
                    sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                    if (
                        sc_monitoring_param
                        and sc_monitoring_param not in monitoring_param_ids
                    ):
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                            "[name='{}']:scaling-criteria[name='{}']: "
                            "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                # NOTE(review): format args reconstructed - confirm
                                df["id"],
                                sa["id"],
                                sp["name"],
                                sc["name"],
                                sc_monitoring_param,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

            for sca in get_iterable(sa.get("scaling-config-action")):
                if (
                    "lcm-operations-configuration" not in df
                    or "operate-vnf-op-config"
                    not in df["lcm-operations-configuration"]
                    or not utils.find_in_list(
                        df["lcm-operations-configuration"][
                            "operate-vnf-op-config"
                        ].get("day1-2", []),
                        lambda config: config["id"] == indata["id"],
                    )
                ):
                    raise EngineException(
                        "'day1-2 configuration' not defined in the descriptor but it is "
                        "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                            df["id"], sa["id"]  # NOTE(review): args reconstructed
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                for configuration in get_iterable(
                    df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                        "day1-2", []
                    )
                ):
                    # NOTE(review): for/else with break reconstructed from gaps
                    # around the primitive-name comparison - confirm.
                    for primitive in get_iterable(
                        configuration.get("config-primitive")
                    ):
                        if (
                            primitive["name"]
                            == sca["vnf-config-primitive-name-ref"]
                        ):
                            break
                    else:
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                            "config-primitive-name-ref='{}' does not match any "
                            "day1-2 configuration:config-primitive:name".format(
                                df["id"],
                                sa["id"],
                                sca["vnf-config-primitive-name-ref"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes associated file system storage (via super).
    Deletes associated vnfpkgops and revision entries from the database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: if revisions should not be sent as a message
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove package operations bound to this package id.
    self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
    # Remove every stored revision of this descriptor (ids share the prefix).
    self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
def sol005_projection(self, data):
    """Expose _admin state fields in their SOL005 names and add _links."""
    for state_key in ("onboardingState", "operationalState", "usageState"):
        data[state_key] = data["_admin"][state_key]

    base = "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])
    links = {}  # NOTE(review): initializer line reconstructed - confirm
    links["self"] = {"href": base}
    links["vnfd"] = {"href": base + "/vnfd"}
    links["packageContent"] = {"href": base + "/package_content"}
    data["_links"] = links

    return super().sol005_projection(data)
def find_software_version(vnfd: dict) -> str:
    """Find the software version in the VNFD descriptor.

    Args:
        vnfd (dict): Descriptor as a dictionary, possibly still wrapped
            in a top-level "vnfd" envelope.

    Returns:
        software-version (str): the declared version, or "1.0" by default
    """
    if vnfd.get("vnfd"):
        # NOTE(review): envelope-unwrap line reconstructed - confirm.
        vnfd = vnfd["vnfd"]
    # Falsy (missing or empty) software-version falls back to the default.
    return vnfd.get("software-version") or "1.0"
def extract_policies(vnfd: dict) -> dict:
    """Removes the policies from the VNFD descriptor.

    Drops scaling/healing aspects from every df and alarm/monitoring
    policies from every vdu, in place.

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): VNFD which does not include policies
    """
    for df in vnfd.get("df", {}):
        for aspect_key in ("scaling-aspect", "healing-aspect"):
            if df.get(aspect_key, {}):
                df.pop(aspect_key)
    for vdu in vnfd.get("vdu", {}):
        for policy_key in ("alarm", "monitoring-parameter"):
            if vdu.get(policy_key, {}):
                vdu.pop(policy_key)
    return vnfd
def extract_day12_primitives(vnfd: dict) -> dict:
    """Removes the day1-2 primitives from the VNFD descriptor.

    For each df with an operate-vnf-op-config:day1-2 section, strips the
    config primitives from every day1-2 configuration, in place.

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): Cleared VNFD
    """
    for df_index, df in enumerate(vnfd.get("df", {})):
        if (
            df.get("lcm-operations-configuration", {})
            .get("operate-vnf-op-config", {})
            .get("day1-2", {})
        ):
            day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                "day1-2"
            )
            for config_index, config in enumerate(day12):
                for key in (
                    "initial-config-primitive",
                    "config-primitive",  # NOTE(review): key reconstructed - confirm
                    "terminate-config-primitive",
                ):
                    config.pop(key, None)
                day12[config_index] = config
            df["lcm-operations-configuration"]["operate-vnf-op-config"][
                "day1-2"
            ] = day12
            vnfd["df"][df_index] = df
    return vnfd
def remove_modifiable_items(self, vnfd: dict) -> dict:
    """Removes the modifiable parts from a VNFD descriptor.

    It calls different extract functions according to different update types
    to clear all the modifiable items from VNFD.

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): Descriptor which does not include modifiable contents
    """
    if vnfd.get("vnfd"):
        # NOTE(review): envelope-unwrap line reconstructed - confirm.
        vnfd = vnfd["vnfd"]
    vnfd.pop("_admin", None)
    # If the other extractions need to be done from VNFD,
    # the new extract methods could be appended to below list.
    for extract_function in [self.extract_day12_primitives, self.extract_policies]:
        vnfd = extract_function(vnfd)
    return vnfd
def _validate_descriptor_changes(
    self,
    descriptor_file_name: str,
    old_descriptor_directory: str,
    new_descriptor_directory: str,
):
    """Compares the old and new VNFD descriptors and validates the new descriptor.

    Args:
        descriptor_file_name (str): Name of the descriptor file inside each directory
        old_descriptor_directory (str): Directory of descriptor which is in-use
        new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)

    Returns:
        None

    Raises:
        EngineException: In case of error when there are unallowed changes
    """
    try:
        with self.fs.file_open(
            (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
        ) as old_descriptor_file:
            with self.fs.file_open(
                (new_descriptor_directory, descriptor_file_name), "r"
            ) as new_descriptor_file:
                old_content = yaml.load(
                    old_descriptor_file.read(), Loader=yaml.SafeLoader
                )
                new_content = yaml.load(
                    new_descriptor_file.read(), Loader=yaml.SafeLoader
                )
                if old_content and new_content:
                    # A changed software-version is a regular upgrade:
                    # no diff validation needed in that case.
                    if self.find_software_version(
                        old_content
                    ) != self.find_software_version(new_content):
                        return
                    disallowed_change = DeepDiff(
                        self.remove_modifiable_items(old_content),
                        self.remove_modifiable_items(new_content),
                    )
                    if disallowed_change:
                        # NOTE(review): node-listing expression reconstructed
                        # from gaps - confirm against upstream.
                        changed_nodes = functools.reduce(
                            lambda a, b: a + " , " + b,
                            (
                                node.lstrip("root")
                                for node in disallowed_change.get(
                                    "values_changed"
                                ).keys()
                            ),
                        )
                        raise EngineException(
                            f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                            "there are disallowed changes in the vnf descriptor.",
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
    # NOTE(review): except clause reconstructed (original lines missing) - confirm.
    except (
        DbException,
        AttributeError,
        IndexError,
        KeyError,
        ValueError,
    ) as e:
        raise type(e)(
            "VNF Descriptor could not be processed with error: {}.".format(e)
        )
class NsdTopic(DescriptorTopic):
    """SOL005 NSD topic: validation, conflict checking and projection of NSDs."""

    # NOTE(review): class attributes were in missing lines - confirm values.
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 model using pyangbind."""
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )  # NOTE(review): call arguments reconstructed - confirm
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                # Restore vnf-profile content that pyangbind does not keep.
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 descriptors carry an "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the nsd/etsi-nfv-nsd:nsd envelope and return the bare NSD."""
        if not indata:
            return {}
        clean_indata = indata
        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata.copy()

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt network VLD."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every vnf-profile references one of the declared vnfd-ids."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if type(data) == dict:
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)  # NOTE(review): arg reconstructed - confirm
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return {vnfd-id: vnfd} for every constituent VNFD of the NSD."""
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check constituent-cpd-ids reference existing ext-cpds of the VNFD."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                # NOTE(review): some args reconstructed - confirm
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super).
        Deletes associated revision entries from the database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptor.

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in (
                "initial-config-primitive",
                "terminate-config-primitive",
            ):
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from an NSD descriptor.

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD.

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]  # NOTE(review): unwrap line reconstructed - confirm
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd = extract_function(nsd)
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor.

        Args:
            descriptor_file_name: Name of the descriptor file inside each directory
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Raises:
            EngineException: In case of error if the changes are not allowed
        """
        try:
            with self.fs.file_open(
                (old_descriptor_directory, descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.load(
                        old_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    new_content = yaml.load(
                        new_descriptor_file.read(), Loader=yaml.SafeLoader
                    )
                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )
                        if disallowed_change:
                            # NOTE(review): node-listing expression reconstructed
                            # from gaps - confirm against upstream.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                (
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ),
                            )
                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        # NOTE(review): except clause reconstructed (original lines missing).
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Expose _admin state fields with SOL005 NSD names and add _links."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}  # NOTE(review): initializer line reconstructed - confirm
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
class NstTopic(DescriptorTopic):
    """Network Slice Template topic: validation and projection of NSTs."""

    # NOTE(review): topic attributes were in missing lines - confirm values.
    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST with the pyangbind information model."""
        try:
            mynst = nst_im()  # NOTE(review): construction reconstructed - confirm
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )  # NOTE(review): call arguments reconstructed - confirm
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the nst/nst:nst envelope and return the bare NST."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose _admin state fields with SOL005 names and add _links."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}  # NOTE(review): initializer line reconstructed - confirm
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
class PduTopic(BaseTopic):
    """Physical Deployment Unit topic."""

    # NOTE(review): topic attributes were in missing lines - confirm values.
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Set the initial _admin state of a freshly created PDU entry."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "ENABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        _filter = self._get_project_filter(session)
        _filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
class VnfPkgOpTopic(BaseTopic):
    """VNF package operation occurrences: create-only topic (no edit/delete)."""

    # NOTE(review): topic attributes were in missing lines - confirm values.
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the requested KDU inside the VNFD.
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # Exactly one of helm-chart / juju-bundle must be present; the repo
        # name is the part before the first "/" when the reference has one.
        if helm_chart:  # NOTE(review): branch structure reconstructed - confirm
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:  # NOTE(review): repo lookup guard reconstructed - confirm
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None