1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
26 from deepdiff
import DeepDiff
27 from hashlib
import md5
28 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
29 from http
import HTTPStatus
31 from uuid
import uuid4
32 from re
import fullmatch
33 from zipfile
import ZipFile
34 from urllib
.parse
import urlparse
35 from osm_nbi
.validation
import (
42 from osm_nbi
.base_topic
import (
46 detect_descriptor_usage
,
48 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
49 from osm_im
.nst
import nst
as nst_im
50 from pyangbind
.lib
.serialise
import pybindJSONDecoder
51 import pyangbind
.lib
.pybindJSON
as pybindJSON
52 from osm_nbi
import utils
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"

# Pre-compiled pattern used to validate a kdu "helm-chart" value that is not a URL:
# lowercase alphanumeric segments (dashes allowed inside), optionally repo-prefixed.
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
61 class DescriptorTopic(BaseTopic
):
62 def __init__(self
, db
, fs
, msg
, auth
):
63 super().__init
__(db
, fs
, msg
, auth
)
65 def _validate_input_new(self
, indata
, storage_params
, force
=False):
68 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
69 final_content
= super().check_conflict_on_edit(
70 session
, final_content
, edit_content
, _id
73 def _check_unique_id_name(descriptor
, position
=""):
74 for desc_key
, desc_item
in descriptor
.items():
75 if isinstance(desc_item
, list) and desc_item
:
78 for index
, list_item
in enumerate(desc_item
):
79 if isinstance(list_item
, dict):
80 _check_unique_id_name(
81 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
85 list_item
.get("id") or list_item
.get("name")
87 desc_item_id
= "id" if list_item
.get("id") else "name"
88 if desc_item_id
and list_item
.get(desc_item_id
):
89 if list_item
[desc_item_id
] in used_ids
:
90 position
= "{}.{}[{}]".format(
91 position
, desc_key
, index
93 raise EngineException(
94 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
96 list_item
[desc_item_id
],
99 HTTPStatus
.UNPROCESSABLE_ENTITY
,
101 used_ids
.append(list_item
[desc_item_id
])
103 _check_unique_id_name(final_content
)
104 # 1. validate again with pyangbind
105 # 1.1. remove internal keys
107 for k
in ("_id", "_admin"):
108 if k
in final_content
:
109 internal_keys
[k
] = final_content
.pop(k
)
110 storage_params
= internal_keys
["_admin"].get("storage")
111 serialized
= self
._validate
_input
_new
(
112 final_content
, storage_params
, session
["force"]
115 # 1.2. modify final_content with a serialized version
116 final_content
= copy
.deepcopy(serialized
)
117 # 1.3. restore internal keys
118 for k
, v
in internal_keys
.items():
123 # 2. check that this id is not present
124 if "id" in edit_content
:
125 _filter
= self
._get
_project
_filter
(session
)
127 _filter
["id"] = final_content
["id"]
128 _filter
["_id.neq"] = _id
130 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
131 raise EngineException(
132 "{} with id '{}' already exists for this project".format(
133 (str(self
.topic
))[:-1], final_content
["id"]
141 def format_on_new(content
, project_id
=None, make_public
=False):
142 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
143 content
["_admin"]["onboardingState"] = "CREATED"
144 content
["_admin"]["operationalState"] = "DISABLED"
145 content
["_admin"]["usageState"] = "NOT_IN_USE"
147 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
149 Deletes file system storage associated with the descriptor
150 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
151 :param _id: server internal id
152 :param db_content: The database content of the descriptor
153 :param not_send_msg: To not send message (False) or store content (list) instead
154 :return: None if ok or raises EngineException with the problem
156 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
157 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
158 # Remove file revisions
159 if "revision" in db_content
["_admin"]:
160 revision
= db_content
["_admin"]["revision"]
162 self
.fs
.file_delete(_id
+ ":" + str(revision
), ignore_non_exist
=True)
163 revision
= revision
- 1
166 def get_one_by_id(db
, session
, topic
, id):
167 # find owned by this project
168 _filter
= BaseTopic
._get
_project
_filter
(session
)
170 desc_list
= db
.get_list(topic
, _filter
)
171 if len(desc_list
) == 1:
173 elif len(desc_list
) > 1:
175 "Found more than one {} with id='{}' belonging to this project".format(
181 # not found any: try to find public
182 _filter
= BaseTopic
._get
_project
_filter
(session
)
184 desc_list
= db
.get_list(topic
, _filter
)
187 "Not found any {} with id='{}'".format(topic
[:-1], id),
188 HTTPStatus
.NOT_FOUND
,
190 elif len(desc_list
) == 1:
194 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
200 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
202 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
203 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
204 (self.upload_content)
205 :param rollback: list to append created items at database in case a rollback may to be done
206 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
207 :param indata: data to be inserted
208 :param kwargs: used to override the indata descriptor
209 :param headers: http request headers
210 :return: _id, None: identity of the inserted data; and None as there is not any operation
213 # No needed to capture exceptions
215 self
.check_quota(session
)
219 if "userDefinedData" in indata
:
220 indata
= indata
["userDefinedData"]
222 # Override descriptor with query string kwargs
223 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
224 # uncomment when this method is implemented.
225 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
226 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
228 content
= {"_admin": {"userDefinedData": indata
, "revision": 0}}
231 content
, session
["project_id"], make_public
=session
["public"]
233 _id
= self
.db
.create(self
.topic
, content
)
234 rollback
.append({"topic": self
.topic
, "_id": _id
})
235 self
._send
_msg
("created", {"_id": _id
})
238 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
240 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
241 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
242 :param _id : the nsd,vnfd is already created, this is the id
243 :param indata: http body request
244 :param kwargs: user query string to override parameters. NOT USED
245 :param headers: http request headers
246 :return: True if package is completely uploaded or False if partial content has been uploded
247 Raise exception on error
249 # Check that _id exists and it is valid
250 current_desc
= self
.show(session
, _id
)
252 content_range_text
= headers
.get("Content-Range")
253 expected_md5
= headers
.get("Content-File-MD5")
255 content_type
= headers
.get("Content-Type")
258 and "application/gzip" in content_type
259 or "application/x-gzip" in content_type
262 if content_type
and "application/zip" in content_type
:
264 filename
= headers
.get("Content-Filename")
265 if not filename
and compressed
:
266 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
271 if "revision" in current_desc
["_admin"]:
272 revision
= current_desc
["_admin"]["revision"] + 1
274 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
280 if content_range_text
:
282 content_range_text
.replace("-", " ").replace("/", " ").split()
285 content_range
[0] != "bytes"
286 ): # TODO check x<y not negative < total....
288 start
= int(content_range
[1])
289 end
= int(content_range
[2]) + 1
290 total
= int(content_range
[3])
293 # Rather than using a temp folder, we will store the package in a folder based on
294 # the current revision.
295 proposed_revision_path
= (
296 _id
+ ":" + str(revision
)
297 ) # all the content is upload here and if ok, it is rename from id_ to is folder
300 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
301 raise EngineException(
302 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
305 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
306 self
.fs
.mkdir(proposed_revision_path
)
307 fs_rollback
.append(proposed_revision_path
)
309 storage
= self
.fs
.get_params()
310 storage
["folder"] = proposed_revision_path
312 file_path
= (proposed_revision_path
, filename
)
313 if self
.fs
.file_exists(file_path
, "file"):
314 file_size
= self
.fs
.file_size(file_path
)
317 if file_size
!= start
:
318 raise EngineException(
319 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
322 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
324 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
325 if isinstance(indata
, dict):
326 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
327 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
331 indata_text
= indata
.read(4096)
332 indata_len
+= len(indata_text
)
335 file_pkg
.write(indata_text
)
336 if content_range_text
:
337 if indata_len
!= end
- start
:
338 raise EngineException(
339 "Mismatch between Content-Range header {}-{} and body length of {}".format(
340 start
, end
- 1, indata_len
342 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
345 # TODO update to UPLOADING
352 chunk_data
= file_pkg
.read(1024)
354 file_md5
.update(chunk_data
)
355 chunk_data
= file_pkg
.read(1024)
356 if expected_md5
!= file_md5
.hexdigest():
357 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
359 if compressed
== "gzip":
360 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
361 descriptor_file_name
= None
363 tarname
= tarinfo
.name
364 tarname_path
= tarname
.split("/")
366 not tarname_path
[0] or ".." in tarname_path
367 ): # if start with "/" means absolute path
368 raise EngineException(
369 "Absolute path or '..' are not allowed for package descriptor tar.gz"
371 if len(tarname_path
) == 1 and not tarinfo
.isdir():
372 raise EngineException(
373 "All files must be inside a dir for package descriptor tar.gz"
376 tarname
.endswith(".yaml")
377 or tarname
.endswith(".json")
378 or tarname
.endswith(".yml")
380 storage
["pkg-dir"] = tarname_path
[0]
381 if len(tarname_path
) == 2:
382 if descriptor_file_name
:
383 raise EngineException(
384 "Found more than one descriptor file at package descriptor tar.gz"
386 descriptor_file_name
= tarname
387 if not descriptor_file_name
:
388 raise EngineException(
389 "Not found any descriptor file at package descriptor tar.gz"
391 storage
["descriptor"] = descriptor_file_name
392 storage
["zipfile"] = filename
393 self
.fs
.file_extract(tar
, proposed_revision_path
)
394 with self
.fs
.file_open(
395 (proposed_revision_path
, descriptor_file_name
), "r"
396 ) as descriptor_file
:
397 content
= descriptor_file
.read()
398 elif compressed
== "zip":
399 zipfile
= ZipFile(file_pkg
)
400 descriptor_file_name
= None
401 for package_file
in zipfile
.infolist():
402 zipfilename
= package_file
.filename
403 file_path
= zipfilename
.split("/")
405 not file_path
[0] or ".." in zipfilename
406 ): # if start with "/" means absolute path
407 raise EngineException(
408 "Absolute path or '..' are not allowed for package descriptor zip"
412 zipfilename
.endswith(".yaml")
413 or zipfilename
.endswith(".json")
414 or zipfilename
.endswith(".yml")
416 zipfilename
.find("/") < 0
417 or zipfilename
.find("Definitions") >= 0
419 storage
["pkg-dir"] = ""
420 if descriptor_file_name
:
421 raise EngineException(
422 "Found more than one descriptor file at package descriptor zip"
424 descriptor_file_name
= zipfilename
425 if not descriptor_file_name
:
426 raise EngineException(
427 "Not found any descriptor file at package descriptor zip"
429 storage
["descriptor"] = descriptor_file_name
430 storage
["zipfile"] = filename
431 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
433 with self
.fs
.file_open(
434 (proposed_revision_path
, descriptor_file_name
), "r"
435 ) as descriptor_file
:
436 content
= descriptor_file
.read()
438 content
= file_pkg
.read()
439 storage
["descriptor"] = descriptor_file_name
= filename
441 if descriptor_file_name
.endswith(".json"):
442 error_text
= "Invalid json format "
443 indata
= json
.load(content
)
445 error_text
= "Invalid yaml format "
446 indata
= yaml
.safe_load(content
)
448 # Need to close the file package here so it can be copied from the
449 # revision to the current, unrevisioned record
454 # Fetch both the incoming, proposed revision and the original revision so we
455 # can call a validate method to compare them
456 current_revision_path
= _id
+ "/"
457 self
.fs
.sync(from_path
=current_revision_path
)
458 self
.fs
.sync(from_path
=proposed_revision_path
)
462 self
._validate
_descriptor
_changes
(
464 descriptor_file_name
,
465 current_revision_path
,
466 proposed_revision_path
,
468 except Exception as e
:
470 self
.fs
.path
+ current_revision_path
, ignore_errors
=True
473 self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True
475 # Only delete the new revision. We need to keep the original version in place
476 # as it has not been changed.
477 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
480 indata
= self
._remove
_envelop
(indata
)
482 # Override descriptor with query string kwargs
484 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
486 current_desc
["_admin"]["storage"] = storage
487 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
488 current_desc
["_admin"]["operationalState"] = "ENABLED"
489 current_desc
["_admin"]["modified"] = time()
490 current_desc
["_admin"]["revision"] = revision
492 deep_update_rfc7396(current_desc
, indata
)
493 current_desc
= self
.check_conflict_on_edit(
494 session
, current_desc
, indata
, _id
=_id
497 # Copy the revision to the active package name by its original id
498 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
500 self
.fs
.path
+ proposed_revision_path
,
501 self
.fs
.path
+ current_revision_path
,
503 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
504 self
.fs
.mkdir(current_revision_path
)
505 self
.fs
.reverse_sync(from_path
=current_revision_path
)
507 shutil
.rmtree(self
.fs
.path
+ _id
)
509 self
.db
.replace(self
.topic
, _id
, current_desc
)
511 # Store a copy of the package as a point in time revision
512 revision_desc
= dict(current_desc
)
513 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
514 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
518 self
._send
_msg
("edited", indata
)
520 # TODO if descriptor has changed because kwargs update content and remove cached zip
521 # TODO if zip is not present creates one
524 except EngineException
:
527 raise EngineException(
528 "invalid Content-Range header format. Expected 'bytes start-end/total'",
529 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
532 raise EngineException(
533 "invalid upload transaction sequence: '{}'".format(e
),
534 HTTPStatus
.BAD_REQUEST
,
536 except tarfile
.ReadError
as e
:
537 raise EngineException(
538 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
540 except (ValueError, yaml
.YAMLError
) as e
:
541 raise EngineException(error_text
+ str(e
))
542 except ValidationError
as e
:
543 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
547 for file in fs_rollback
:
548 self
.fs
.file_delete(file, ignore_non_exist
=True)
550 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
552 Return the file content of a vnfd or nsd
553 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
554 :param _id: Identity of the vnfd, nsd
555 :param path: artifact path or "$DESCRIPTOR" or None
556 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
557 :return: opened file plus Accept format or raises an exception
559 accept_text
= accept_zip
= False
561 if "text/plain" in accept_header
or "*/*" in accept_header
:
563 if "application/zip" in accept_header
or "*/*" in accept_header
:
564 accept_zip
= "application/zip"
565 elif "application/gzip" in accept_header
:
566 accept_zip
= "application/gzip"
568 if not accept_text
and not accept_zip
:
569 raise EngineException(
570 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
571 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
574 content
= self
.show(session
, _id
)
575 if content
["_admin"]["onboardingState"] != "ONBOARDED":
576 raise EngineException(
577 "Cannot get content because this resource is not at 'ONBOARDED' state. "
578 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
579 http_code
=HTTPStatus
.CONFLICT
,
581 storage
= content
["_admin"]["storage"]
582 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
583 if not storage
.get("pkg-dir") and not storage
.get("folder"):
584 raise EngineException(
585 "Packages does not contains artifacts",
586 http_code
=HTTPStatus
.BAD_REQUEST
,
588 if self
.fs
.file_exists(
589 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
591 folder_content
= self
.fs
.dir_ls(
592 (storage
["folder"], storage
["pkg-dir"], *path
)
594 return folder_content
, "text/plain"
595 # TODO manage folders in http
599 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
601 "application/octet-stream",
604 # pkgtype accept ZIP TEXT -> result
605 # manyfiles yes X -> zip
607 # onefile yes no -> zip
609 contain_many_files
= False
610 if storage
.get("pkg-dir"):
611 # check if there are more than one file in the package, ignoring checksums.txt.
612 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
613 if len(pkg_files
) >= 3 or (
614 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
616 contain_many_files
= True
617 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
619 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
622 elif contain_many_files
and not accept_zip
:
623 raise EngineException(
624 "Packages that contains several files need to be retrieved with 'application/zip'"
626 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
629 if not storage
.get("zipfile"):
630 # TODO generate zipfile if not present
631 raise EngineException(
632 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
634 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
637 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
641 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
643 for k
, v
in descriptor
.items():
645 if isinstance(v
, dict):
646 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
647 elif isinstance(v
, list):
650 if isinstance(x
, dict):
651 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
654 new_descriptor
[k
.split(":")[-1]] = new_v
655 return new_descriptor
657 def pyangbind_validation(self
, item
, data
, force
=False):
658 raise EngineException(
659 "Not possible to validate '{}' item".format(item
),
660 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
663 def _validate_input_edit(self
, indata
, content
, force
=False):
664 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
667 if "_admin" not in indata
:
668 indata
["_admin"] = {}
670 if "operationalState" in indata
:
671 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
672 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
674 raise EngineException(
675 "State '{}' is not a valid operational state".format(
676 indata
["operationalState"]
678 http_code
=HTTPStatus
.BAD_REQUEST
,
681 # In the case of user defined data, we need to put the data in the root of the object
682 # to preserve current expected behaviour
683 if "userDefinedData" in indata
:
684 data
= indata
.pop("userDefinedData")
685 if isinstance(data
, dict):
686 indata
["_admin"]["userDefinedData"] = data
688 raise EngineException(
689 "userDefinedData should be an object, but is '{}' instead".format(
692 http_code
=HTTPStatus
.BAD_REQUEST
,
696 "operationalState" in indata
["_admin"]
697 and content
["_admin"]["operationalState"]
698 == indata
["_admin"]["operationalState"]
700 raise EngineException(
701 "operationalState already {}".format(
702 content
["_admin"]["operationalState"]
704 http_code
=HTTPStatus
.CONFLICT
,
709 def _validate_descriptor_changes(
712 descriptor_file_name
,
713 old_descriptor_directory
,
714 new_descriptor_directory
,
717 # raise EngineException(
718 # "Error in validating new descriptor: <NODE> cannot be modified",
719 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
724 class VnfdTopic(DescriptorTopic
):
728 def __init__(self
, db
, fs
, msg
, auth
):
729 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
731 def pyangbind_validation(self
, item
, data
, force
=False):
732 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
733 raise EngineException(
734 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
735 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
738 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
739 pybindJSONDecoder
.load_ietf_json(
740 {"etsi-nfv-vnfd:vnfd": data
},
747 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
748 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
749 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
750 return utils
.deep_update_dict(data
, desc_out
)
751 except Exception as e
:
752 raise EngineException(
753 "Error in pyangbind validation: {}".format(str(e
)),
754 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
758 def _descriptor_data_is_in_old_format(data
):
759 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
762 def _remove_envelop(indata
=None):
765 clean_indata
= indata
767 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
768 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
769 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
770 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
771 elif clean_indata
.get("vnfd"):
772 if not isinstance(clean_indata
["vnfd"], dict):
773 raise EngineException("'vnfd' must be dict")
774 clean_indata
= clean_indata
["vnfd"]
778 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
779 final_content
= super().check_conflict_on_edit(
780 session
, final_content
, edit_content
, _id
786 for vdu
in get_iterable(final_content
.get("vdu")):
787 if vdu
.get("pdu-type"):
792 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
794 final_content
["_admin"]["type"] = "vnfd"
795 # if neither vud nor pdu do not fill type
798 def check_conflict_on_del(self
, session
, _id
, db_content
):
800 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
801 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
803 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
804 :param _id: vnfd internal id
805 :param db_content: The database content of the _id.
806 :return: None or raises EngineException with the conflict
810 descriptor
= db_content
811 descriptor_id
= descriptor
.get("id")
812 if not descriptor_id
: # empty vnfd not uploaded
815 _filter
= self
._get
_project
_filter
(session
)
817 # check vnfrs using this vnfd
818 _filter
["vnfd-id"] = _id
819 if self
.db
.get_list("vnfrs", _filter
):
820 raise EngineException(
821 "There is at least one VNF instance using this descriptor",
822 http_code
=HTTPStatus
.CONFLICT
,
825 # check NSD referencing this VNFD
826 del _filter
["vnfd-id"]
827 _filter
["vnfd-id"] = descriptor_id
828 if self
.db
.get_list("nsds", _filter
):
829 raise EngineException(
830 "There is at least one NS package referencing this descriptor",
831 http_code
=HTTPStatus
.CONFLICT
,
834 def _validate_input_new(self
, indata
, storage_params
, force
=False):
835 indata
.pop("onboardingState", None)
836 indata
.pop("operationalState", None)
837 indata
.pop("usageState", None)
838 indata
.pop("links", None)
840 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
841 # Cross references validation in the descriptor
843 self
.validate_mgmt_interface_connection_point(indata
)
845 for vdu
in get_iterable(indata
.get("vdu")):
846 self
.validate_vdu_internal_connection_points(vdu
)
847 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
848 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
850 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
852 self
.validate_external_connection_points(indata
)
853 self
.validate_internal_virtual_links(indata
)
854 self
.validate_monitoring_params(indata
)
855 self
.validate_scaling_group_descriptor(indata
)
856 self
.validate_helm_chart(indata
)
861 def validate_helm_chart(indata
):
863 result
= urlparse(url
)
864 return all([result
.scheme
, result
.netloc
])
866 kdus
= indata
.get("kdu", [])
868 helm_chart_value
= kdu
.get("helm-chart")
869 if not helm_chart_value
:
872 valid_helm_chart_re
.match(helm_chart_value
) or is_url(helm_chart_value
)
874 raise EngineException(
875 "helm-chart '{}' is not valid".format(helm_chart_value
),
876 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
880 def validate_mgmt_interface_connection_point(indata
):
881 if not indata
.get("vdu"):
883 if not indata
.get("mgmt-cp"):
884 raise EngineException(
885 "'mgmt-cp' is a mandatory field and it is not defined",
886 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
889 for cp
in get_iterable(indata
.get("ext-cpd")):
890 if cp
["id"] == indata
["mgmt-cp"]:
893 raise EngineException(
894 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
895 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
899 def validate_vdu_internal_connection_points(vdu
):
901 for cpd
in get_iterable(vdu
.get("int-cpd")):
902 cpd_id
= cpd
.get("id")
903 if cpd_id
and cpd_id
in int_cpds
:
904 raise EngineException(
905 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
908 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
913 def validate_external_connection_points(indata
):
914 all_vdus_int_cpds
= set()
915 for vdu
in get_iterable(indata
.get("vdu")):
916 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
917 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
920 for cpd
in get_iterable(indata
.get("ext-cpd")):
921 cpd_id
= cpd
.get("id")
922 if cpd_id
and cpd_id
in ext_cpds
:
923 raise EngineException(
924 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
925 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
929 int_cpd
= cpd
.get("int-cpd")
931 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
932 raise EngineException(
933 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
936 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
938 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
940 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
941 for df
in indata
["df"]:
943 "lcm-operations-configuration" in df
944 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
946 configs
= df
["lcm-operations-configuration"][
947 "operate-vnf-op-config"
949 vdus
= df
.get("vdu-profile", [])
951 for config
in configs
:
952 if config
["id"] == vdu
["id"] and utils
.find_in_list(
953 config
.get("execution-environment-list", []),
954 lambda ee
: "juju" in ee
,
956 if not self
._validate
_package
_folders
(
957 storage_params
, "charms"
958 ) and not self
._validate
_package
_folders
(
959 storage_params
, "Scripts/charms"
961 raise EngineException(
962 "Charm defined in vnf[id={}] but not present in "
963 "package".format(indata
["id"])
966 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
967 if not vdu
.get("cloud-init-file"):
969 if not self
._validate
_package
_folders
(
970 storage_params
, "cloud_init", vdu
["cloud-init-file"]
971 ) and not self
._validate
_package
_folders
(
972 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
974 raise EngineException(
975 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
976 "package".format(indata
["id"], vdu
["id"])
979 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
980 # Get VNF configuration through new container
981 for deployment_flavor
in indata
.get("df", []):
982 if "lcm-operations-configuration" not in deployment_flavor
:
985 "operate-vnf-op-config"
986 not in deployment_flavor
["lcm-operations-configuration"]
989 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
990 "operate-vnf-op-config"
992 if day_1_2_config
["id"] == indata
["id"]:
993 if utils
.find_in_list(
994 day_1_2_config
.get("execution-environment-list", []),
995 lambda ee
: "juju" in ee
,
997 if not self
._validate
_package
_folders
(
998 storage_params
, "charms"
999 ) and not self
._validate
_package
_folders
(
1000 storage_params
, "Scripts/charms"
1002 raise EngineException(
1003 "Charm defined in vnf[id={}] but not present in "
1004 "package".format(indata
["id"])
1007 def _validate_package_folders(self
, storage_params
, folder
, file=None):
1008 if not storage_params
:
1010 elif not storage_params
.get("pkg-dir"):
1011 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1012 f
= "{}_/{}".format(storage_params
["folder"], folder
)
1014 f
= "{}/{}".format(storage_params
["folder"], folder
)
1016 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1018 if self
.fs
.file_exists(f
, "dir"):
1019 if self
.fs
.dir_ls(f
):
1023 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
1024 f
= "{}_/{}/{}".format(
1025 storage_params
["folder"], storage_params
["pkg-dir"], folder
1028 f
= "{}/{}/{}".format(
1029 storage_params
["folder"], storage_params
["pkg-dir"], folder
1032 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
1034 if self
.fs
.file_exists(f
, "dir"):
1035 if self
.fs
.dir_ls(f
):
def validate_internal_virtual_links(indata):
    """Validate the internal virtual links of a VNFD.

    Checks that int-virtual-link-desc ids are unique and that every reference
    to them (vdu int-cpd and df virtual-link-profile) points to an existing id.

    :param indata: VNFD descriptor as a dict
    :raises EngineException: (422) on duplicated or dangling VLD references
    """
    all_ivld_ids = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        ivld_id = ivld.get("id")
        if ivld_id and ivld_id in all_ivld_ids:
            raise EngineException(
                "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        else:
            all_ivld_ids.add(ivld_id)

    # Every int-cpd that names an internal VLD must match a declared one.
    for vdu in get_iterable(indata.get("vdu")):
        for int_cpd in get_iterable(vdu.get("int-cpd")):
            int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
            if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                    "int-virtual-link-desc".format(
                        vdu["id"], int_cpd["id"], int_cpd_ivld_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )

    # Same for virtual-link-profiles declared inside each deployment flavour.
    for df in get_iterable(indata.get("df")):
        for vlp in get_iterable(df.get("virtual-link-profile")):
            vlp_ivld_id = vlp.get("id")
            if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
                raise EngineException(
                    "df[id='{}']:virtual-link-profile='{}' must match an existing "
                    "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def validate_monitoring_params(indata):
    """Validate that monitoring parameter ids are unique across the VNFD.

    Walks int-virtual-link-desc ("monitoring-parameters"), vdu and df
    ("monitoring-parameter") entries, collecting ids into one shared set.

    :param indata: VNFD descriptor as a dict
    :raises EngineException: (422) on a duplicated monitoring parameter id
    """
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                        ivld["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                        vdu["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            mp_id = mp.get("id")
            if mp_id and mp_id in all_monitoring_params:
                raise EngineException(
                    "Duplicated monitoring-parameter id in "
                    "df[id='{}']:monitoring-parameter[id='{}']".format(
                        df["id"], mp_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_monitoring_params.add(mp_id)
def validate_scaling_group_descriptor(indata):
    """Validate the scaling aspects of a VNFD deployment flavour.

    Checks that every scaling-criteria vnf-monitoring-param-ref points to a
    declared monitoring parameter, and that every scaling-config-action has a
    matching day1-2 config-primitive.

    :param indata: VNFD descriptor as a dict
    :raises EngineException: (422) on a dangling reference
    """
    # Collect every monitoring parameter id declared anywhere in the VNFD.
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            all_monitoring_params.add(mp.get("id"))

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for sa in get_iterable(df.get("scaling-aspect")):
            for sp in get_iterable(sa.get("scaling-policy")):
                for sc in get_iterable(sp.get("scaling-criteria")):
                    sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                    if (
                        sc_monitoring_param
                        and sc_monitoring_param not in all_monitoring_params
                    ):
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                            "[name='{}']:scaling-criteria[name='{}']: "
                            "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                df["id"],
                                sa["id"],
                                sp["name"],
                                sc["name"],
                                sc_monitoring_param,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

            for sca in get_iterable(sa.get("scaling-config-action")):
                # A scaling-config-action requires a day1-2 configuration
                # block for this VNFD id to exist at all.
                if (
                    "lcm-operations-configuration" not in df
                    or "operate-vnf-op-config"
                    not in df["lcm-operations-configuration"]
                    or not utils.find_in_list(
                        df["lcm-operations-configuration"][
                            "operate-vnf-op-config"
                        ].get("day1-2", []),
                        lambda config: config["id"] == indata["id"],
                    )
                ):
                    raise EngineException(
                        "'day1-2 configuration' not defined in the descriptor but it is "
                        "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                            df["id"], sa["id"]
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                # The referenced primitive name must exist in some day1-2
                # configuration's config-primitive list.
                for configuration in get_iterable(
                    df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                        "day1-2", []
                    )
                ):
                    for primitive in get_iterable(
                        configuration.get("config-primitive")
                    ):
                        if (
                            primitive["name"]
                            == sca["vnf-config-primitive-name-ref"]
                        ):
                            break
                    else:
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                            "config-primitive-name-ref='{}' does not match any "
                            "day1-2 configuration:config-primitive:name".format(
                                df["id"],
                                sa["id"],
                                sca["vnf-config-primitive-name-ref"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Deletes associate file system storage (via super)
    Deletes associated vnfpkgops from database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: messages not to send, passed through to super
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove package operations that reference this package ...
    self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
    # ... and all stored revisions of this descriptor (ids share the _id prefix).
    self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
def sol005_projection(self, data):
    """Project internal _admin state into the SOL005 VNF package view.

    Copies onboarding/operational/usage state to top-level keys and adds the
    SOL005 "_links" section, then delegates to the parent projection.

    :param data: descriptor document as stored in the database
    :return: the projected document
    """
    data["onboardingState"] = data["_admin"]["onboardingState"]
    data["operationalState"] = data["_admin"]["operationalState"]
    data["usageState"] = data["_admin"]["usageState"]

    links = {}
    links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
    links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
    links["packageContent"] = {
        "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
    }
    data["_links"] = links

    return super().sol005_projection(data)
def find_software_version(vnfd: dict) -> str:
    """Find the software version in the VNFD descriptors

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        software-version (str): the declared version, or "1.0" by default
    """
    default_sw_version = "1.0"
    # The descriptor may still be wrapped in a "vnfd" envelope.
    if vnfd.get("vnfd"):
        vnfd = vnfd["vnfd"]
    if vnfd.get("software-version"):
        return vnfd["software-version"]
    else:
        return default_sw_version
def extract_policies(vnfd: dict) -> dict:
    """Removes the policies from the VNFD descriptors

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): VNFD which does not include policies
    """
    # Scaling/healing aspects live in the deployment flavours.
    for df in vnfd.get("df", {}):
        for policy in ["scaling-aspect", "healing-aspect"]:
            if df.get(policy, {}):
                df.pop(policy)
    # Alarms and monitoring parameters live in the VDUs.
    for vdu in vnfd.get("vdu", {}):
        for alarm_policy in ["alarm", "monitoring-parameter"]:
            if vdu.get(alarm_policy, {}):
                vdu.pop(alarm_policy)
    return vnfd
def extract_day12_primitives(vnfd: dict) -> dict:
    """Removes the day12 primitives from the VNFD descriptors

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): VNFD without day1-2 config primitives
    """
    for df_id, df in enumerate(vnfd.get("df", {})):
        if (
            df.get("lcm-operations-configuration", {})
            .get("operate-vnf-op-config", {})
            .get("day1-2", {})
        ):
            day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                "day1-2"
            )
            for config_id, config in enumerate(day12):
                # Strip every kind of config primitive from the day1-2 entry.
                for key in (
                    "initial-config-primitive",
                    "config-primitive",
                    "terminate-config-primitive",
                ):
                    config.pop(key, None)
                day12[config_id] = config
            df["lcm-operations-configuration"]["operate-vnf-op-config"][
                "day1-2"
            ] = day12
            vnfd["df"][df_id] = df
    return vnfd
def remove_modifiable_items(self, vnfd: dict) -> dict:
    """Removes the modifiable parts from the VNFD descriptors

    It calls different extract functions according to different update types
    to clear all the modifiable items from VNFD

    Args:
        vnfd (dict): Descriptor as a dictionary

    Returns:
        vnfd (dict): Descriptor which does not include modifiable contents
    """
    # Unwrap a possible "vnfd" envelope first.
    if vnfd.get("vnfd"):
        vnfd = vnfd["vnfd"]
    vnfd.pop("_admin", None)
    # If the other extractions need to be done from VNFD,
    # the new extract methods could be appended to below list.
    for extract_function in [self.extract_day12_primitives, self.extract_policies]:
        vnfd_temp = extract_function(vnfd)
        vnfd = vnfd_temp
    return vnfd
def _validate_descriptor_changes(
    self,
    descriptor_id: str,
    descriptor_file_name: str,
    old_descriptor_directory: str,
    new_descriptor_directory: str,
):
    """Compares the old and new VNFD descriptors and validates the new descriptor.

    Args:
        descriptor_id (str): internal id of the descriptor under validation
        descriptor_file_name (str): file name of the descriptor
        old_descriptor_directory (str): Directory of descriptor which is in-use
        new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)

    Raises:
        EngineException: In case of error when there are unallowed changes
    """
    try:
        # If VNFD does not exist in DB or it is not in use by any NS,
        # validation is not required.
        vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
        if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
            return

        # Get the old and new descriptor contents in order to compare them.
        with self.fs.file_open(
            (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
        ) as old_descriptor_file:
            with self.fs.file_open(
                (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as new_descriptor_file:
                old_content = yaml.safe_load(old_descriptor_file.read())
                new_content = yaml.safe_load(new_descriptor_file.read())

                # If software version has changed, we do not need to validate
                # the differences anymore.
                if old_content and new_content:
                    if self.find_software_version(
                        old_content
                    ) != self.find_software_version(new_content):
                        return

                    # Only the non-modifiable parts are compared.
                    disallowed_change = DeepDiff(
                        self.remove_modifiable_items(old_content),
                        self.remove_modifiable_items(new_content),
                    )

                    if disallowed_change:
                        changed_nodes = functools.reduce(
                            lambda a, b: a + " , " + b,
                            (
                                node.lstrip("root")
                                for node in disallowed_change.get(
                                    "values_changed"
                                ).keys()
                            ),
                        )

                        raise EngineException(
                            f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                            "there are disallowed changes in the vnf descriptor.",
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
    except (
        DbException,
        AttributeError,
        IndexError,
        KeyError,
        ValueError,
    ) as e:
        # Re-raise the same exception type with a descriptor-processing message.
        raise type(e)(
            "VNF Descriptor could not be processed with error: {}.".format(e)
        )
class NsdTopic(DescriptorTopic):
    """SOL005 NSD topic: validation, conflict checks and projection of NSDs."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NSD against the ETSI SOL006 YANG model via pyangbind.

        :param item: topic item name (unused here beyond the interface)
        :param data: NSD descriptor as a dict
        :param force: when True, unknown attributes are skipped
        :return: the descriptor normalized by the model round-trip
        :raises EngineException: (422) on old format or validation errors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile is kept aside: the model round-trip may alter it.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 descriptors carried an "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the outer "nsd"/"etsi-nfv-nsd:nsd" envelope from a descriptor."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
        for fg in get_iterable(indata.get("vnffgd")):
            self.validate_vnffgd_data(fg, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on management networks."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnffgd_data(fg, indata):
        """Validate the internal references of a VNF forwarding graph descriptor."""
        position_list = []
        all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id")))
        for fgposition in get_iterable(fg.get("nfp-position-element")):
            position_list.append(fgposition["id"])

        for nfpd in get_iterable(fg.get("nfpd")):
            for position in get_iterable(nfpd.get("position-desc-id")):
                nfp_position = position.get("nfp-position-element-id")
                if position == "nfp-position-element-id":
                    nfp_position = position.get("nfp-position-element-id")
                if nfp_position[0] not in position_list:
                    raise EngineException(
                        "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
                        "does not match any nfp-position-element".format(
                            nfpd["id"], nfp_position[0]
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                for cp in get_iterable(position.get("cp-profile-id")):
                    for cpe in get_iterable(cp.get("constituent-profile-elements")):
                        constituent_base_element_id = cpe.get(
                            "constituent-base-element-id"
                        )
                        if (
                            constituent_base_element_id
                            and constituent_base_element_id not in all_vnf_ids
                        ):
                            raise EngineException(
                                "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
                                "does not match any constituent-base-element-id".format(
                                    cpe["id"], constituent_base_element_id
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df vnf-profile references a declared vnfd-id."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a {vnfd-id: vnfd} index for the constituent VNFDs of an NSD."""
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check constituent-cpd-id references against the VNFDs' ext-cpd ids."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: messages not to send, passed through to super
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in (
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ):
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict): Descriptor as a dictionary

        Returns:
            nsd (dict): Descriptor which does not include modifiable contents
        """
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_id: internal id of the descriptor under validation
            descriptor_file_name: file name of the descriptor
            old_descriptor_directory: Directory of descriptor which is in-use
            new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)

        Raises:
            EngineException: In case of error if the changes are not allowed
        """
        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                (
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ),
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Project internal _admin state into the SOL005 NSD view."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
class NstTopic(DescriptorTopic):
    """Network Slice Template topic: validation, conflicts and projection."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NST against its YANG model via pyangbind.

        :param item: topic item name (interface compatibility)
        :param data: NST descriptor as a dict
        :param force: when True, unknown attributes are skipped
        :return: the descriptor normalized by the model round-trip
        :raises EngineException: (422) on validation errors
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the outer "nst"/"nst:nst" envelope from a descriptor."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Project internal _admin state into the SOL005 NST view."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
class PduTopic(BaseTopic):
    """Physical Deployment Unit topic."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill in the _admin state of a freshly created PDU entry."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "ENABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        _filter = self._get_project_filter(session)
        _filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
class VnfPkgOpTopic(BaseTopic):
    """VNF package operation occurrences topic (create-only)."""

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        # Package operations are immutable once created.
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the KDU in the package to find its chart/bundle reference.
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        if helm_chart:
            indata["helm-chart"] = helm_chart
            # "<repo>/<chart>" form carries an explicit repository name.
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None