1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
24 from hashlib
import md5
25 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
26 from http
import HTTPStatus
28 from uuid
import uuid4
29 from re
import fullmatch
30 from zipfile
import ZipFile
31 from osm_nbi
.validation
import (
38 from osm_nbi
.base_topic
import BaseTopic
, EngineException
, get_iterable
39 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
40 from osm_im
.nst
import nst
as nst_im
41 from pyangbind
.lib
.serialise
import pybindJSONDecoder
42 import pyangbind
.lib
.pybindJSON
as pybindJSON
43 from osm_nbi
import utils
# Module author/maintainer contact (statement re-joined onto one line; the
# mangled source split it mid-assignment and did not parse).
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
class DescriptorTopic(BaseTopic):
    """Common behaviour for SOL005 descriptor topics (VNFD, NSD, ...).

    Subclasses provide the concrete ``topic``/``topic_msg`` values and the
    pyangbind validation for their descriptor type.
    """

    def __init__(self, db, fs, msg, auth):
        """Store the shared backends (database, file storage, message bus, auth).

        :param db: osm_common database driver
        :param fs: osm_common file-storage driver
        :param msg: message-bus driver used by _send_msg
        :param auth: authentication/authorization connector
        """
        # Idiom fix: use super() instead of the explicit BaseTopic.__init__ call;
        # equivalent here (single inheritance) and cooperative-inheritance safe.
        super().__init__(db, fs, msg, auth)
53 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
54 final_content
= super().check_conflict_on_edit(
55 session
, final_content
, edit_content
, _id
58 def _check_unique_id_name(descriptor
, position
=""):
59 for desc_key
, desc_item
in descriptor
.items():
60 if isinstance(desc_item
, list) and desc_item
:
63 for index
, list_item
in enumerate(desc_item
):
64 if isinstance(list_item
, dict):
65 _check_unique_id_name(
66 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
70 list_item
.get("id") or list_item
.get("name")
72 desc_item_id
= "id" if list_item
.get("id") else "name"
73 if desc_item_id
and list_item
.get(desc_item_id
):
74 if list_item
[desc_item_id
] in used_ids
:
75 position
= "{}.{}[{}]".format(
76 position
, desc_key
, index
78 raise EngineException(
79 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
81 list_item
[desc_item_id
],
84 HTTPStatus
.UNPROCESSABLE_ENTITY
,
86 used_ids
.append(list_item
[desc_item_id
])
88 _check_unique_id_name(final_content
)
89 # 1. validate again with pyangbind
90 # 1.1. remove internal keys
92 for k
in ("_id", "_admin"):
93 if k
in final_content
:
94 internal_keys
[k
] = final_content
.pop(k
)
95 storage_params
= internal_keys
["_admin"].get("storage")
96 serialized
= self
._validate
_input
_new
(
97 final_content
, storage_params
, session
["force"]
100 # 1.2. modify final_content with a serialized version
101 final_content
= copy
.deepcopy(serialized
)
102 # 1.3. restore internal keys
103 for k
, v
in internal_keys
.items():
108 # 2. check that this id is not present
109 if "id" in edit_content
:
110 _filter
= self
._get
_project
_filter
(session
)
112 _filter
["id"] = final_content
["id"]
113 _filter
["_id.neq"] = _id
115 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
116 raise EngineException(
117 "{} with id '{}' already exists for this project".format(
118 self
.topic
[:-1], final_content
["id"]
126 def format_on_new(content
, project_id
=None, make_public
=False):
127 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
128 content
["_admin"]["onboardingState"] = "CREATED"
129 content
["_admin"]["operationalState"] = "DISABLED"
130 content
["_admin"]["usageState"] = "NOT_IN_USE"
132 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
134 Deletes file system storage associated with the descriptor
135 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
136 :param _id: server internal id
137 :param db_content: The database content of the descriptor
138 :param not_send_msg: To not send message (False) or store content (list) instead
139 :return: None if ok or raises EngineException with the problem
141 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
142 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
143 # Remove file revisions
144 if "revision" in db_content
["_admin"]:
145 revision
= db_content
["_admin"]["revision"]
147 self
.fs
.file_delete(_id
+ ":" + str(revision
), ignore_non_exist
=True)
148 revision
= revision
- 1
152 def get_one_by_id(db
, session
, topic
, id):
153 # find owned by this project
154 _filter
= BaseTopic
._get
_project
_filter
(session
)
156 desc_list
= db
.get_list(topic
, _filter
)
157 if len(desc_list
) == 1:
159 elif len(desc_list
) > 1:
161 "Found more than one {} with id='{}' belonging to this project".format(
167 # not found any: try to find public
168 _filter
= BaseTopic
._get
_project
_filter
(session
)
170 desc_list
= db
.get_list(topic
, _filter
)
173 "Not found any {} with id='{}'".format(topic
[:-1], id),
174 HTTPStatus
.NOT_FOUND
,
176 elif len(desc_list
) == 1:
180 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
186 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
188 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
189 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
190 (self.upload_content)
191 :param rollback: list to append created items at database in case a rollback may to be done
192 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
193 :param indata: data to be inserted
194 :param kwargs: used to override the indata descriptor
195 :param headers: http request headers
196 :return: _id, None: identity of the inserted data; and None as there is not any operation
199 # No needed to capture exceptions
201 self
.check_quota(session
)
205 if "userDefinedData" in indata
:
206 indata
= indata
["userDefinedData"]
208 # Override descriptor with query string kwargs
209 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
210 # uncomment when this method is implemented.
211 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
212 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
214 content
= {"_admin": {
215 "userDefinedData": indata
,
220 content
, session
["project_id"], make_public
=session
["public"]
222 _id
= self
.db
.create(self
.topic
, content
)
223 rollback
.append({"topic": self
.topic
, "_id": _id
})
224 self
._send
_msg
("created", {"_id": _id
})
227 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
229 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
230 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
231 :param _id : the nsd,vnfd is already created, this is the id
232 :param indata: http body request
233 :param kwargs: user query string to override parameters. NOT USED
234 :param headers: http request headers
235 :return: True if package is completely uploaded or False if partial content has been uploded
236 Raise exception on error
238 # Check that _id exists and it is valid
239 current_desc
= self
.show(session
, _id
)
241 content_range_text
= headers
.get("Content-Range")
242 expected_md5
= headers
.get("Content-File-MD5")
244 content_type
= headers
.get("Content-Type")
247 and "application/gzip" in content_type
248 or "application/x-gzip" in content_type
253 and "application/zip" in content_type
256 filename
= headers
.get("Content-Filename")
257 if not filename
and compressed
:
258 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
263 if "revision" in current_desc
["_admin"]:
264 revision
= current_desc
["_admin"]["revision"] + 1
266 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
270 if content_range_text
:
272 content_range_text
.replace("-", " ").replace("/", " ").split()
275 content_range
[0] != "bytes"
276 ): # TODO check x<y not negative < total....
278 start
= int(content_range
[1])
279 end
= int(content_range
[2]) + 1
280 total
= int(content_range
[3])
283 # Rather than using a temp folder, we will store the package in a folder based on
284 # the current revision.
285 proposed_revision_path
= (
286 _id
+ ":" + str(revision
)
287 ) # all the content is upload here and if ok, it is rename from id_ to is folder
290 if not self
.fs
.file_exists(proposed_revision_path
, "dir"):
291 raise EngineException(
292 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
295 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
296 self
.fs
.mkdir(proposed_revision_path
)
298 storage
= self
.fs
.get_params()
299 storage
["folder"] = _id
301 file_path
= (proposed_revision_path
, filename
)
302 if self
.fs
.file_exists(file_path
, "file"):
303 file_size
= self
.fs
.file_size(file_path
)
306 if file_size
!= start
:
307 raise EngineException(
308 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
311 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
313 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
314 if isinstance(indata
, dict):
315 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
316 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
320 indata_text
= indata
.read(4096)
321 indata_len
+= len(indata_text
)
324 file_pkg
.write(indata_text
)
325 if content_range_text
:
326 if indata_len
!= end
- start
:
327 raise EngineException(
328 "Mismatch between Content-Range header {}-{} and body length of {}".format(
329 start
, end
- 1, indata_len
331 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
334 # TODO update to UPLOADING
341 chunk_data
= file_pkg
.read(1024)
343 file_md5
.update(chunk_data
)
344 chunk_data
= file_pkg
.read(1024)
345 if expected_md5
!= file_md5
.hexdigest():
346 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
348 if compressed
== "gzip":
349 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
350 descriptor_file_name
= None
352 tarname
= tarinfo
.name
353 tarname_path
= tarname
.split("/")
355 not tarname_path
[0] or ".." in tarname_path
356 ): # if start with "/" means absolute path
357 raise EngineException(
358 "Absolute path or '..' are not allowed for package descriptor tar.gz"
360 if len(tarname_path
) == 1 and not tarinfo
.isdir():
361 raise EngineException(
362 "All files must be inside a dir for package descriptor tar.gz"
365 tarname
.endswith(".yaml")
366 or tarname
.endswith(".json")
367 or tarname
.endswith(".yml")
369 storage
["pkg-dir"] = tarname_path
[0]
370 if len(tarname_path
) == 2:
371 if descriptor_file_name
:
372 raise EngineException(
373 "Found more than one descriptor file at package descriptor tar.gz"
375 descriptor_file_name
= tarname
376 if not descriptor_file_name
:
377 raise EngineException(
378 "Not found any descriptor file at package descriptor tar.gz"
380 storage
["descriptor"] = descriptor_file_name
381 storage
["zipfile"] = filename
382 self
.fs
.file_extract(tar
, proposed_revision_path
)
383 with self
.fs
.file_open(
384 (proposed_revision_path
, descriptor_file_name
), "r"
385 ) as descriptor_file
:
386 content
= descriptor_file
.read()
387 elif compressed
== "zip":
388 zipfile
= ZipFile(file_pkg
)
389 descriptor_file_name
= None
390 for package_file
in zipfile
.infolist():
391 zipfilename
= package_file
.filename
392 file_path
= zipfilename
.split("/")
394 not file_path
[0] or ".." in zipfilename
395 ): # if start with "/" means absolute path
396 raise EngineException(
397 "Absolute path or '..' are not allowed for package descriptor zip"
402 zipfilename
.endswith(".yaml")
403 or zipfilename
.endswith(".json")
404 or zipfilename
.endswith(".yml")
406 zipfilename
.find("/") < 0
407 or zipfilename
.find("Definitions") >= 0
410 storage
["pkg-dir"] = ""
411 if descriptor_file_name
:
412 raise EngineException(
413 "Found more than one descriptor file at package descriptor zip"
415 descriptor_file_name
= zipfilename
416 if not descriptor_file_name
:
417 raise EngineException(
418 "Not found any descriptor file at package descriptor zip"
420 storage
["descriptor"] = descriptor_file_name
421 storage
["zipfile"] = filename
422 self
.fs
.file_extract(zipfile
, proposed_revision_path
)
424 with self
.fs
.file_open(
425 (proposed_revision_path
, descriptor_file_name
), "r"
426 ) as descriptor_file
:
427 content
= descriptor_file
.read()
429 content
= file_pkg
.read()
430 storage
["descriptor"] = descriptor_file_name
= filename
432 if descriptor_file_name
.endswith(".json"):
433 error_text
= "Invalid json format "
434 indata
= json
.load(content
)
436 error_text
= "Invalid yaml format "
437 indata
= yaml
.load(content
, Loader
=yaml
.SafeLoader
)
439 # Need to close the file package here so it can be copied from the
440 # revision to the current, unrevisioned record
445 # Fetch both the incoming, proposed revision and the original revision so we
446 # can call a validate method to compare them
447 current_revision_path
= _id
+ "/"
448 self
.fs
.sync(from_path
=current_revision_path
)
449 self
.fs
.sync(from_path
=proposed_revision_path
)
453 self
._validate
_descriptor
_changes
(
454 descriptor_file_name
,
455 current_revision_path
,
456 proposed_revision_path
)
457 except Exception as e
:
458 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
459 shutil
.rmtree(self
.fs
.path
+ proposed_revision_path
, ignore_errors
=True)
460 # Only delete the new revision. We need to keep the original version in place
461 # as it has not been changed.
462 self
.fs
.file_delete(proposed_revision_path
, ignore_non_exist
=True)
465 # Copy the revision to the active package name by its original id
466 shutil
.rmtree(self
.fs
.path
+ current_revision_path
, ignore_errors
=True)
467 os
.rename(self
.fs
.path
+ proposed_revision_path
, self
.fs
.path
+ current_revision_path
)
468 self
.fs
.file_delete(current_revision_path
, ignore_non_exist
=True)
469 self
.fs
.mkdir(current_revision_path
)
470 self
.fs
.reverse_sync(from_path
=current_revision_path
)
471 shutil
.rmtree(self
.fs
.path
+ _id
)
473 current_desc
["_admin"]["storage"] = storage
474 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
475 current_desc
["_admin"]["operationalState"] = "ENABLED"
477 indata
= self
._remove
_envelop
(indata
)
479 # Override descriptor with query string kwargs
481 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
483 deep_update_rfc7396(current_desc
, indata
)
484 current_desc
= self
.check_conflict_on_edit(
485 session
, current_desc
, indata
, _id
=_id
487 current_desc
["_admin"]["modified"] = time()
488 current_desc
["_admin"]["revision"] = revision
489 self
.db
.replace(self
.topic
, _id
, current_desc
)
491 # Store a copy of the package as a point in time revision
492 revision_desc
= dict(current_desc
)
493 revision_desc
["_id"] = _id
+ ":" + str(revision_desc
["_admin"]["revision"])
494 self
.db
.create(self
.topic
+ "_revisions", revision_desc
)
497 self
._send
_msg
("edited", indata
)
499 # TODO if descriptor has changed because kwargs update content and remove cached zip
500 # TODO if zip is not present creates one
503 except EngineException
:
506 raise EngineException(
507 "invalid Content-Range header format. Expected 'bytes start-end/total'",
508 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
511 raise EngineException(
512 "invalid upload transaction sequence: '{}'".format(e
),
513 HTTPStatus
.BAD_REQUEST
,
515 except tarfile
.ReadError
as e
:
516 raise EngineException(
517 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
519 except (ValueError, yaml
.YAMLError
) as e
:
520 raise EngineException(error_text
+ str(e
))
521 except ValidationError
as e
:
522 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
527 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
529 Return the file content of a vnfd or nsd
530 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
531 :param _id: Identity of the vnfd, nsd
532 :param path: artifact path or "$DESCRIPTOR" or None
533 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
534 :return: opened file plus Accept format or raises an exception
536 accept_text
= accept_zip
= False
538 if "text/plain" in accept_header
or "*/*" in accept_header
:
540 if "application/zip" in accept_header
or "*/*" in accept_header
:
541 accept_zip
= "application/zip"
542 elif "application/gzip" in accept_header
:
543 accept_zip
= "application/gzip"
545 if not accept_text
and not accept_zip
:
546 raise EngineException(
547 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
548 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
551 content
= self
.show(session
, _id
)
552 if content
["_admin"]["onboardingState"] != "ONBOARDED":
553 raise EngineException(
554 "Cannot get content because this resource is not at 'ONBOARDED' state. "
555 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
556 http_code
=HTTPStatus
.CONFLICT
,
558 storage
= content
["_admin"]["storage"]
559 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
560 if not storage
.get("pkg-dir"):
561 raise EngineException(
562 "Packages does not contains artifacts",
563 http_code
=HTTPStatus
.BAD_REQUEST
,
565 if self
.fs
.file_exists(
566 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
568 folder_content
= self
.fs
.dir_ls(
569 (storage
["folder"], storage
["pkg-dir"], *path
)
571 return folder_content
, "text/plain"
572 # TODO manage folders in http
576 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
578 "application/octet-stream",
581 # pkgtype accept ZIP TEXT -> result
582 # manyfiles yes X -> zip
584 # onefile yes no -> zip
586 contain_many_files
= False
587 if storage
.get("pkg-dir"):
588 # check if there are more than one file in the package, ignoring checksums.txt.
589 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
590 if len(pkg_files
) >= 3 or (
591 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
593 contain_many_files
= True
594 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
596 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
599 elif contain_many_files
and not accept_zip
:
600 raise EngineException(
601 "Packages that contains several files need to be retrieved with 'application/zip'"
603 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
606 if not storage
.get("zipfile"):
607 # TODO generate zipfile if not present
608 raise EngineException(
609 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
611 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
614 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
618 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
620 for k
, v
in descriptor
.items():
622 if isinstance(v
, dict):
623 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
624 elif isinstance(v
, list):
627 if isinstance(x
, dict):
628 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
631 new_descriptor
[k
.split(":")[-1]] = new_v
632 return new_descriptor
634 def pyangbind_validation(self
, item
, data
, force
=False):
635 raise EngineException(
636 "Not possible to validate '{}' item".format(item
),
637 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
640 def _validate_input_edit(self
, indata
, content
, force
=False):
641 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
644 if "_admin" not in indata
:
645 indata
["_admin"] = {}
647 if "operationalState" in indata
:
648 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
649 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
651 raise EngineException(
652 "State '{}' is not a valid operational state".format(
653 indata
["operationalState"]
655 http_code
=HTTPStatus
.BAD_REQUEST
,
658 # In the case of user defined data, we need to put the data in the root of the object
659 # to preserve current expected behaviour
660 if "userDefinedData" in indata
:
661 data
= indata
.pop("userDefinedData")
662 if type(data
) == dict:
663 indata
["_admin"]["userDefinedData"] = data
665 raise EngineException(
666 "userDefinedData should be an object, but is '{}' instead".format(
669 http_code
=HTTPStatus
.BAD_REQUEST
,
673 "operationalState" in indata
["_admin"]
674 and content
["_admin"]["operationalState"]
675 == indata
["_admin"]["operationalState"]
677 raise EngineException(
678 "operationalState already {}".format(
679 content
["_admin"]["operationalState"]
681 http_code
=HTTPStatus
.CONFLICT
,
686 def _validate_descriptor_changes(self
,
687 descriptor_file_name
,
688 old_descriptor_directory
,
689 new_descriptor_directory
):
690 # Todo: compare changes and throw a meaningful exception for the user to understand
692 # raise EngineException(
693 # "Error in validating new descriptor: <NODE> cannot be modified",
694 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptors (SOL006)."""

    # NOTE(review): the class-attribute lines (original 699-701) are missing
    # from the mangled source; 'vnfds'/'vnfd' match the collection names used
    # elsewhere in this file — confirm against the original.
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        """Delegate backend wiring to DescriptorTopic."""
        super().__init__(db, fs, msg, auth)
705 def pyangbind_validation(self
, item
, data
, force
=False):
706 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
707 raise EngineException(
708 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
709 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
712 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
713 pybindJSONDecoder
.load_ietf_json(
714 {"etsi-nfv-vnfd:vnfd": data
},
721 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
722 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
723 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
724 return utils
.deep_update_dict(data
, desc_out
)
725 except Exception as e
:
726 raise EngineException(
727 "Error in pyangbind validation: {}".format(str(e
)),
728 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
732 def _descriptor_data_is_in_old_format(data
):
733 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
736 def _remove_envelop(indata
=None):
739 clean_indata
= indata
741 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
742 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
743 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
744 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
745 elif clean_indata
.get("vnfd"):
746 if not isinstance(clean_indata
["vnfd"], dict):
747 raise EngineException("'vnfd' must be dict")
748 clean_indata
= clean_indata
["vnfd"]
752 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
753 final_content
= super().check_conflict_on_edit(
754 session
, final_content
, edit_content
, _id
760 for vdu
in get_iterable(final_content
.get("vdu")):
761 if vdu
.get("pdu-type"):
766 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
768 final_content
["_admin"]["type"] = "vnfd"
769 # if neither vud nor pdu do not fill type
772 def check_conflict_on_del(self
, session
, _id
, db_content
):
774 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
775 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
777 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
778 :param _id: vnfd internal id
779 :param db_content: The database content of the _id.
780 :return: None or raises EngineException with the conflict
784 descriptor
= db_content
785 descriptor_id
= descriptor
.get("id")
786 if not descriptor_id
: # empty vnfd not uploaded
789 _filter
= self
._get
_project
_filter
(session
)
791 # check vnfrs using this vnfd
792 _filter
["vnfd-id"] = _id
793 if self
.db
.get_list("vnfrs", _filter
):
794 raise EngineException(
795 "There is at least one VNF instance using this descriptor",
796 http_code
=HTTPStatus
.CONFLICT
,
799 # check NSD referencing this VNFD
800 del _filter
["vnfd-id"]
801 _filter
["vnfd-id"] = descriptor_id
802 if self
.db
.get_list("nsds", _filter
):
803 raise EngineException(
804 "There is at least one NS package referencing this descriptor",
805 http_code
=HTTPStatus
.CONFLICT
,
808 def _validate_input_new(self
, indata
, storage_params
, force
=False):
809 indata
.pop("onboardingState", None)
810 indata
.pop("operationalState", None)
811 indata
.pop("usageState", None)
812 indata
.pop("links", None)
814 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
815 # Cross references validation in the descriptor
817 self
.validate_mgmt_interface_connection_point(indata
)
819 for vdu
in get_iterable(indata
.get("vdu")):
820 self
.validate_vdu_internal_connection_points(vdu
)
821 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
822 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
824 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
826 self
.validate_external_connection_points(indata
)
827 self
.validate_internal_virtual_links(indata
)
828 self
.validate_monitoring_params(indata
)
829 self
.validate_scaling_group_descriptor(indata
)
834 def validate_mgmt_interface_connection_point(indata
):
835 if not indata
.get("vdu"):
837 if not indata
.get("mgmt-cp"):
838 raise EngineException(
839 "'mgmt-cp' is a mandatory field and it is not defined",
840 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
843 for cp
in get_iterable(indata
.get("ext-cpd")):
844 if cp
["id"] == indata
["mgmt-cp"]:
847 raise EngineException(
848 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
849 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
853 def validate_vdu_internal_connection_points(vdu
):
855 for cpd
in get_iterable(vdu
.get("int-cpd")):
856 cpd_id
= cpd
.get("id")
857 if cpd_id
and cpd_id
in int_cpds
:
858 raise EngineException(
859 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
862 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
867 def validate_external_connection_points(indata
):
868 all_vdus_int_cpds
= set()
869 for vdu
in get_iterable(indata
.get("vdu")):
870 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
871 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
874 for cpd
in get_iterable(indata
.get("ext-cpd")):
875 cpd_id
= cpd
.get("id")
876 if cpd_id
and cpd_id
in ext_cpds
:
877 raise EngineException(
878 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
879 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
883 int_cpd
= cpd
.get("int-cpd")
885 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
886 raise EngineException(
887 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
890 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
892 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
894 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
895 for df
in indata
["df"]:
897 "lcm-operations-configuration" in df
898 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
900 configs
= df
["lcm-operations-configuration"][
901 "operate-vnf-op-config"
903 vdus
= df
.get("vdu-profile", [])
905 for config
in configs
:
906 if config
["id"] == vdu
["id"] and utils
.find_in_list(
907 config
.get("execution-environment-list", []),
908 lambda ee
: "juju" in ee
,
910 if not self
._validate
_package
_folders
(
911 storage_params
, "charms"
912 ) and not self
._validate
_package
_folders
(
913 storage_params
, "Scripts/charms"
915 raise EngineException(
916 "Charm defined in vnf[id={}] but not present in "
917 "package".format(indata
["id"])
920 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
921 if not vdu
.get("cloud-init-file"):
923 if not self
._validate
_package
_folders
(
924 storage_params
, "cloud_init", vdu
["cloud-init-file"]
925 ) and not self
._validate
_package
_folders
(
926 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
928 raise EngineException(
929 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
930 "package".format(indata
["id"], vdu
["id"])
933 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
934 # Get VNF configuration through new container
935 for deployment_flavor
in indata
.get("df", []):
936 if "lcm-operations-configuration" not in deployment_flavor
:
939 "operate-vnf-op-config"
940 not in deployment_flavor
["lcm-operations-configuration"]
943 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
944 "operate-vnf-op-config"
946 if day_1_2_config
["id"] == indata
["id"]:
947 if utils
.find_in_list(
948 day_1_2_config
.get("execution-environment-list", []),
949 lambda ee
: "juju" in ee
,
951 if not self
._validate
_package
_folders
(
952 storage_params
, "charms"
953 ) and not self
._validate
_package
_folders
(
954 storage_params
, "Scripts/charms"
956 raise EngineException(
957 "Charm defined in vnf[id={}] but not present in "
958 "package".format(indata
["id"])
961 def _validate_package_folders(self
, storage_params
, folder
, file=None):
962 if not storage_params
:
964 elif not storage_params
.get("pkg-dir"):
965 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
967 storage_params
["folder"], folder
971 storage_params
["folder"], folder
974 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
976 if self
.fs
.file_exists(f
, "dir"):
977 if self
.fs
.dir_ls(f
):
981 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
982 f
= "{}_/{}/{}".format(
983 storage_params
["folder"], storage_params
["pkg-dir"], folder
986 f
= "{}/{}/{}".format(
987 storage_params
["folder"], storage_params
["pkg-dir"], folder
990 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
992 if self
.fs
.file_exists(f
, "dir"):
993 if self
.fs
.dir_ls(f
):
998 def validate_internal_virtual_links(indata
):
1000 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1001 ivld_id
= ivld
.get("id")
1002 if ivld_id
and ivld_id
in all_ivld_ids
:
1003 raise EngineException(
1004 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id
),
1005 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1008 all_ivld_ids
.add(ivld_id
)
1010 for vdu
in get_iterable(indata
.get("vdu")):
1011 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
1012 int_cpd_ivld_id
= int_cpd
.get("int-virtual-link-desc")
1013 if int_cpd_ivld_id
and int_cpd_ivld_id
not in all_ivld_ids
:
1014 raise EngineException(
1015 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1016 "int-virtual-link-desc".format(
1017 vdu
["id"], int_cpd
["id"], int_cpd_ivld_id
1019 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1022 for df
in get_iterable(indata
.get("df")):
1023 for vlp
in get_iterable(df
.get("virtual-link-profile")):
1024 vlp_ivld_id
= vlp
.get("id")
1025 if vlp_ivld_id
and vlp_ivld_id
not in all_ivld_ids
:
1026 raise EngineException(
1027 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1028 "int-virtual-link-desc".format(df
["id"], vlp_ivld_id
),
1029 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1033 def validate_monitoring_params(indata
):
1034 all_monitoring_params
= set()
1035 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1036 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1037 mp_id
= mp
.get("id")
1038 if mp_id
and mp_id
in all_monitoring_params
:
1039 raise EngineException(
1040 "Duplicated monitoring-parameter id in "
1041 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1044 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1047 all_monitoring_params
.add(mp_id
)
1049 for vdu
in get_iterable(indata
.get("vdu")):
1050 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1051 mp_id
= mp
.get("id")
1052 if mp_id
and mp_id
in all_monitoring_params
:
1053 raise EngineException(
1054 "Duplicated monitoring-parameter id in "
1055 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1058 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1061 all_monitoring_params
.add(mp_id
)
1063 for df
in get_iterable(indata
.get("df")):
1064 for mp
in get_iterable(df
.get("monitoring-parameter")):
1065 mp_id
= mp
.get("id")
1066 if mp_id
and mp_id
in all_monitoring_params
:
1067 raise EngineException(
1068 "Duplicated monitoring-parameter id in "
1069 "df[id='{}']:monitoring-parameter[id='{}']".format(
1072 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1075 all_monitoring_params
.add(mp_id
)
1078 def validate_scaling_group_descriptor(indata
):
1079 all_monitoring_params
= set()
1080 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
1081 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
1082 all_monitoring_params
.add(mp
.get("id"))
1084 for vdu
in get_iterable(indata
.get("vdu")):
1085 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
1086 all_monitoring_params
.add(mp
.get("id"))
1088 for df
in get_iterable(indata
.get("df")):
1089 for mp
in get_iterable(df
.get("monitoring-parameter")):
1090 all_monitoring_params
.add(mp
.get("id"))
1092 for df
in get_iterable(indata
.get("df")):
1093 for sa
in get_iterable(df
.get("scaling-aspect")):
1094 for sp
in get_iterable(sa
.get("scaling-policy")):
1095 for sc
in get_iterable(sp
.get("scaling-criteria")):
1096 sc_monitoring_param
= sc
.get("vnf-monitoring-param-ref")
1099 and sc_monitoring_param
not in all_monitoring_params
1101 raise EngineException(
1102 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1103 "[name='{}']:scaling-criteria[name='{}']: "
1104 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1109 sc_monitoring_param
,
1111 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1114 for sca
in get_iterable(sa
.get("scaling-config-action")):
1116 "lcm-operations-configuration" not in df
1117 or "operate-vnf-op-config"
1118 not in df
["lcm-operations-configuration"]
1119 or not utils
.find_in_list(
1120 df
["lcm-operations-configuration"][
1121 "operate-vnf-op-config"
1122 ].get("day1-2", []),
1123 lambda config
: config
["id"] == indata
["id"],
1126 raise EngineException(
1127 "'day1-2 configuration' not defined in the descriptor but it is "
1128 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1131 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1133 for configuration
in get_iterable(
1134 df
["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1138 for primitive
in get_iterable(
1139 configuration
.get("config-primitive")
1143 == sca
["vnf-config-primitive-name-ref"]
1147 raise EngineException(
1148 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1149 "config-primitive-name-ref='{}' does not match any "
1150 "day1-2 configuration:config-primitive:name".format(
1153 sca
["vnf-config-primitive-name-ref"],
1155 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1158 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
1160 Deletes associate file system storage (via super)
1161 Deletes associated vnfpkgops from database.
1162 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1163 :param _id: server internal id
1164 :param db_content: The database content of the descriptor
1166 :raises: FsException in case of error while deleting associated storage
1168 super().delete_extra(session
, _id
, db_content
, not_send_msg
)
1169 self
.db
.del_list("vnfpkgops", {"vnfPkgId": _id
})
1170 self
.db
.del_list(self
.topic
+"_revisions", {"_id": {"$regex": _id
}})
1172 def sol005_projection(self
, data
):
1173 data
["onboardingState"] = data
["_admin"]["onboardingState"]
1174 data
["operationalState"] = data
["_admin"]["operationalState"]
1175 data
["usageState"] = data
["_admin"]["usageState"]
1178 links
["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data
["_id"])}
1179 links
["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data
["_id"])}
1180 links
["packageContent"] = {
1181 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data
["_id"])
1183 data
["_links"] = links
1185 return super().sol005_projection(data
)
class NsdTopic(DescriptorTopic):
    """SOL005 NS-descriptor (NSD) topic: SOL006 validation, cross-reference
    checks against stored VNFDs, conflict detection and SOL005 projection."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 model with pyangbind.

        :param item: topic item name (kept for interface parity)
        :param data: candidate NSD as a dict
        :param force: when True, unknown leafs are skipped instead of rejected
        :return: the normalized descriptor produced by the pyangbind round-trip
        :raises: EngineException (422) when the descriptor does not validate
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile is saved and restored after the round-trip —
            # presumably pyangbind alters it; TODO confirm
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # pre-SOL006 descriptors carried an nsd-catalog envelope
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nsd'/'etsi-nfv-nsd:nsd' envelope and return the inner NSD."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: drop read-only SOL005 fields, run pyangbind, then
        check internal cross-references."""
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt network VLD."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check each df:vnf-profile:vnfd-id is listed in the NSD's vnfd-id set."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if type(data) == dict:
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return {vnfd-id: vnfd-record} for every constituent VNFD of the NSD,
        raising if any referenced vnfd does not exist in this project."""
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check each constituent-cpd-id references an existing ext-cpd of the
        constituent VNFD."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base-class conflict checks, then NSD dependency validation."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    def sol005_projection(self, data):
        """Map internal _admin state onto the SOL005 NSD view and add _links."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
class NstTopic(DescriptorTopic):
    """Network Slice Template (NST) topic: pyangbind validation, dependency
    checks against stored NSDs, conflict detection and SOL005 projection."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST against the NST information model with pyangbind.

        :param item: topic item name (kept for interface parity)
        :param data: candidate NST as a dict
        :param force: when True, unknown leafs are skipped instead of rejected
        :return: the normalized descriptor produced by the pyangbind round-trip
        :raises: EngineException (422) when the descriptor does not validate
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nst'/'nst:nst' envelope and return the inner NST."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Drop read-only SOL005 fields and validate the NST with pyangbind."""
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base-class conflict checks, then NST dependency validation."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Map internal _admin state onto the SOL005 NST view and add _links."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
class PduTopic(BaseTopic):
    """Physical Deployment Unit (PDU) descriptor topic."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply base formatting, then set the initial _admin state fields."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "ENABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        _filter = self._get_project_filter(session)
        _filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
class VnfPkgOpTopic(BaseTopic):
    """VNF package operation occurrences topic (create-only: edit/delete are
    rejected with METHOD_NOT_ALLOWED)."""

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # locate the target KDU inside the VNFD (for/else: not found -> error)
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # a repo prefix ("repo/chart") identifies the k8s repository to use
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None