1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
22 from hashlib
import md5
23 from osm_common
.dbbase
import DbException
, deep_update_rfc7396
24 from http
import HTTPStatus
26 from uuid
import uuid4
27 from re
import fullmatch
28 from zipfile
import ZipFile
29 from osm_nbi
.validation
import (
36 from osm_nbi
.base_topic
import BaseTopic
, EngineException
, get_iterable
37 from osm_im
import etsi_nfv_vnfd
, etsi_nfv_nsd
38 from osm_im
.nst
import nst
as nst_im
39 from pyangbind
.lib
.serialise
import pybindJSONDecoder
40 import pyangbind
.lib
.pybindJSON
as pybindJSON
41 from osm_nbi
import utils
# Module authorship metadata for the OSM NBI descriptor topics module.
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
46 class DescriptorTopic(BaseTopic
):
47 def __init__(self
, db
, fs
, msg
, auth
):
48 BaseTopic
.__init
__(self
, db
, fs
, msg
, auth
)
50 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
51 final_content
= super().check_conflict_on_edit(
52 session
, final_content
, edit_content
, _id
55 def _check_unique_id_name(descriptor
, position
=""):
56 for desc_key
, desc_item
in descriptor
.items():
57 if isinstance(desc_item
, list) and desc_item
:
60 for index
, list_item
in enumerate(desc_item
):
61 if isinstance(list_item
, dict):
62 _check_unique_id_name(
63 list_item
, "{}.{}[{}]".format(position
, desc_key
, index
)
67 list_item
.get("id") or list_item
.get("name")
69 desc_item_id
= "id" if list_item
.get("id") else "name"
70 if desc_item_id
and list_item
.get(desc_item_id
):
71 if list_item
[desc_item_id
] in used_ids
:
72 position
= "{}.{}[{}]".format(
73 position
, desc_key
, index
75 raise EngineException(
76 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
78 list_item
[desc_item_id
],
81 HTTPStatus
.UNPROCESSABLE_ENTITY
,
83 used_ids
.append(list_item
[desc_item_id
])
85 _check_unique_id_name(final_content
)
86 # 1. validate again with pyangbind
87 # 1.1. remove internal keys
89 for k
in ("_id", "_admin"):
90 if k
in final_content
:
91 internal_keys
[k
] = final_content
.pop(k
)
92 storage_params
= internal_keys
["_admin"].get("storage")
93 serialized
= self
._validate
_input
_new
(
94 final_content
, storage_params
, session
["force"]
97 # 1.2. modify final_content with a serialized version
98 final_content
= copy
.deepcopy(serialized
)
99 # 1.3. restore internal keys
100 for k
, v
in internal_keys
.items():
105 # 2. check that this id is not present
106 if "id" in edit_content
:
107 _filter
= self
._get
_project
_filter
(session
)
109 _filter
["id"] = final_content
["id"]
110 _filter
["_id.neq"] = _id
112 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
113 raise EngineException(
114 "{} with id '{}' already exists for this project".format(
115 self
.topic
[:-1], final_content
["id"]
123 def format_on_new(content
, project_id
=None, make_public
=False):
124 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
125 content
["_admin"]["onboardingState"] = "CREATED"
126 content
["_admin"]["operationalState"] = "DISABLED"
127 content
["_admin"]["usageState"] = "NOT_IN_USE"
129 def delete_extra(self
, session
, _id
, db_content
, not_send_msg
=None):
131 Deletes file system storage associated with the descriptor
132 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
133 :param _id: server internal id
134 :param db_content: The database content of the descriptor
135 :param not_send_msg: To not send message (False) or store content (list) instead
136 :return: None if ok or raises EngineException with the problem
138 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
139 self
.fs
.file_delete(_id
+ "_", ignore_non_exist
=True) # remove temp folder
142 def get_one_by_id(db
, session
, topic
, id):
143 # find owned by this project
144 _filter
= BaseTopic
._get
_project
_filter
(session
)
146 desc_list
= db
.get_list(topic
, _filter
)
147 if len(desc_list
) == 1:
149 elif len(desc_list
) > 1:
151 "Found more than one {} with id='{}' belonging to this project".format(
157 # not found any: try to find public
158 _filter
= BaseTopic
._get
_project
_filter
(session
)
160 desc_list
= db
.get_list(topic
, _filter
)
163 "Not found any {} with id='{}'".format(topic
[:-1], id),
164 HTTPStatus
.NOT_FOUND
,
166 elif len(desc_list
) == 1:
170 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
176 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None):
178 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
179 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
180 (self.upload_content)
181 :param rollback: list to append created items at database in case a rollback may to be done
182 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
183 :param indata: data to be inserted
184 :param kwargs: used to override the indata descriptor
185 :param headers: http request headers
186 :return: _id, None: identity of the inserted data; and None as there is not any operation
189 # No needed to capture exceptions
191 self
.check_quota(session
)
195 if "userDefinedData" in indata
:
196 indata
= indata
["userDefinedData"]
198 # Override descriptor with query string kwargs
199 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
200 # uncomment when this method is implemented.
201 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
202 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
204 content
= {"_admin": {"userDefinedData": indata
}}
206 content
, session
["project_id"], make_public
=session
["public"]
208 _id
= self
.db
.create(self
.topic
, content
)
209 rollback
.append({"topic": self
.topic
, "_id": _id
})
210 self
._send
_msg
("created", {"_id": _id
})
213 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
):
215 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
216 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
217 :param _id : the nsd,vnfd is already created, this is the id
218 :param indata: http body request
219 :param kwargs: user query string to override parameters. NOT USED
220 :param headers: http request headers
221 :return: True if package is completely uploaded or False if partial content has been uploded
222 Raise exception on error
224 # Check that _id exists and it is valid
225 current_desc
= self
.show(session
, _id
)
227 content_range_text
= headers
.get("Content-Range")
228 expected_md5
= headers
.get("Content-File-MD5")
230 content_type
= headers
.get("Content-Type")
233 and "application/gzip" in content_type
234 or "application/x-gzip" in content_type
239 and "application/zip" in content_type
242 filename
= headers
.get("Content-Filename")
243 if not filename
and compressed
:
244 filename
= "package.tar.gz" if compressed
== "gzip" else "package.zip"
248 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
252 if content_range_text
:
254 content_range_text
.replace("-", " ").replace("/", " ").split()
257 content_range
[0] != "bytes"
258 ): # TODO check x<y not negative < total....
260 start
= int(content_range
[1])
261 end
= int(content_range
[2]) + 1
262 total
= int(content_range
[3])
267 ) # all the content is upload here and if ok, it is rename from id_ to is folder
270 if not self
.fs
.file_exists(temp_folder
, "dir"):
271 raise EngineException(
272 "invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
275 self
.fs
.file_delete(temp_folder
, ignore_non_exist
=True)
276 self
.fs
.mkdir(temp_folder
)
278 storage
= self
.fs
.get_params()
279 storage
["folder"] = _id
281 file_path
= (temp_folder
, filename
)
282 if self
.fs
.file_exists(file_path
, "file"):
283 file_size
= self
.fs
.file_size(file_path
)
286 if file_size
!= start
:
287 raise EngineException(
288 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
291 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
293 file_pkg
= self
.fs
.file_open(file_path
, "a+b")
294 if isinstance(indata
, dict):
295 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
296 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
300 indata_text
= indata
.read(4096)
301 indata_len
+= len(indata_text
)
304 file_pkg
.write(indata_text
)
305 if content_range_text
:
306 if indata_len
!= end
- start
:
307 raise EngineException(
308 "Mismatch between Content-Range header {}-{} and body length of {}".format(
309 start
, end
- 1, indata_len
311 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
314 # TODO update to UPLOADING
321 chunk_data
= file_pkg
.read(1024)
323 file_md5
.update(chunk_data
)
324 chunk_data
= file_pkg
.read(1024)
325 if expected_md5
!= file_md5
.hexdigest():
326 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
328 if compressed
== "gzip":
329 tar
= tarfile
.open(mode
="r", fileobj
=file_pkg
)
330 descriptor_file_name
= None
332 tarname
= tarinfo
.name
333 tarname_path
= tarname
.split("/")
335 not tarname_path
[0] or ".." in tarname_path
336 ): # if start with "/" means absolute path
337 raise EngineException(
338 "Absolute path or '..' are not allowed for package descriptor tar.gz"
340 if len(tarname_path
) == 1 and not tarinfo
.isdir():
341 raise EngineException(
342 "All files must be inside a dir for package descriptor tar.gz"
345 tarname
.endswith(".yaml")
346 or tarname
.endswith(".json")
347 or tarname
.endswith(".yml")
349 storage
["pkg-dir"] = tarname_path
[0]
350 if len(tarname_path
) == 2:
351 if descriptor_file_name
:
352 raise EngineException(
353 "Found more than one descriptor file at package descriptor tar.gz"
355 descriptor_file_name
= tarname
356 if not descriptor_file_name
:
357 raise EngineException(
358 "Not found any descriptor file at package descriptor tar.gz"
360 storage
["descriptor"] = descriptor_file_name
361 storage
["zipfile"] = filename
362 self
.fs
.file_extract(tar
, temp_folder
)
363 with self
.fs
.file_open(
364 (temp_folder
, descriptor_file_name
), "r"
365 ) as descriptor_file
:
366 content
= descriptor_file
.read()
367 elif compressed
== "zip":
368 zipfile
= ZipFile(file_pkg
)
369 descriptor_file_name
= None
370 for package_file
in zipfile
.infolist():
371 zipfilename
= package_file
.filename
372 file_path
= zipfilename
.split("/")
374 not file_path
[0] or ".." in zipfilename
375 ): # if start with "/" means absolute path
376 raise EngineException(
377 "Absolute path or '..' are not allowed for package descriptor zip"
382 zipfilename
.endswith(".yaml")
383 or zipfilename
.endswith(".json")
384 or zipfilename
.endswith(".yml")
386 zipfilename
.find("/") < 0
387 or zipfilename
.find("Definitions") >= 0
390 storage
["pkg-dir"] = ""
391 if descriptor_file_name
:
392 raise EngineException(
393 "Found more than one descriptor file at package descriptor zip"
395 descriptor_file_name
= zipfilename
396 if not descriptor_file_name
:
397 raise EngineException(
398 "Not found any descriptor file at package descriptor zip"
400 storage
["descriptor"] = descriptor_file_name
401 storage
["zipfile"] = filename
402 self
.fs
.file_extract(zipfile
, temp_folder
)
404 with self
.fs
.file_open(
405 (temp_folder
, descriptor_file_name
), "r"
406 ) as descriptor_file
:
407 content
= descriptor_file
.read()
409 content
= file_pkg
.read()
410 storage
["descriptor"] = descriptor_file_name
= filename
412 if descriptor_file_name
.endswith(".json"):
413 error_text
= "Invalid json format "
414 indata
= json
.load(content
)
416 error_text
= "Invalid yaml format "
417 indata
= yaml
.load(content
, Loader
=yaml
.SafeLoader
)
419 current_desc
["_admin"]["storage"] = storage
420 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
421 current_desc
["_admin"]["operationalState"] = "ENABLED"
423 indata
= self
._remove
_envelop
(indata
)
425 # Override descriptor with query string kwargs
427 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
429 deep_update_rfc7396(current_desc
, indata
)
430 current_desc
= self
.check_conflict_on_edit(
431 session
, current_desc
, indata
, _id
=_id
433 current_desc
["_admin"]["modified"] = time()
434 self
.db
.replace(self
.topic
, _id
, current_desc
)
435 self
.fs
.dir_rename(temp_folder
, _id
)
438 self
._send
_msg
("edited", indata
)
440 # TODO if descriptor has changed because kwargs update content and remove cached zip
441 # TODO if zip is not present creates one
444 except EngineException
:
447 raise EngineException(
448 "invalid Content-Range header format. Expected 'bytes start-end/total'",
449 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
,
452 raise EngineException(
453 "invalid upload transaction sequence: '{}'".format(e
),
454 HTTPStatus
.BAD_REQUEST
,
456 except tarfile
.ReadError
as e
:
457 raise EngineException(
458 "invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
460 except (ValueError, yaml
.YAMLError
) as e
:
461 raise EngineException(error_text
+ str(e
))
462 except ValidationError
as e
:
463 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
468 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
470 Return the file content of a vnfd or nsd
471 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
472 :param _id: Identity of the vnfd, nsd
473 :param path: artifact path or "$DESCRIPTOR" or None
474 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
475 :return: opened file plus Accept format or raises an exception
477 accept_text
= accept_zip
= False
479 if "text/plain" in accept_header
or "*/*" in accept_header
:
481 if "application/zip" in accept_header
or "*/*" in accept_header
:
482 accept_zip
= "application/zip"
483 elif "application/gzip" in accept_header
:
484 accept_zip
= "application/gzip"
486 if not accept_text
and not accept_zip
:
487 raise EngineException(
488 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
489 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
492 content
= self
.show(session
, _id
)
493 if content
["_admin"]["onboardingState"] != "ONBOARDED":
494 raise EngineException(
495 "Cannot get content because this resource is not at 'ONBOARDED' state. "
496 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
497 http_code
=HTTPStatus
.CONFLICT
,
499 storage
= content
["_admin"]["storage"]
500 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
501 if not storage
.get("pkg-dir"):
502 raise EngineException(
503 "Packages does not contains artifacts",
504 http_code
=HTTPStatus
.BAD_REQUEST
,
506 if self
.fs
.file_exists(
507 (storage
["folder"], storage
["pkg-dir"], *path
), "dir"
509 folder_content
= self
.fs
.dir_ls(
510 (storage
["folder"], storage
["pkg-dir"], *path
)
512 return folder_content
, "text/plain"
513 # TODO manage folders in http
517 (storage
["folder"], storage
["pkg-dir"], *path
), "rb"
519 "application/octet-stream",
522 # pkgtype accept ZIP TEXT -> result
523 # manyfiles yes X -> zip
525 # onefile yes no -> zip
527 contain_many_files
= False
528 if storage
.get("pkg-dir"):
529 # check if there are more than one file in the package, ignoring checksums.txt.
530 pkg_files
= self
.fs
.dir_ls((storage
["folder"], storage
["pkg-dir"]))
531 if len(pkg_files
) >= 3 or (
532 len(pkg_files
) == 2 and "checksums.txt" not in pkg_files
534 contain_many_files
= True
535 if accept_text
and (not contain_many_files
or path
== "$DESCRIPTOR"):
537 self
.fs
.file_open((storage
["folder"], storage
["descriptor"]), "r"),
540 elif contain_many_files
and not accept_zip
:
541 raise EngineException(
542 "Packages that contains several files need to be retrieved with 'application/zip'"
544 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
547 if not storage
.get("zipfile"):
548 # TODO generate zipfile if not present
549 raise EngineException(
550 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
552 http_code
=HTTPStatus
.NOT_ACCEPTABLE
,
555 self
.fs
.file_open((storage
["folder"], storage
["zipfile"]), "rb"),
559 def _remove_yang_prefixes_from_descriptor(self
, descriptor
):
561 for k
, v
in descriptor
.items():
563 if isinstance(v
, dict):
564 new_v
= self
._remove
_yang
_prefixes
_from
_descriptor
(v
)
565 elif isinstance(v
, list):
568 if isinstance(x
, dict):
569 new_v
.append(self
._remove
_yang
_prefixes
_from
_descriptor
(x
))
572 new_descriptor
[k
.split(":")[-1]] = new_v
573 return new_descriptor
575 def pyangbind_validation(self
, item
, data
, force
=False):
576 raise EngineException(
577 "Not possible to validate '{}' item".format(item
),
578 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
,
581 def _validate_input_edit(self
, indata
, content
, force
=False):
582 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
585 if "_admin" not in indata
:
586 indata
["_admin"] = {}
588 if "operationalState" in indata
:
589 if indata
["operationalState"] in ("ENABLED", "DISABLED"):
590 indata
["_admin"]["operationalState"] = indata
.pop("operationalState")
592 raise EngineException(
593 "State '{}' is not a valid operational state".format(
594 indata
["operationalState"]
596 http_code
=HTTPStatus
.BAD_REQUEST
,
599 # In the case of user defined data, we need to put the data in the root of the object
600 # to preserve current expected behaviour
601 if "userDefinedData" in indata
:
602 data
= indata
.pop("userDefinedData")
603 if type(data
) == dict:
604 indata
["_admin"]["userDefinedData"] = data
606 raise EngineException(
607 "userDefinedData should be an object, but is '{}' instead".format(
610 http_code
=HTTPStatus
.BAD_REQUEST
,
614 "operationalState" in indata
["_admin"]
615 and content
["_admin"]["operationalState"]
616 == indata
["_admin"]["operationalState"]
618 raise EngineException(
619 "operationalState already {}".format(
620 content
["_admin"]["operationalState"]
622 http_code
=HTTPStatus
.CONFLICT
,
628 class VnfdTopic(DescriptorTopic
):
632 def __init__(self
, db
, fs
, msg
, auth
):
633 DescriptorTopic
.__init
__(self
, db
, fs
, msg
, auth
)
635 def pyangbind_validation(self
, item
, data
, force
=False):
636 if self
._descriptor
_data
_is
_in
_old
_format
(data
):
637 raise EngineException(
638 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
639 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
642 myvnfd
= etsi_nfv_vnfd
.etsi_nfv_vnfd()
643 pybindJSONDecoder
.load_ietf_json(
644 {"etsi-nfv-vnfd:vnfd": data
},
651 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
652 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
653 desc_out
= self
._remove
_yang
_prefixes
_from
_descriptor
(desc_out
)
654 return utils
.deep_update_dict(data
, desc_out
)
655 except Exception as e
:
656 raise EngineException(
657 "Error in pyangbind validation: {}".format(str(e
)),
658 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
662 def _descriptor_data_is_in_old_format(data
):
663 return ("vnfd-catalog" in data
) or ("vnfd:vnfd-catalog" in data
)
666 def _remove_envelop(indata
=None):
669 clean_indata
= indata
671 if clean_indata
.get("etsi-nfv-vnfd:vnfd"):
672 if not isinstance(clean_indata
["etsi-nfv-vnfd:vnfd"], dict):
673 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
674 clean_indata
= clean_indata
["etsi-nfv-vnfd:vnfd"]
675 elif clean_indata
.get("vnfd"):
676 if not isinstance(clean_indata
["vnfd"], dict):
677 raise EngineException("'vnfd' must be dict")
678 clean_indata
= clean_indata
["vnfd"]
682 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
):
683 final_content
= super().check_conflict_on_edit(
684 session
, final_content
, edit_content
, _id
690 for vdu
in get_iterable(final_content
.get("vdu")):
691 if vdu
.get("pdu-type"):
696 final_content
["_admin"]["type"] = "hnfd" if contains_vdu
else "pnfd"
698 final_content
["_admin"]["type"] = "vnfd"
699 # if neither vud nor pdu do not fill type
702 def check_conflict_on_del(self
, session
, _id
, db_content
):
704 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
705 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
707 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
708 :param _id: vnfd internal id
709 :param db_content: The database content of the _id.
710 :return: None or raises EngineException with the conflict
714 descriptor
= db_content
715 descriptor_id
= descriptor
.get("id")
716 if not descriptor_id
: # empty vnfd not uploaded
719 _filter
= self
._get
_project
_filter
(session
)
721 # check vnfrs using this vnfd
722 _filter
["vnfd-id"] = _id
723 if self
.db
.get_list("vnfrs", _filter
):
724 raise EngineException(
725 "There is at least one VNF instance using this descriptor",
726 http_code
=HTTPStatus
.CONFLICT
,
729 # check NSD referencing this VNFD
730 del _filter
["vnfd-id"]
731 _filter
["vnfd-id"] = descriptor_id
732 if self
.db
.get_list("nsds", _filter
):
733 raise EngineException(
734 "There is at least one NS package referencing this descriptor",
735 http_code
=HTTPStatus
.CONFLICT
,
738 def _validate_input_new(self
, indata
, storage_params
, force
=False):
739 indata
.pop("onboardingState", None)
740 indata
.pop("operationalState", None)
741 indata
.pop("usageState", None)
742 indata
.pop("links", None)
744 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
745 # Cross references validation in the descriptor
747 self
.validate_mgmt_interface_connection_point(indata
)
749 for vdu
in get_iterable(indata
.get("vdu")):
750 self
.validate_vdu_internal_connection_points(vdu
)
751 self
._validate
_vdu
_cloud
_init
_in
_package
(storage_params
, vdu
, indata
)
752 self
._validate
_vdu
_charms
_in
_package
(storage_params
, indata
)
754 self
._validate
_vnf
_charms
_in
_package
(storage_params
, indata
)
756 self
.validate_external_connection_points(indata
)
757 self
.validate_internal_virtual_links(indata
)
758 self
.validate_monitoring_params(indata
)
759 self
.validate_scaling_group_descriptor(indata
)
764 def validate_mgmt_interface_connection_point(indata
):
765 if not indata
.get("vdu"):
767 if not indata
.get("mgmt-cp"):
768 raise EngineException(
769 "'mgmt-cp' is a mandatory field and it is not defined",
770 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
773 for cp
in get_iterable(indata
.get("ext-cpd")):
774 if cp
["id"] == indata
["mgmt-cp"]:
777 raise EngineException(
778 "mgmt-cp='{}' must match an existing ext-cpd".format(indata
["mgmt-cp"]),
779 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
783 def validate_vdu_internal_connection_points(vdu
):
785 for cpd
in get_iterable(vdu
.get("int-cpd")):
786 cpd_id
= cpd
.get("id")
787 if cpd_id
and cpd_id
in int_cpds
:
788 raise EngineException(
789 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
792 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
797 def validate_external_connection_points(indata
):
798 all_vdus_int_cpds
= set()
799 for vdu
in get_iterable(indata
.get("vdu")):
800 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
801 all_vdus_int_cpds
.add((vdu
.get("id"), int_cpd
.get("id")))
804 for cpd
in get_iterable(indata
.get("ext-cpd")):
805 cpd_id
= cpd
.get("id")
806 if cpd_id
and cpd_id
in ext_cpds
:
807 raise EngineException(
808 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id
),
809 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
813 int_cpd
= cpd
.get("int-cpd")
815 if (int_cpd
.get("vdu-id"), int_cpd
.get("cpd")) not in all_vdus_int_cpds
:
816 raise EngineException(
817 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
820 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
822 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
824 def _validate_vdu_charms_in_package(self
, storage_params
, indata
):
825 for df
in indata
["df"]:
827 "lcm-operations-configuration" in df
828 and "operate-vnf-op-config" in df
["lcm-operations-configuration"]
830 configs
= df
["lcm-operations-configuration"][
831 "operate-vnf-op-config"
833 vdus
= df
.get("vdu-profile", [])
835 for config
in configs
:
836 if config
["id"] == vdu
["id"] and utils
.find_in_list(
837 config
.get("execution-environment-list", []),
838 lambda ee
: "juju" in ee
,
840 if not self
._validate
_package
_folders
(
841 storage_params
, "charms"
842 ) and not self
._validate
_package
_folders
(
843 storage_params
, "Scripts/charms"
845 raise EngineException(
846 "Charm defined in vnf[id={}] but not present in "
847 "package".format(indata
["id"])
850 def _validate_vdu_cloud_init_in_package(self
, storage_params
, vdu
, indata
):
851 if not vdu
.get("cloud-init-file"):
853 if not self
._validate
_package
_folders
(
854 storage_params
, "cloud_init", vdu
["cloud-init-file"]
855 ) and not self
._validate
_package
_folders
(
856 storage_params
, "Scripts/cloud_init", vdu
["cloud-init-file"]
858 raise EngineException(
859 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
860 "package".format(indata
["id"], vdu
["id"])
863 def _validate_vnf_charms_in_package(self
, storage_params
, indata
):
864 # Get VNF configuration through new container
865 for deployment_flavor
in indata
.get("df", []):
866 if "lcm-operations-configuration" not in deployment_flavor
:
869 "operate-vnf-op-config"
870 not in deployment_flavor
["lcm-operations-configuration"]
873 for day_1_2_config
in deployment_flavor
["lcm-operations-configuration"][
874 "operate-vnf-op-config"
876 if day_1_2_config
["id"] == indata
["id"]:
877 if utils
.find_in_list(
878 day_1_2_config
.get("execution-environment-list", []),
879 lambda ee
: "juju" in ee
,
881 if not self
._validate
_package
_folders
(
882 storage_params
, "charms"
883 ) and not self
._validate
_package
_folders
(
884 storage_params
, "Scripts/charms"
886 raise EngineException(
887 "Charm defined in vnf[id={}] but not present in "
888 "package".format(indata
["id"])
891 def _validate_package_folders(self
, storage_params
, folder
, file=None):
892 if not storage_params
:
894 elif not storage_params
.get("pkg-dir"):
895 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
897 storage_params
["folder"], folder
901 storage_params
["folder"], folder
904 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
907 if self
.fs
.file_exists(f
, "dir"):
908 if self
.fs
.dir_ls(f
):
912 if self
.fs
.file_exists("{}_".format(storage_params
["folder"]), "dir"):
913 f
= "{}_/{}/{}".format(
914 storage_params
["folder"], storage_params
["pkg-dir"], folder
917 f
= "{}/{}/{}".format(
918 storage_params
["folder"], storage_params
["pkg-dir"], folder
921 return self
.fs
.file_exists("{}/{}".format(f
, file), "file")
923 if self
.fs
.file_exists(f
, "dir"):
924 if self
.fs
.dir_ls(f
):
929 def validate_internal_virtual_links(indata
):
931 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
932 ivld_id
= ivld
.get("id")
933 if ivld_id
and ivld_id
in all_ivld_ids
:
934 raise EngineException(
935 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id
),
936 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
939 all_ivld_ids
.add(ivld_id
)
941 for vdu
in get_iterable(indata
.get("vdu")):
942 for int_cpd
in get_iterable(vdu
.get("int-cpd")):
943 int_cpd_ivld_id
= int_cpd
.get("int-virtual-link-desc")
944 if int_cpd_ivld_id
and int_cpd_ivld_id
not in all_ivld_ids
:
945 raise EngineException(
946 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
947 "int-virtual-link-desc".format(
948 vdu
["id"], int_cpd
["id"], int_cpd_ivld_id
950 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
953 for df
in get_iterable(indata
.get("df")):
954 for vlp
in get_iterable(df
.get("virtual-link-profile")):
955 vlp_ivld_id
= vlp
.get("id")
956 if vlp_ivld_id
and vlp_ivld_id
not in all_ivld_ids
:
957 raise EngineException(
958 "df[id='{}']:virtual-link-profile='{}' must match an existing "
959 "int-virtual-link-desc".format(df
["id"], vlp_ivld_id
),
960 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
964 def validate_monitoring_params(indata
):
965 all_monitoring_params
= set()
966 for ivld
in get_iterable(indata
.get("int-virtual-link-desc")):
967 for mp
in get_iterable(ivld
.get("monitoring-parameters")):
969 if mp_id
and mp_id
in all_monitoring_params
:
970 raise EngineException(
971 "Duplicated monitoring-parameter id in "
972 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
975 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
978 all_monitoring_params
.add(mp_id
)
980 for vdu
in get_iterable(indata
.get("vdu")):
981 for mp
in get_iterable(vdu
.get("monitoring-parameter")):
983 if mp_id
and mp_id
in all_monitoring_params
:
984 raise EngineException(
985 "Duplicated monitoring-parameter id in "
986 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
989 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
992 all_monitoring_params
.add(mp_id
)
994 for df
in get_iterable(indata
.get("df")):
995 for mp
in get_iterable(df
.get("monitoring-parameter")):
997 if mp_id
and mp_id
in all_monitoring_params
:
998 raise EngineException(
999 "Duplicated monitoring-parameter id in "
1000 "df[id='{}']:monitoring-parameter[id='{}']".format(
1003 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
,
1006 all_monitoring_params
.add(mp_id
)
def validate_scaling_group_descriptor(indata):
    """Validate the scaling-aspect section of a VNFD.

    Checks that every scaling-criteria references a declared monitoring
    parameter, and that every scaling-config-action references an existing
    day1-2 config-primitive of this VNFD.

    :param indata: VNFD descriptor content (dict)
    :return: None
    :raises: EngineException (422 UNPROCESSABLE_ENTITY) on a dangling reference
    """
    # Collect the ids of every monitoring parameter declared anywhere in
    # the descriptor: int-virtual-link-desc, vdu and df sections.
    all_monitoring_params = set()
    for ivld in get_iterable(indata.get("int-virtual-link-desc")):
        for mp in get_iterable(ivld.get("monitoring-parameters")):
            all_monitoring_params.add(mp.get("id"))

    for vdu in get_iterable(indata.get("vdu")):
        for mp in get_iterable(vdu.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for mp in get_iterable(df.get("monitoring-parameter")):
            all_monitoring_params.add(mp.get("id"))

    for df in get_iterable(indata.get("df")):
        for sa in get_iterable(df.get("scaling-aspect")):
            # Each scaling-criteria must reference a known monitoring param.
            for sp in get_iterable(sa.get("scaling-policy")):
                for sc in get_iterable(sp.get("scaling-criteria")):
                    sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                    if (
                        sc_monitoring_param
                        and sc_monitoring_param not in all_monitoring_params
                    ):
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                            "[name='{}']:scaling-criteria[name='{}']: "
                            "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                df["id"],
                                sa["id"],
                                sp["name"],
                                sc["name"],
                                sc_monitoring_param,
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

            for sca in get_iterable(sa.get("scaling-config-action")):
                # A scaling-config-action requires a day1-2 configuration
                # whose id matches this VNFD's id.
                if (
                    "lcm-operations-configuration" not in df
                    or "operate-vnf-op-config"
                    not in df["lcm-operations-configuration"]
                    or not utils.find_in_list(
                        df["lcm-operations-configuration"][
                            "operate-vnf-op-config"
                        ].get("day1-2", []),
                        lambda config: config["id"] == indata["id"],
                    )
                ):
                    raise EngineException(
                        "'day1-2 configuration' not defined in the descriptor but it is "
                        "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                            df["id"], sa["id"]
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                for configuration in get_iterable(
                    df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                        "day1-2", []
                    )
                ):
                    for primitive in get_iterable(
                        configuration.get("config-primitive")
                    ):
                        if (
                            primitive["name"]
                            == sca["vnf-config-primitive-name-ref"]
                        ):
                            break
                    else:
                        # for/else: no config-primitive matched the reference
                        raise EngineException(
                            "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                            "config-primitive-name-ref='{}' does not match any "
                            "day1-2 configuration:config-primitive:name".format(
                                df["id"],
                                sa["id"],
                                sca["vnf-config-primitive-name-ref"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """Delete the storage and operation records of a VNF package.

    Deletes associate file system storage (via super)
    Deletes associated vnfpkgops from database.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: presumably a collector list to defer message sending -- TODO confirm against super().delete_extra
    :return: None
    :raises: FsException in case of error while deleting associated storage
    """
    # File-system artifacts are removed by the parent class.
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Remove every package-operation occurrence referencing this package.
    self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
def sol005_projection(self, data):
    """Project an internal VNF package record onto its SOL005 view.

    Copies the _admin state fields to their SOL005 read-only names and
    attaches the "_links" navigation object, then delegates to super().
    """
    admin = data["_admin"]
    data["onboardingState"] = admin["onboardingState"]
    data["operationalState"] = admin["operationalState"]
    data["usageState"] = admin["usageState"]

    base_href = "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])
    data["_links"] = {
        "self": {"href": base_href},
        "vnfd": {"href": base_href + "/vnfd"},
        "packageContent": {"href": base_href + "/package_content"},
    }

    return super().sol005_projection(data)
1118 class NsdTopic(DescriptorTopic
):
def __init__(self, db, fs, msg, auth):
    """Initialize the NSD topic.

    :param db: database driver
    :param fs: file-system storage driver
    :param msg: message-bus driver
    :param auth: authentication/authorization connector
    """
    # Idiomatic super() instead of naming the base class explicitly;
    # single inheritance, so the resolved initializer is identical.
    super().__init__(db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate an NSD against the ETSI SOL006 yang model via pyangbind.

    :param item: topic name ("nsds"); kept for interface compatibility
    :param data: candidate descriptor content
    :param force: when True, unknown yang nodes are skipped instead of failing
    :return: the descriptor as re-serialized by pyangbind, envelope and yang prefixes removed
    :raises: EngineException (422) for old-format or invalid descriptors
    """
    if self._descriptor_data_is_in_old_format(data):
        raise EngineException(
            "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
    try:
        # vnf-profile is saved and restored after the round-trip,
        # presumably because pyangbind does not preserve it -- TODO confirm
        nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
        mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
        pybindJSONDecoder.load_ietf_json(
            {"nsd": {"nsd": [data]}},
            None,
            None,
            obj=mynsd,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(mynsd, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
        if nsd_vnf_profiles:
            desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
        return desc_out
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
1155 def _descriptor_data_is_in_old_format(data
):
1156 return ("nsd-catalog" in data
) or ("nsd:nsd-catalog" in data
)
1159 def _remove_envelop(indata
=None):
1162 clean_indata
= indata
1164 if clean_indata
.get("nsd"):
1165 clean_indata
= clean_indata
["nsd"]
1166 elif clean_indata
.get("etsi-nfv-nsd:nsd"):
1167 clean_indata
= clean_indata
["etsi-nfv-nsd:nsd"]
1168 if clean_indata
.get("nsd"):
1170 not isinstance(clean_indata
["nsd"], list)
1171 or len(clean_indata
["nsd"]) != 1
1173 raise EngineException("'nsd' must be a list of only one element")
1174 clean_indata
= clean_indata
["nsd"][0]
def _validate_input_new(self, indata, storage_params, force=False):
    """Validate a brand-new NSD: strip read-only fields, run pyangbind,
    then cross-check internal references.

    :param indata: candidate NSD content; normalized copy is returned
    :param storage_params: storage metadata (not used here; kept for interface)
    :param force: tolerate unknown yang nodes during validation
    :return: the validated descriptor
    """
    # Drop SOL005 read-only attributes a client may have echoed back.
    for read_only_key in (
        "nsdOnboardingState",
        "nsdOperationalState",
        "nsdUsageState",
        "links",
    ):
        indata.pop(read_only_key, None)

    indata = self.pyangbind_validation("nsds", indata, force)
    # Cross references validation in the descriptor
    # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
    for vld in get_iterable(indata.get("virtual-link-desc")):
        self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
    self.validate_vnf_profiles_vnfd_id(indata)
    return indata
def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
    """Reject virtual-link-protocol-data on management networks.

    A VLD flagged as mgmt-network must not carry protocol data in any
    df:virtual-link-profile that references it.

    :param vld: one virtual-link-desc entry of the NSD
    :param indata: the whole NSD content
    :raises: EngineException (422) when the forbidden combination is found
    """
    if not vld.get("mgmt-network"):
        return

    vld_id = vld.get("id")
    for deployment_flavor in get_iterable(indata.get("df")):
        for profile in get_iterable(deployment_flavor.get("virtual-link-profile")):
            references_this_vld = (
                vld_id and vld_id == profile.get("virtual-link-desc-id")
            )
            if references_this_vld and profile.get("virtual-link-protocol-data"):
                raise EngineException(
                    "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                    "protocol-data You cannot set a virtual-link-protocol-data "
                    "when mgmt-network is True".format(
                        deployment_flavor["id"], profile["id"]
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
def validate_vnf_profiles_vnfd_id(indata):
    """Check that every df:vnf-profile:vnfd-id appears in the NSD's
    top-level "vnfd-id" list.

    :param indata: the whole NSD content
    :raises: EngineException (422) on a dangling vnfd-id reference
    """
    declared_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
    for deployment_flavor in get_iterable(indata.get("df")):
        for profile in get_iterable(deployment_flavor.get("vnf-profile")):
            referenced_id = profile.get("vnfd-id")
            if referenced_id and referenced_id not in declared_vnfd_ids:
                raise EngineException(
                    "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                    "does not match any vnfd-id".format(
                        deployment_flavor["id"], profile["id"], referenced_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
1225 def _validate_input_edit(self
, indata
, content
, force
=False):
1226 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
1228 indata looks as follows:
1229 - In the new case (conformant)
1230 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1231 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1232 - In the old case (backwards-compatible)
1233 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1235 if "_admin" not in indata
:
1236 indata
["_admin"] = {}
1238 if "nsdOperationalState" in indata
:
1239 if indata
["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1240 indata
["_admin"]["operationalState"] = indata
.pop("nsdOperationalState")
1242 raise EngineException(
1243 "State '{}' is not a valid operational state".format(
1244 indata
["nsdOperationalState"]
1246 http_code
=HTTPStatus
.BAD_REQUEST
,
1249 # In the case of user defined data, we need to put the data in the root of the object
1250 # to preserve current expected behaviour
1251 if "userDefinedData" in indata
:
1252 data
= indata
.pop("userDefinedData")
1253 if type(data
) == dict:
1254 indata
["_admin"]["userDefinedData"] = data
1256 raise EngineException(
1257 "userDefinedData should be an object, but is '{}' instead".format(
1260 http_code
=HTTPStatus
.BAD_REQUEST
,
1263 "operationalState" in indata
["_admin"]
1264 and content
["_admin"]["operationalState"]
1265 == indata
["_admin"]["operationalState"]
1267 raise EngineException(
1268 "nsdOperationalState already {}".format(
1269 content
["_admin"]["operationalState"]
1271 http_code
=HTTPStatus
.CONFLICT
,
def _check_descriptor_dependencies(self, session, descriptor):
    """
    Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
    connection points are ok
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: descriptor to be inserted or edit
    :return: None or raises exception
    """
    if session["force"]:
        # Forced operations skip dependency checking altogether.
        return

    vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
    # Validate the connection-point references of each deployment flavor.
    for deployment_flavor in get_iterable(descriptor.get("df")):
        self.validate_df_vnf_profiles_constituent_connection_points(
            deployment_flavor, vnfds_index
        )
def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
    """Build a {vnfd-id: vnfd} index for every vnfd referenced by the NSD.

    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: NSD content listing constituent vnfds under "vnfd-id"
    :return: dict mapping each referenced vnfd id to its database record
    :raises: EngineException (409) when a referenced vnfd does not exist
    """
    vnfds_index = {}
    referenced_ids = descriptor.get("vnfd-id")
    # Nothing to index when there are no references or the op is forced.
    if not referenced_ids or session["force"]:
        return vnfds_index

    for vnfd_id in get_iterable(referenced_ids):
        query_filter = self._get_project_filter(session)
        query_filter["id"] = vnfd_id
        matches = self.db.get_list("vnfds", query_filter)
        if not matches:
            raise EngineException(
                "Descriptor error at 'vnfd-id'='{}' references a non "
                "existing vnfd".format(vnfd_id),
                http_code=HTTPStatus.CONFLICT,
            )
        # Keep the first project-visible match for later CP validation.
        vnfds_index[vnfd_id] = matches[0]
    return vnfds_index
def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
    """Check df:vnf-profile constituent-cpd-id references against vnfd ext-cpds.

    Every constituent-cpd-id inside a vnf-profile's virtual-link-connectivity
    must name an ext-cpd:id declared by the referenced vnfd.

    :param df: one deployment flavor of the NSD
    :param vnfds_index: {vnfd-id: vnfd} index of the constituent vnfds
    :return: None
    :raises: EngineException (422) on a dangling connection-point reference
    """
    for vnf_profile in get_iterable(df.get("vnf-profile")):
        # NOTE(review): .get may return None when the profile's vnfd-id is
        # missing from the index; vnfd.get("ext-cpd") below would then fail
        # -- confirm callers always pass a complete index.
        vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
        # Collect the declared external connection-point ids of the vnfd.
        all_vnfd_ext_cpds = set()
        for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
            if ext_cpd.get("id"):
                all_vnfd_ext_cpds.add(ext_cpd.get("id"))

        for virtual_link in get_iterable(
            vnf_profile.get("virtual-link-connectivity")
        ):
            for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                    raise EngineException(
                        "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                        "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                        "non existing ext-cpd:id inside vnfd '{}'".format(
                            df["id"],
                            vnf_profile["id"],
                            virtual_link["virtual-link-profile-id"],
                            vl_cpd_id,
                            vnf_profile["vnfd-id"],
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the generic edit checks, then validate NSD-specific dependencies.

    :return: the (possibly updated) final content of the descriptor
    """
    merged_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )
    self._check_descriptor_dependencies(session, merged_content)
    return merged_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
    that NSD can be public and be used by other projects.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: nsd internal id
    :param db_content: The database content of the _id
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return

    descriptor_id = db_content.get("id")
    if not descriptor_id:  # empty nsd not uploaded
        return

    _filter = self._get_project_filter(session)

    # Is the NSD in use by any NS instance of this project?
    _filter["nsd-id"] = _id
    if self.db.get_list("nsrs", _filter):
        raise EngineException(
            "There is at least one NS instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )

    # Is the NSD referenced by any NetSlice Template?
    del _filter["nsd-id"]
    _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
    if self.db.get_list("nsts", _filter):
        raise EngineException(
            "There is at least one NetSlice Template referencing this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def sol005_projection(self, data):
    """Project an internal NSD record onto its SOL005 view: copy the _admin
    state fields to their read-only names and attach "_links"."""
    admin = data["_admin"]
    data["nsdOnboardingState"] = admin["onboardingState"]
    data["nsdOperationalState"] = admin["operationalState"]
    data["nsdUsageState"] = admin["usageState"]

    base_href = "/nsd/v1/ns_descriptors/{}".format(data["_id"])
    data["_links"] = {
        "self": {"href": base_href},
        "nsd_content": {"href": base_href + "/nsd_content"},
    }

    return super().sol005_projection(data)
1393 class NstTopic(DescriptorTopic
):
1396 quota_name
= "slice_templates"
def __init__(self, db, fs, msg, auth):
    """Initialize the NST (Network Slice Template) topic.

    :param db: database driver
    :param fs: file-system storage driver
    :param msg: message-bus driver
    :param auth: authentication/authorization connector
    """
    # Idiomatic super() instead of naming the base class explicitly;
    # single inheritance, so the resolved initializer is identical.
    super().__init__(db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
    """Validate an NST against the OSM nst yang model via pyangbind.

    :param item: topic name ("nsts"); kept for interface compatibility
    :param data: candidate template content
    :param force: when True, unknown yang nodes are skipped instead of failing
    :return: the template as re-serialized by pyangbind, envelope removed
    :raises: EngineException (422) when validation fails
    """
    try:
        mynst = nst_im()
        pybindJSONDecoder.load_ietf_json(
            {"nst": [data]},
            None,
            None,
            obj=mynst,
            path_helper=True,
            skip_unknown=force,
        )
        out = pybindJSON.dumps(mynst, mode="ietf")
        desc_out = self._remove_envelop(yaml.safe_load(out))
        return desc_out
    except Exception as e:
        raise EngineException(
            "Error in pyangbind validation: {}".format(str(e)),
            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        )
1422 def _remove_envelop(indata
=None):
1425 clean_indata
= indata
1427 if clean_indata
.get("nst"):
1429 not isinstance(clean_indata
["nst"], list)
1430 or len(clean_indata
["nst"]) != 1
1432 raise EngineException("'nst' must be a list only one element")
1433 clean_indata
= clean_indata
["nst"][0]
1434 elif clean_indata
.get("nst:nst"):
1436 not isinstance(clean_indata
["nst:nst"], list)
1437 or len(clean_indata
["nst:nst"]) != 1
1439 raise EngineException("'nst:nst' must be a list only one element")
1440 clean_indata
= clean_indata
["nst:nst"][0]
def _validate_input_new(self, indata, storage_params, force=False):
    """Strip SOL005 read-only state fields and run pyangbind validation.

    :param indata: candidate NST content
    :param storage_params: storage metadata (unused here; kept for interface)
    :param force: tolerate unknown yang nodes during validation
    :return: a copy of the validated template
    """
    for read_only_key in ("onboardingState", "operationalState", "usageState"):
        indata.pop(read_only_key, None)
    indata = self.pyangbind_validation("nsts", indata, force)
    return indata.copy()
def _check_descriptor_dependencies(self, session, descriptor):
    """
    Check that the dependent descriptors exist on a new descriptor or edition
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param descriptor: descriptor to be inserted or edit
    :return: None or raises exception
    """
    if not descriptor.get("netslice-subnet"):
        return

    # Every netslice-subnet must reference an nsd visible to this project.
    for subnet in descriptor["netslice-subnet"]:
        nsd_id = subnet["nsd-ref"]
        filter_q = self._get_project_filter(session)
        filter_q["id"] = nsd_id
        if not self.db.get_list("nsds", filter_q):
            raise EngineException(
                "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                "existing nsd".format(nsd_id),
                http_code=HTTPStatus.CONFLICT,
            )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
    """Run the generic edit checks, then validate NST-specific dependencies.

    :return: the (possibly updated) final content of the template
    """
    merged_content = super().check_conflict_on_edit(
        session, final_content, edit_content, _id
    )
    self._check_descriptor_dependencies(session, merged_content)
    return merged_content
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
    that NST can be public and be used by other projects.
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: nst internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    # TODO: Check this method
    if session["force"]:
        return

    # Any NetSlice Instance created from this template in this project?
    query = self._get_project_filter(session)
    query["_admin.nst-id"] = _id
    if self.db.get_list("nsis", query):
        raise EngineException(
            "there is at least one Netslice Instance using this descriptor",
            http_code=HTTPStatus.CONFLICT,
        )
def sol005_projection(self, data):
    """Project an internal NST record onto its SOL005-style view: copy the
    _admin state fields to their read-only names and attach "_links"."""
    admin = data["_admin"]
    data["onboardingState"] = admin["onboardingState"]
    data["operationalState"] = admin["operationalState"]
    data["usageState"] = admin["usageState"]

    base_href = "/nst/v1/netslice_templates/{}".format(data["_id"])
    data["_links"] = {
        "self": {"href": base_href},
        "nst": {"href": base_href + "/nst"},
    }

    return super().sol005_projection(data)
1512 class PduTopic(BaseTopic
):
1515 quota_name
= "pduds"
1516 schema_new
= pdu_new_schema
1517 schema_edit
= pdu_edit_schema
def __init__(self, db, fs, msg, auth):
    """Initialize the PDU topic.

    :param db: database driver
    :param fs: file-system storage driver
    :param msg: message-bus driver
    :param auth: authentication/authorization connector
    """
    # Idiomatic super() instead of naming the base class explicitly;
    # single inheritance, so the resolved initializer is identical.
    super().__init__(db, fs, msg, auth)
def format_on_new(content, project_id=None, make_public=False):
    """Fill in the _admin defaults for a newly created PDU record.

    Delegates common formatting to BaseTopic, then sets the initial
    onboarding/operational/usage states.
    """
    BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
    initial_states = {
        "onboardingState": "CREATED",
        "operationalState": "ENABLED",
        "usageState": "NOT_IN_USE",
    }
    content["_admin"].update(initial_states)
def check_conflict_on_del(self, session, _id, db_content):
    """
    Check that there is not any vnfr that uses this PDU
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: pdu internal id
    :param db_content: The database content of the _id.
    :return: None or raises EngineException with the conflict
    """
    if session["force"]:
        return

    # Any VNF record whose vdur references this PDU?
    query = self._get_project_filter(session)
    query["vdur.pdu-id"] = _id
    if self.db.get_list("vnfrs", query):
        raise EngineException(
            "There is at least one VNF instance using this PDU",
            http_code=HTTPStatus.CONFLICT,
        )
1549 class VnfPkgOpTopic(BaseTopic
):
1552 schema_new
= vnfpkgop_new_schema
def __init__(self, db, fs, msg, auth):
    """Initialize the VNF package operations topic.

    :param db: database driver
    :param fs: file-system storage driver
    :param msg: message-bus driver
    :param auth: authentication/authorization connector
    """
    # Idiomatic super() instead of naming the base class explicitly;
    # single inheritance, so the resolved initializer is identical.
    super().__init__(db, fs, msg, auth)
def edit(self, session, _id, indata=None, kwargs=None, content=None):
    """VNF package operation occurrences are immutable; always rejects."""
    message = "Method 'edit' not allowed for topic '{}'".format(self.topic)
    raise EngineException(message, HTTPStatus.METHOD_NOT_ALLOWED)
def delete(self, session, _id, dry_run=False):
    """VNF package operation occurrences cannot be deleted; always rejects."""
    message = "Method 'delete' not allowed for topic '{}'".format(self.topic)
    raise EngineException(message, HTTPStatus.METHOD_NOT_ALLOWED)
def delete_list(self, session, filter_q=None):
    """Bulk deletion is not supported for this topic; always rejects."""
    message = "Method 'delete_list' not allowed for topic '{}'".format(self.topic)
    raise EngineException(message, HTTPStatus.METHOD_NOT_ALLOWED)
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
    """
    Creates a new entry into database.
    :param rollback: list to append created items at database in case a rollback may to be done
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param indata: data to be inserted
    :param kwargs: used to override the indata descriptor
    :param headers: http request headers
    :return: _id, op_id:
        _id: identity of the inserted data.
        op_id: None (operations on this topic carry no separate op id)
    """
    self._update_input_with_kwargs(indata, kwargs)
    validate_input(indata, self.schema_new)
    vnfpkg_id = indata["vnfPkgId"]
    filter_q = BaseTopic._get_project_filter(session)
    filter_q["_id"] = vnfpkg_id
    vnfd = self.db.get_one("vnfds", filter_q)
    operation = indata["lcmOperationType"]
    kdu_name = indata["kdu_name"]
    # Locate the kdu targeted by the operation inside the vnfd.
    for kdu in vnfd.get("kdu", []):
        if kdu["name"] == kdu_name:
            helm_chart = kdu.get("helm-chart")
            juju_bundle = kdu.get("juju-bundle")
            break
    else:
        # for/else: no kdu with that name in the vnfd
        raise EngineException(
            "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
        )
    # Resolve the repository name from the "<repo>/<chart-or-bundle>" reference.
    if helm_chart:
        indata["helm-chart"] = helm_chart
        match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
        repo_name = match.group(1) if match else None
    elif juju_bundle:
        indata["juju-bundle"] = juju_bundle
        match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
        repo_name = match.group(1) if match else None
    else:
        raise EngineException(
            "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                vnfpkg_id, kdu_name
            )
        )
    if repo_name:
        # Reuse the project filter, now keyed by repository name.
        del filter_q["_id"]
        filter_q["name"] = repo_name
        repo = self.db.get_one("k8srepos", filter_q)
        k8srepo_id = repo.get("_id")
        k8srepo_url = repo.get("url")
    else:
        k8srepo_id = None
        k8srepo_url = None
    indata["k8srepoId"] = k8srepo_id
    indata["k8srepo_url"] = k8srepo_url
    vnfpkgop_id = str(uuid4())
    vnfpkgop_desc = {
        "_id": vnfpkgop_id,
        "operationState": "PROCESSING",
        "vnfPkgId": vnfpkg_id,
        "lcmOperationType": operation,
        "isAutomaticInvocation": False,
        "isCancelPending": False,
        "operationParams": indata,
        "links": {
            "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
            "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
        },
    }
    self.format_on_new(
        vnfpkgop_desc, session["project_id"], make_public=session["public"]
    )
    # Mirror the creation time into the SOL005 timestamp fields.
    ctime = vnfpkgop_desc["_admin"]["created"]
    vnfpkgop_desc["statusEnteredTime"] = ctime
    vnfpkgop_desc["startTime"] = ctime
    self.db.create(self.topic, vnfpkgop_desc)
    rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
    self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
    return vnfpkgop_id, None