1 # -*- coding: utf-8 -*-
import json
import tarfile
from hashlib import md5
from http import HTTPStatus

import yaml
from osm_common.dbbase import DbException, deep_update_rfc7396

from base_topic import BaseTopic, EngineException, get_iterable
from validation import ValidationError, pdu_new_schema, pdu_edit_schema
13 __author__
= "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
16 class DescriptorTopic(BaseTopic
):
18 def __init__(self
, db
, fs
, msg
):
19 BaseTopic
.__init
__(self
, db
, fs
, msg
)
    def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
        """
        Verify that no other descriptor of this topic in the same project uses the same 'id'.
        :param session: contains the used login username and working project
        :param final_content: descriptor content after merging the edit
        :param edit_content: content sent by the client (not used here)
        :param _id: internal id of the descriptor being edited; excluded from the duplicate search
        :param force: not used here; kept for interface compatibility with subclasses
        :return: None. Raises EngineException on a duplicate 'id'
        """
        # check that this id is not present
        _filter = {"id": final_content["id"]}
        _filter["_id.neq"] = _id  # exclude the descriptor being edited from the search
        _filter.update(self._get_project_filter(session, write=False, show_all=False))
        if self.db.get_one(self.topic, _filter, fail_on_empty=False):
            # NOTE(review): the tail of this raise (second format argument and http code) was
            # reconstructed from a mangled source — confirm against repository history
            raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
                                                                                           final_content["id"]),
                                  HTTPStatus.CONFLICT)
32 # TODO validate with pyangbind. Load and dumps to convert data types
    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """
        Fill the _admin metadata for a just-created descriptor: delegate the common fields to
        BaseTopic and set the SOL005 onboarding/operational/usage states to their initial values.
        :param content: descriptor content to be completed in place
        :param project_id: project owning the descriptor
        :param make_public: make the descriptor visible to all projects
        """
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        # NOTE(review): "usageSate" looks like a typo for "usageState", but the key may be read
        # elsewhere — do not rename without checking all consumers
        content["_admin"]["usageSate"] = "NOT_IN_USE"
41 def delete(self
, session
, _id
, force
=False, dry_run
=False):
43 Delete item by its internal _id
44 :param session: contains the used login username, working project, and admin rights
45 :param _id: server internal id
46 :param force: indicates if deletion must be forced in case of conflict
47 :param dry_run: make checking but do not delete
48 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
50 # TODO add admin to filter, validate rights
51 v
= BaseTopic
.delete(self
, session
, _id
, force
, dry_run
=True)
54 v
= self
.db
.del_one(self
.topic
, {"_id": _id
})
55 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
56 self
._send
_msg
("delete", {"_id": _id
})
60 def get_one_by_id(db
, session
, topic
, id):
61 # find owned by this project
62 _filter
= BaseTopic
._get
_project
_filter
(session
, write
=False, show_all
=False)
64 desc_list
= db
.get_list(topic
, _filter
)
65 if len(desc_list
) == 1:
67 elif len(desc_list
) > 1:
68 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic
[:-1], id),
71 # not found any: try to find public
72 _filter
= BaseTopic
._get
_project
_filter
(session
, write
=False, show_all
=True)
74 desc_list
= db
.get_list(topic
, _filter
)
76 raise DbException("Not found any {} with id='{}'".format(topic
[:-1], id), HTTPStatus
.NOT_FOUND
)
77 elif len(desc_list
) == 1:
80 raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
81 topic
[:-1], id), HTTPStatus
.CONFLICT
)
83 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None, force
=False, make_public
=False):
85 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
86 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
88 :param rollback: list to append created items at database in case a rollback may to be done
89 :param session: contains the used login username and working project
90 :param indata: data to be inserted
91 :param kwargs: used to override the indata descriptor
92 :param headers: http request headers
93 :param force: If True avoid some dependence checks
94 :param make_public: Make the created descriptor public to all projects
95 :return: _id: identity of the inserted data.
101 if "userDefinedData" in indata
:
102 indata
= indata
['userDefinedData']
104 # Override descriptor with query string kwargs
105 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
106 # uncomment when this method is implemented.
107 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
108 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
110 content
= {"_admin": {"userDefinedData": indata
}}
111 self
.format_on_new(content
, session
["project_id"], make_public
=make_public
)
112 _id
= self
.db
.create(self
.topic
, content
)
113 rollback
.append({"topic": self
.topic
, "_id": _id
})
115 except ValidationError
as e
:
116 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
    def upload_content(self, session, _id, indata, kwargs, headers, force=False):
        """
        Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
        :param session: session
        :param _id : the nsd,vnfd is already created, this is the id
        :param indata: http body request
        :param kwargs: user query string to override parameters. NOT USED
        :param headers: http request headers
        :param force: to be more tolerant with validation
        :return: True if the package is completely uploaded or False if partial content has been uploaded.
            Raise exception on error
        """
        # NOTE(review): this method was reconstructed from a mangled source. Lines marked
        # "reconstructed" were not visible and must be confirmed against repository history.
        # Check that _id exists and it is valid
        current_desc = self.show(session, _id)

        content_range_text = headers.get("Content-Range")
        expected_md5 = headers.get("Content-File-MD5")
        compressed = None   # reconstructed
        content_type = headers.get("Content-Type")
        # NOTE(review): 'and' binds tighter than 'or': when content_type is None the
        # "application/x-gzip" membership test still executes and raises TypeError. The whole
        # gzip/zip alternative probably needs parentheses after 'content_type and (...)' — confirm.
        if content_type and "application/gzip" in content_type or "application/x-gzip" in content_type or \
                "application/zip" in content_type:
            compressed = "gzip"   # reconstructed
        filename = headers.get("Content-Filename")
        if not filename:   # reconstructed
            filename = "package.tar.gz" if compressed else "package"
        # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
        file_pkg = None     # reconstructed: required by the finally clause below
        error_text = ""     # reconstructed: prefix for ValueError/YAMLError messages
        try:
            if content_range_text:
                content_range = content_range_text.replace("-", " ").replace("/", " ").split()
                if content_range[0] != "bytes":  # TODO check x<y not negative < total....
                    raise IndexError()   # reconstructed: reported below as a malformed header
                start = int(content_range[1])
                end = int(content_range[2]) + 1
                total = int(content_range[3])
            else:
                start = 0   # reconstructed: no range header means a single whole-file upload

            if start:   # reconstructed: non-zero start continues a previous upload transaction
                if not self.fs.file_exists(_id, 'dir'):
                    raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
            else:
                # new transaction: discard any previous partial upload
                self.fs.file_delete(_id, ignore_non_exist=True)
                self.fs.mkdir(_id)   # reconstructed — confirm fs handler API

            storage = self.fs.get_params()
            storage["folder"] = _id

            file_path = (_id, filename)
            if self.fs.file_exists(file_path, 'file'):
                file_size = self.fs.file_size(file_path)
            else:
                file_size = 0   # reconstructed
            if file_size != start:
                raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                    file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
            file_pkg = self.fs.file_open(file_path, 'a+b')
            if isinstance(indata, dict):
                # body already parsed as yaml/json: serialize and append it
                indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                file_pkg.write(indata_text.encode(encoding="utf-8"))
                # NOTE(review): indata_len is not set on this branch, so a dict body combined
                # with a Content-Range header would raise NameError below — confirm intended
            else:
                # raw body: copy it to the package file by chunks (loop reconstructed)
                indata_len = 0
                while True:
                    indata_text = indata.read(4096)
                    indata_len += len(indata_text)
                    if not indata_text:
                        break
                    file_pkg.write(indata_text)
            if content_range_text:
                if indata_len != end-start:
                    raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
                        start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
                if end != total:   # reconstructed: more chunks are pending
                    # TODO update to UPLOADING
                    return False

            # PACKAGE UPLOADED: verify checksum if the client provided one (block reconstructed)
            if expected_md5:
                file_pkg.seek(0, 0)
                file_md5 = md5()
                chunk_data = file_pkg.read(1024)
                while chunk_data:
                    file_md5.update(chunk_data)
                    chunk_data = file_pkg.read(1024)
                if expected_md5 != file_md5.hexdigest():
                    raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
            file_pkg.seek(0, 0)   # reconstructed: rewind before parsing/extracting

            if compressed == "gzip":
                tar = tarfile.open(mode='r', fileobj=file_pkg)
                descriptor_file_name = None
                for tarinfo in tar:   # reconstructed loop header
                    tarname = tarinfo.name
                    tarname_path = tarname.split("/")
                    if not tarname_path[0] or ".." in tarname_path:  # if start with "/" means absolute path
                        raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
                    if len(tarname_path) == 1 and not tarinfo.isdir():
                        raise EngineException("All files must be inside a dir for package descriptor tar.gz")
                    if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
                        storage["pkg-dir"] = tarname_path[0]
                        if len(tarname_path) == 2:
                            if descriptor_file_name:
                                raise EngineException(
                                    "Found more than one descriptor file at package descriptor tar.gz")
                            descriptor_file_name = tarname
                if not descriptor_file_name:
                    raise EngineException("Not found any descriptor file at package descriptor tar.gz")
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(tar, _id)
                with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
                    content = descriptor_file.read()
            else:
                # plain (non-compressed) upload: the whole file is the descriptor
                content = file_pkg.read()
                storage["descriptor"] = descriptor_file_name = filename

            if descriptor_file_name.endswith(".json"):
                error_text = "Invalid json format "
                # NOTE(review): json.load expects a file object but 'content' is the file text;
                # json.loads looks intended — confirm
                indata = json.load(content)
            else:
                error_text = "Invalid yaml format "
                # NOTE(review): yaml.load without an explicit Loader is unsafe on untrusted
                # packages; yaml.safe_load looks preferable — confirm
                indata = yaml.load(content)

            current_desc["_admin"]["storage"] = storage
            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
            current_desc["_admin"]["operationalState"] = "ENABLED"

            indata = self._remove_envelop(indata)

            # Override descriptor with query string kwargs
            if kwargs:   # reconstructed guard
                self._update_input_with_kwargs(indata, kwargs)
            # it will call overrides method at VnfdTopic or NsdTopic
            indata = self._validate_input_new(indata, force=force)

            deep_update_rfc7396(current_desc, indata)
            self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
            self.db.replace(self.topic, _id, current_desc)
            self._send_msg("created", indata)

            # TODO if descriptor has changed because kwargs update content and remove cached zip
            # TODO if zip is not present creates one
            return True   # reconstructed: package completely uploaded and onboarded

        except EngineException:
            raise   # reconstructed: already carries the proper http_code
        except IndexError:   # reconstructed except clause
            raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
                                  HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
        except IOError as e:   # reconstructed except clause
            raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
        except tarfile.ReadError as e:
            raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
        except (ValueError, yaml.YAMLError) as e:
            raise EngineException(error_text + str(e))
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
        finally:
            if file_pkg:   # reconstructed cleanup
                file_pkg.close()
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains the used login username and working project
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:   # NOTE(review): guard reconstructed from mangled source — confirm
            if 'text/plain' in accept_header or '*/*' in accept_header:
                accept_text = True   # reconstructed
            if 'application/zip' in accept_header or '*/*' in accept_header:
                accept_zip = 'application/zip'
            elif 'application/gzip' in accept_header:
                accept_zip = 'application/gzip'

        if not accept_text and not accept_zip:
            raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
                                  http_code=HTTPStatus.NOT_ACCEPTABLE)

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
                                  "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                                  http_code=HTTPStatus.CONFLICT)
        storage = content["_admin"]["storage"]
        # NOTE(review): 'path' is unpacked with *, so callers appear to pass a sequence of path
        # components rather than a single string — confirm against callers
        if path is not None and path != "$DESCRIPTOR":   # artifacts
            if not storage.get('pkg-dir'):
                raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
            if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
                folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:   # reconstructed branch
                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
                    "application/octet-stream"

        # pkgtype   accept ZIP  TEXT    -> result
        # manyfiles yes         X       -> zip
        #           no          yes     -> error
        # onefile   yes         no      -> zip
        #           X           yes     -> text
        if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
            return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
        elif storage.get('pkg-dir') and not accept_zip:
            raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
                                  "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
        else:   # reconstructed branch
            if not storage.get('zipfile'):
                # TODO generate zipfile if not present
                raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                                      "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
            return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
339 class VnfdTopic(DescriptorTopic
):
343 def __init__(self
, db
, fs
, msg
):
344 DescriptorTopic
.__init
__(self
, db
, fs
, msg
)
347 def _remove_envelop(indata
=None):
350 clean_indata
= indata
351 if clean_indata
.get('vnfd:vnfd-catalog'):
352 clean_indata
= clean_indata
['vnfd:vnfd-catalog']
353 elif clean_indata
.get('vnfd-catalog'):
354 clean_indata
= clean_indata
['vnfd-catalog']
355 if clean_indata
.get('vnfd'):
356 if not isinstance(clean_indata
['vnfd'], list) or len(clean_indata
['vnfd']) != 1:
357 raise EngineException("'vnfd' must be a list only one element")
358 clean_indata
= clean_indata
['vnfd'][0]
361 def check_conflict_on_del(self
, session
, _id
, force
=False):
363 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
364 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
367 :param _id: vnfd inernal id
368 :param force: Avoid this checking
369 :return: None or raises EngineException with the conflict
373 descriptor
= self
.db
.get_one("vnfds", {"_id": _id
})
374 descriptor_id
= descriptor
.get("id")
375 if not descriptor_id
: # empty vnfd not uploaded
378 _filter
= self
._get
_project
_filter
(session
, write
=False, show_all
=False)
379 # check vnfrs using this vnfd
380 _filter
["vnfd-id"] = _id
381 if self
.db
.get_list("vnfrs", _filter
):
382 raise EngineException("There is some VNFR that depends on this VNFD", http_code
=HTTPStatus
.CONFLICT
)
383 del _filter
["vnfd-id"]
384 # check NSD using this VNFD
385 _filter
["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
386 if self
.db
.get_list("nsds", _filter
):
387 raise EngineException("There is soame NSD that depends on this VNFD", http_code
=HTTPStatus
.CONFLICT
)
    def _validate_input_new(self, indata, force=False):
        """
        Cross-check the internal references of a VNFD: mgmt-interface, vdu interfaces,
        internal-vlds, monitoring-params and scaling-group-descriptors must all point at elements
        that exist in the descriptor. Raises EngineException (UNPROCESSABLE_ENTITY) on the first
        broken reference; returns indata on success.
        NOTE(review): the 'break'/'else'/'continue' scaffolding below was reconstructed from a
        mangled source — confirm against repository history.
        """
        # TODO validate with pyangbind, serialize

        # Cross references validation in the descriptor
        if not indata.get("mgmt-interface"):
            raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        if indata["mgmt-interface"].get("cp"):
            # the mgmt connection-point must exist
            for cp in get_iterable(indata.get("connection-point")):
                if cp["name"] == indata["mgmt-interface"]["cp"]:
                    break
            else:
                raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
                                      .format(indata["mgmt-interface"]["cp"]),
                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

        for vdu in get_iterable(indata.get("vdu")):
            for interface in get_iterable(vdu.get("interface")):
                if interface.get("external-connection-point-ref"):
                    # external refs must match a top-level connection-point
                    for cp in get_iterable(indata.get("connection-point")):
                        if cp["name"] == interface["external-connection-point-ref"]:
                            break
                    else:
                        raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
                                              "must match an existing connection-point"
                                              .format(vdu["id"], interface["name"],
                                                      interface["external-connection-point-ref"]),
                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
                elif interface.get("internal-connection-point-ref"):
                    # internal refs must match an internal-connection-point of the same vdu
                    for internal_cp in get_iterable(vdu.get("internal-connection-point")):
                        if interface["internal-connection-point-ref"] == internal_cp.get("id"):
                            break
                    else:
                        raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
                                              "must match an existing vdu:internal-connection-point"
                                              .format(vdu["id"], interface["name"],
                                                      interface["internal-connection-point-ref"]),
                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        for ivld in get_iterable(indata.get("internal-vld")):
            for icp in get_iterable(ivld.get("internal-connection-point")):
                # the referenced icp must exist in some vdu; the inner for/else 'continue'/'break'
                # pair escapes both loops once a match is found
                for vdu in get_iterable(indata.get("vdu")):
                    for internal_cp in get_iterable(vdu.get("internal-connection-point")):
                        if icp["id-ref"] == internal_cp["id"]:
                            break
                    else:
                        continue
                    break
                else:
                    raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
                                          "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
            if ivld.get("ip-profile-ref"):
                for ip_prof in get_iterable(indata.get("ip-profiles")):
                    # NOTE(review): comparing a name against get_iterable(...) (a list/tuple)
                    # looks wrong; plain ivld["ip-profile-ref"] seems intended — confirm
                    if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")):
                        break
                else:
                    raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
                        ivld["id"], ivld["ip-profile-ref"]),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        for mp in get_iterable(indata.get("monitoring-param")):
            if mp.get("vdu-monitoring-param"):
                # the (vdu-ref, vdu-monitoring-param-ref) pair must exist in some vdu
                for vdu in get_iterable(indata.get("vdu")):
                    for vmp in get_iterable(vdu.get("monitoring-param")):
                        if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
                                mp["vdu-monitoring-param"]["vdu-ref"]:
                            break
                    else:
                        continue
                    break
                else:
                    raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
                                          "defined at vdu[id='{}'] or vdu does not exist"
                                          .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
                                                  mp["vdu-monitoring-param"]["vdu-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
            elif mp.get("vdu-metric"):
                # the (vdu-ref, vdu-metric-name-ref) pair must exist in some vdu-configuration
                for vdu in get_iterable(indata.get("vdu")):
                    if vdu.get("vdu-configuration"):
                        for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
                            if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
                                    mp["vdu-metric"]["vdu-ref"]:
                                break
                        else:
                            continue
                        break
                else:
                    raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
                                          "vdu[id='{}'] or vdu does not exist"
                                          .format(mp["vdu-metric"]["vdu-metric-name-ref"],
                                                  mp["vdu-metric"]["vdu-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

        for sgd in get_iterable(indata.get("scaling-group-descriptor")):
            for sp in get_iterable(sgd.get("scaling-policy")):
                for sc in get_iterable(sp.get("scaling-criteria")):
                    for mp in get_iterable(indata.get("monitoring-param")):
                        # NOTE(review): comparison against get_iterable(...) looks suspicious
                        # (list vs scalar); plain sc["vnf-monitoring-param-ref"] seems intended
                        if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
                            break
                    else:
                        raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
                                              "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
                                              .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
            for sgd_vdu in get_iterable(sgd.get("vdu")):
                # every scaled vdu must exist
                for vdu in get_iterable(indata.get("vdu")):
                    if vdu["id"] == sgd_vdu["vdu-id-ref"]:
                        break
                else:
                    raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
                                          .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
            for sca in get_iterable(sgd.get("scaling-config-action")):
                if not indata.get("vnf-configuration"):
                    raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
                                          "scaling-group-descriptor[name='{}']:scaling-config-action"
                                          .format(sgd["name"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
                for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
                    if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
                        break
                else:
                    raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
                                          "primitive-name-ref='{}' does not match any "
                                          "vnf-configuration:config-primitive:name"
                                          .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        # NOTE(review): callers assign the result (see upload_content) — return reconstructed
        return indata
    def _validate_input_edit(self, indata, force=False):
        """Validate an edited VNFD; currently a pass-through that returns indata unchanged."""
        # TODO validate with pyangbind, serialize
        # NOTE(review): return reconstructed from mangled source — callers use the result
        return indata
530 class NsdTopic(DescriptorTopic
):
534 def __init__(self
, db
, fs
, msg
):
535 DescriptorTopic
.__init
__(self
, db
, fs
, msg
)
538 def _remove_envelop(indata
=None):
541 clean_indata
= indata
543 if clean_indata
.get('nsd:nsd-catalog'):
544 clean_indata
= clean_indata
['nsd:nsd-catalog']
545 elif clean_indata
.get('nsd-catalog'):
546 clean_indata
= clean_indata
['nsd-catalog']
547 if clean_indata
.get('nsd'):
548 if not isinstance(clean_indata
['nsd'], list) or len(clean_indata
['nsd']) != 1:
549 raise EngineException("'nsd' must be a list only one element")
550 clean_indata
= clean_indata
['nsd'][0]
553 def _validate_input_new(self
, indata
, force
=False):
554 # transform constituent-vnfd:member-vnf-index to string
555 if indata
.get("constituent-vnfd"):
556 for constituent_vnfd
in indata
["constituent-vnfd"]:
557 if "member-vnf-index" in constituent_vnfd
:
558 constituent_vnfd
["member-vnf-index"] = str(constituent_vnfd
["member-vnf-index"])
560 # TODO validate with pyangbind, serialize
    def _validate_input_edit(self, indata, force=False):
        """Validate an edited NSD; currently a pass-through that returns indata unchanged."""
        # TODO validate with pyangbind, serialize
        # NOTE(review): return reconstructed from mangled source — callers use the result
        return indata
    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: client session information
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("constituent-vnfd"):
            return   # NOTE(review): early return reconstructed from mangled source — confirm
        for vnf in descriptor["constituent-vnfd"]:
            vnfd_id = vnf["vnfd-id-ref"]
            # show_all=True: a referenced VNFD may be public (owned by another project)
            filter_q = self._get_project_filter(session, write=False, show_all=True)
            filter_q["id"] = vnfd_id
            if not self.db.get_list("vnfds", filter_q):
                raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
                                      "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
    def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
        """
        Run the common duplicate-id check, then verify that every constituent-vnfd referenced by
        the edited NSD exists (in this project or public).
        """
        super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)

        self._check_descriptor_dependencies(session, final_content)
    def check_conflict_on_del(self, session, _id, force=False):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains the used login username and working project
        :param _id: nsd internal id
        :param force: Avoid this checking
        :return: None or raises EngineException with the conflict
        """
        if force:   # NOTE(review): early return reconstructed from mangled source — confirm
            return
        _filter = self._get_project_filter(session, write=False, show_all=False)
        _filter["nsdId"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Units (PDUs)."""
    # NOTE(review): topic/topic_msg were not visible in the mangled source; inferred from the
    # BaseTopic subclass pattern — confirm against repository history
    topic = "pdus"
    topic_msg = "pdu"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg):
        BaseTopic.__init__(self, db, fs, msg)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """
        Fill the _admin metadata for a just-created PDU with the initial state values.
        :param content: PDU content to be completed in place
        :param project_id: project owning the PDU
        :param make_public: make the PDU visible to all projects
        """
        # Fix: forward the caller's project_id instead of hard-coding None, matching
        # DescriptorTopic.format_on_new, so the PDU is assigned to the right project.
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        # NOTE(review): "usageSate" typo kept deliberately — the key is written identically by
        # DescriptorTopic.format_on_new and may be read elsewhere
        content["_admin"]["usageSate"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, force=False):
        """
        Raise EngineException (CONFLICT) if any vnfr references this PDU, unless force is set.
        :param session: contains the used login username and working project
        :param _id: PDU internal id
        :param force: Avoid this checking
        """
        if force:
            return
        # TODO Is it needed to check descriptors _admin.project_read/project_write??
        _filter = {"vdur.pdu-id": _id}
        if self.db.get_list("vnfrs", _filter):
            raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)