Validate cross references in descriptors
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 import tarfile
4 import yaml
5 import json
6 # import logging
7 from hashlib import md5
8 from osm_common.dbbase import DbException, deep_update_rfc7396
9 from http import HTTPStatus
10 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
11 from base_topic import BaseTopic, EngineException, get_iterable
12
13 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
14
15
16 class DescriptorTopic(BaseTopic):
17
18 def __init__(self, db, fs, msg):
19 BaseTopic.__init__(self, db, fs, msg)
20
21 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
22 # check that this id is not present
23 _filter = {"id": final_content["id"]}
24 if _id:
25 _filter["_id.neq"] = _id
26
27 _filter.update(self._get_project_filter(session, write=False, show_all=False))
28 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
29 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
30 final_content["id"]),
31 HTTPStatus.CONFLICT)
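# Illustration (added for clarity, not executed): editing a descriptor whose human-readable id is
# "my-vnfd" under internal _id X results in a query roughly like
#   {"id": "my-vnfd", "_id.neq": X, <project read filter>}
# and any match means that id is already taken inside the project.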
32 # TODO validate with pyangbind. Load and dumps to convert data types
33
34 @staticmethod
35 def format_on_new(content, project_id=None, make_public=False):
36 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
37 content["_admin"]["onboardingState"] = "CREATED"
38 content["_admin"]["operationalState"] = "DISABLED"
39 content["_admin"]["usageState"] = "NOT_IN_USE"
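# Illustrative note (not executed): after format_on_new() a freshly created descriptor carries,
# besides whatever BaseTopic.format_on_new adds, the states
#   "_admin": {..., "onboardingState": "CREATED", "operationalState": "DISABLED", "usageState": "NOT_IN_USE"}
# which upload_content() later moves to ONBOARDED/ENABLED.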
40
41 def delete(self, session, _id, force=False, dry_run=False):
42 """
43 Delete item by its internal _id
44 :param session: contains the used login username, working project, and admin rights
45 :param _id: server internal id
46 :param force: indicates if deletion must be forced in case of conflict
47 :param dry_run: make checking but do not delete
48 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
49 """
50 # TODO add admin to filter, validate rights
51 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
52 if dry_run:
53 return
54 v = self.db.del_one(self.topic, {"_id": _id})
55 self.fs.file_delete(_id, ignore_non_exist=True)
56 self._send_msg("delete", {"_id": _id})
57 return v
58
59 @staticmethod
60 def get_one_by_id(db, session, topic, id):
61 # find owned by this project
62 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
63 _filter["id"] = id
64 desc_list = db.get_list(topic, _filter)
65 if len(desc_list) == 1:
66 return desc_list[0]
67 elif len(desc_list) > 1:
68 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
69 HTTPStatus.CONFLICT)
70
71 # not found any: try to find public
72 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
73 _filter["id"] = id
74 desc_list = db.get_list(topic, _filter)
75 if not desc_list:
76 raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
77 elif len(desc_list) == 1:
78 return desc_list[0]
79 else:
80 raise DbException("Found more than one public {} with id='{}'; and none belonging to this project".format(
81 topic[:-1], id), HTTPStatus.CONFLICT)
82
83 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
84 """
85 Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal procedure.
86 Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
87 (self.upload_content)
88 :param rollback: list where items created in the database are appended, in case a rollback needs to be done
89 :param session: contains the used login username and working project
90 :param indata: data to be inserted
91 :param kwargs: used to override the indata descriptor
92 :param headers: http request headers
93 :param force: If True, avoid some dependency checks
94 :param make_public: Make the created descriptor public to all projects
95 :return: _id: identity of the inserted data.
96 """
97
98 try:
99 # _remove_envelop
100 if indata:
101 if "userDefinedData" in indata:
102 indata = indata['userDefinedData']
103
104 # Override descriptor with query string kwargs
105 self._update_input_with_kwargs(indata, kwargs)
106 # Uncomment when this method is implemented.
107 # Avoid overriding in this case, as the target is userDefinedData and not the vnfd/nsd descriptor
108 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
109
110 content = {"_admin": {"userDefinedData": indata}}
111 self.format_on_new(content, session["project_id"], make_public=make_public)
112 _id = self.db.create(self.topic, content)
113 rollback.append({"topic": self.topic, "_id": _id})
114 return _id
115 except ValidationError as e:
116 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
117
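# Illustrative sketch only (hypothetical caller, not part of this module): the SOL005 two-step
# onboarding maps onto this class roughly as
#   rollback = []
#   desc_id = topic.new(rollback, session, indata={"userDefinedData": {"owner": "test"}},
#                       kwargs=None, headers=None)
#   with open("my_vnfd.tar.gz", "rb") as pkg:
#       completed = topic.upload_content(session, desc_id, pkg, kwargs=None,
#                                        headers={"Content-Type": "application/gzip",
#                                                 "Content-Filename": "my_vnfd.tar.gz"})
#   # completed is True once the whole package has been received, extracted and validated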
118 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
119 """
120 Used for receiving the package content in chunks (using a Transaction-Id header and/or a gzip file); it stores and extracts it
121 :param session: session
122 :param _id: internal id of the nsd/vnfd already created in the first step
123 :param indata: http body request
124 :param kwargs: user query string to override parameters. NOT USED
125 :param headers: http request headers
126 :param force: to be more tolerant with validation
127 :return: True if the package has been completely uploaded, or False if only partial content has been uploaded.
128 Raises an exception on error
129 """
130 # Check that _id exists and it is valid
131 current_desc = self.show(session, _id)
132
133 content_range_text = headers.get("Content-Range")
134 expected_md5 = headers.get("Content-File-MD5")
135 compressed = None
136 content_type = headers.get("Content-Type")
137 if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
138 "application/zip" in content_type):
139 compressed = "gzip"
140 filename = headers.get("Content-Filename")
141 if not filename:
142 filename = "package.tar.gz" if compressed else "package"
143 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
144 file_pkg = None
145 error_text = ""
146 try:
147 if content_range_text:
148 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
149 if content_range[0] != "bytes": # TODO check x<y not negative < total....
150 raise IndexError()
151 start = int(content_range[1])
152 end = int(content_range[2]) + 1
153 total = int(content_range[3])
154 else:
155 start = 0
156
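# Illustration: a header such as "Content-Range: bytes 0-1023/4096" is parsed above into
# start=0, end=1024 and total=4096; a follow-up chunk would then declare
# "Content-Range: bytes 1024-4095/4096".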
157 if start:
158 if not self.fs.file_exists(_id, 'dir'):
159 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
160 else:
161 self.fs.file_delete(_id, ignore_non_exist=True)
162 self.fs.mkdir(_id)
163
164 storage = self.fs.get_params()
165 storage["folder"] = _id
166
167 file_path = (_id, filename)
168 if self.fs.file_exists(file_path, 'file'):
169 file_size = self.fs.file_size(file_path)
170 else:
171 file_size = 0
172 if file_size != start:
173 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
174 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
175 file_pkg = self.fs.file_open(file_path, 'a+b')
176 if isinstance(indata, dict):
177 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
178 file_pkg.write(indata_text.encode(encoding="utf-8"))
179 else:
180 indata_len = 0
181 while True:
182 indata_text = indata.read(4096)
183 indata_len += len(indata_text)
184 if not indata_text:
185 break
186 file_pkg.write(indata_text)
187 if content_range_text:
188 if indata_len != end-start:
189 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
190 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
191 if end != total:
192 # TODO update to UPLOADING
193 return False
194
195 # PACKAGE UPLOADED
196 if expected_md5:
197 file_pkg.seek(0, 0)
198 file_md5 = md5()
199 chunk_data = file_pkg.read(1024)
200 while chunk_data:
201 file_md5.update(chunk_data)
202 chunk_data = file_pkg.read(1024)
203 if expected_md5 != file_md5.hexdigest():
204 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
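# Illustration: the client is expected to compute the digest over the complete package, e.g.
#   Content-File-MD5: $(md5sum my_vnfd.tar.gz | cut -d' ' -f1)
# and the check above re-reads the whole assembled file, not just the last chunk.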
205 file_pkg.seek(0, 0)
206 if compressed == "gzip":
207 tar = tarfile.open(mode='r', fileobj=file_pkg)
208 descriptor_file_name = None
209 for tarinfo in tar:
210 tarname = tarinfo.name
211 tarname_path = tarname.split("/")
212 if not tarname_path[0] or ".." in tarname_path:  # an empty first element means the path started with "/"
213 raise EngineException("Absolute paths or '..' are not allowed in the package descriptor tar.gz")
214 if len(tarname_path) == 1 and not tarinfo.isdir():
215 raise EngineException("All files must be inside a directory of the package descriptor tar.gz")
216 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
217 storage["pkg-dir"] = tarname_path[0]
218 if len(tarname_path) == 2:
219 if descriptor_file_name:
220 raise EngineException(
221 "Found more than one descriptor file in the package descriptor tar.gz")
222 descriptor_file_name = tarname
223 if not descriptor_file_name:
224 raise EngineException("No descriptor file found in the package descriptor tar.gz")
225 storage["descriptor"] = descriptor_file_name
226 storage["zipfile"] = filename
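# Example of a package layout accepted by the checks above (illustration only):
#   my_vnfd/                      <- single top-level directory
#   my_vnfd/my_vnfd.yaml          <- exactly one descriptor (.yaml/.yml/.json) at this level
#   my_vnfd/icons/logo.png        <- additional artifacts are allowed in sub-directories
# Absolute paths, ".." components and files outside a top-level directory are rejected.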
227 self.fs.file_extract(tar, _id)
228 with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
229 content = descriptor_file.read()
230 else:
231 content = file_pkg.read()
232 storage["descriptor"] = descriptor_file_name = filename
233
234 if descriptor_file_name.endswith(".json"):
235 error_text = "Invalid json format "
236 indata = json.loads(content)
237 else:
238 error_text = "Invalid yaml format "
239 indata = yaml.safe_load(content)
240
241 current_desc["_admin"]["storage"] = storage
242 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
243 current_desc["_admin"]["operationalState"] = "ENABLED"
244
245 indata = self._remove_envelop(indata)
246
247 # Override descriptor with query string kwargs
248 if kwargs:
249 self._update_input_with_kwargs(indata, kwargs)
250 # this calls the overriding method at VnfdTopic or NsdTopic
251 indata = self._validate_input_new(indata, force=force)
252
253 deep_update_rfc7396(current_desc, indata)
254 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
255 self.db.replace(self.topic, _id, current_desc)
256
257 indata["_id"] = _id
258 self._send_msg("created", indata)
259
260 # TODO if descriptor has changed because kwargs update content and remove cached zip
261 # TODO if zip is not present creates one
262 return True
263
264 except EngineException:
265 raise
266 except IndexError:
267 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
268 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
269 except IOError as e:
270 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
271 except tarfile.ReadError as e:
272 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
273 except (ValueError, yaml.YAMLError) as e:
274 raise EngineException(error_text + str(e))
275 except ValidationError as e:
276 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
277 finally:
278 if file_pkg:
279 file_pkg.close()
280
281 def get_file(self, session, _id, path=None, accept_header=None):
282 """
283 Return the file content of a vnfd or nsd
284 :param session: contains the used login username and working project
285 :param _id: Identity of the vnfd, nsd
286 :param path: artifact path or "$DESCRIPTOR" or None
287 :param accept_header: Content of Accept header. Must contain application/zip and/or text/plain
288 :return: opened file plus Accept format or raises an exception
289 """
290 accept_text = accept_zip = False
291 if accept_header:
292 if 'text/plain' in accept_header or '*/*' in accept_header:
293 accept_text = True
294 if 'application/zip' in accept_header or '*/*' in accept_header:
295 accept_zip = 'application/zip'
296 elif 'application/gzip' in accept_header:
297 accept_zip = 'application/gzip'
298
299 if not accept_text and not accept_zip:
300 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
301 http_code=HTTPStatus.NOT_ACCEPTABLE)
302
303 content = self.show(session, _id)
304 if content["_admin"]["onboardingState"] != "ONBOARDED":
305 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
306 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
307 http_code=HTTPStatus.CONFLICT)
308 storage = content["_admin"]["storage"]
309 if path is not None and path != "$DESCRIPTOR": # artifacts
310 if not storage.get('pkg-dir'):
311 raise EngineException("Package does not contain artifacts", http_code=HTTPStatus.BAD_REQUEST)
312 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
313 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
314 return folder_content, "text/plain"
315 # TODO manage folders in http
316 else:
317 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
318 "application/octet-stream"
319
320 #   pkg type     ZIP accepted   TEXT accepted   -> result
321 #   many files   yes            X               -> zip
322 #                no             yes             -> error
323 #   one file     yes            no              -> zip
324 #                X              yes             -> text
325
326 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
327 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
328 elif storage.get('pkg-dir') and not accept_zip:
329 raise EngineException("Packages that contain several files need to be retrieved with an "
330 "'application/zip' Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
331 else:
332 if not storage.get('zipfile'):
333 # TODO generate zipfile if not present
334 raise EngineException("Only a 'text/plain' Accept header is allowed for this descriptor. To be "
335 "solved in future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
336 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
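# Illustration (hypothetical caller): fetching the descriptor text or a single artifact
#   text_file, mime = topic.get_file(session, desc_id, path="$DESCRIPTOR", accept_header="text/plain")
#   artifact, mime = topic.get_file(session, desc_id, path=("icons", "logo.png"),
#                                   accept_header="application/zip, text/plain")
# where path is assumed to be an iterable of path components below the package directory.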
337
338
339 class VnfdTopic(DescriptorTopic):
340 topic = "vnfds"
341 topic_msg = "vnfd"
342
343 def __init__(self, db, fs, msg):
344 DescriptorTopic.__init__(self, db, fs, msg)
345
346 @staticmethod
347 def _remove_envelop(indata=None):
348 if not indata:
349 return {}
350 clean_indata = indata
351 if clean_indata.get('vnfd:vnfd-catalog'):
352 clean_indata = clean_indata['vnfd:vnfd-catalog']
353 elif clean_indata.get('vnfd-catalog'):
354 clean_indata = clean_indata['vnfd-catalog']
355 if clean_indata.get('vnfd'):
356 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
357 raise EngineException("'vnfd' must be a list with exactly one element")
358 clean_indata = clean_indata['vnfd'][0]
359 return clean_indata
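# Illustration: _remove_envelop() unwraps the catalog envelope, so e.g.
#   {"vnfd:vnfd-catalog": {"vnfd": [{"id": "my-vnf", "name": "my-vnf"}]}}
# is reduced to the inner descriptor {"id": "my-vnf", "name": "my-vnf"}.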
360
361 def check_conflict_on_del(self, session, _id, force=False):
362 """
363 Check that no NSD uses this VNFD. Only NSDs belonging to this project are considered. Note
364 that the VNFD can be public and used by NSDs of other projects. Also check that no deployment (VNFR)
365 uses this VNFD
366 :param session:
367 :param _id: vnfd internal id
368 :param force: Avoid this checking
369 :return: None or raises EngineException with the conflict
370 """
371 if force:
372 return
373 descriptor = self.db.get_one("vnfds", {"_id": _id})
374 descriptor_id = descriptor.get("id")
375 if not descriptor_id: # empty vnfd not uploaded
376 return
377
378 _filter = self._get_project_filter(session, write=False, show_all=False)
379 # check vnfrs using this vnfd
380 _filter["vnfd-id"] = _id
381 if self.db.get_list("vnfrs", _filter):
382 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
383 del _filter["vnfd-id"]
384 # check NSD using this VNFD
385 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
386 if self.db.get_list("nsds", _filter):
387 raise EngineException("There is some NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
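# Illustration: the NSD dependency check above matches, through the ANYINDEX filter, any NSD of the
# project that contains e.g. {"constituent-vnfd": [{"vnfd-id-ref": "<this vnfd id>", ...}]}.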
388
389 def _validate_input_new(self, indata, force=False):
390 # TODO validate with pyangbind, serialize
391
392 # Cross references validation in the descriptor
393 if indata.get("mgmt-interface"):
394 if indata["mgmt-interface"].get("cp"):
395 for cp in get_iterable(indata.get("connection-point")):
396 if cp["name"] == indata["mgmt-interface"]["cp"]:
397 break
398 else:
399 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
400 .format(indata["mgmt-interface"].get("cp")),
401 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
402 else:
403 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
404 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
405
406 for vdu in get_iterable(indata.get("vdu")):
407 for interface in get_iterable(vdu.get("interface")):
408 if interface.get("external-connection-point-ref"):
409 for cp in get_iterable(indata.get("connection-point")):
410 if cp["name"] == interface.get("external-connection-point-ref"):
411 break
412 else:
413 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
414 "must match an existing connection-point"
415 .format(vdu["id"], interface["name"],
416 interface["external-connection-point-ref"]),
417 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
418 for interface in get_iterable(vdu.get("interface")):
419 if interface.get("internal-connection-point-ref"):
420 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
421 if interface.get("internal-connection-point-ref") == internal_cp.get("id"):
422 break
423 else:
424 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' is "
425 "not associated to an internal-connection-point".format
426 (vdu["id"], interface["name"],
427 interface["internal-connection-point-ref"]),
428 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
429 for ivld in get_iterable(indata.get("internal-vld")):
430 for icp in get_iterable(ivld.get("internal-connection-point")):
431 icp_mark = False
432 for vdu in get_iterable(indata.get("vdu")):
433 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
434 if icp["id-ref"] == internal_cp["id"]:
435 icp_mark = True
436 break
437 if icp_mark:
438 break
439 else:
440 raise EngineException("internal-vld[id='{}']:internal-connection-point[id-ref='{}'] must match an existing "
441 "internal-connection-point".format(ivld["id"], icp["id-ref"]),
442 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
443 if ivld.get("ip-profile-ref"):
444 for ip_prof in get_iterable(indata.get("ip-profiles")):
445 if ip_prof["name"] == ivld["ip-profile-ref"]:
446 break
447 else:
448 raise EngineException("internal-vld='{}':ip-profile-ref='{}' does not exist".format(
449 ivld["id"], ivld["ip-profile-ref"]),
450 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
451 for mp in get_iterable(indata.get("monitoring-param")):
452 if mp.get("vdu-monitoring-param"):
453 mp_vmp_mark = False
454 for vdu in get_iterable(indata.get("vdu")):
455 for vmp in get_iterable(vdu.get("monitoring-param")):
456 if vmp["id"] == mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"] and vdu["id"] ==\
457 mp["vdu-monitoring-param"]["vdu-ref"]:
458 mp_vmp_mark = True
459 break
460 if mp_vmp_mark:
461 break
462 else:
463 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
464 "defined in vdu[id='{}'] or no VDU exists with this id"
465 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
466 mp["vdu-monitoring-param"]["vdu-ref"]),
467 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
468 elif mp.get("vdu-metric"):
469 mp_vm_mark = False
470 for vdu in get_iterable(indata.get("vdu")):
471 if vdu.get("vdu-configuration"):
472 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
473 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
474 mp["vdu-metric"]["vdu-ref"]:
475 mp_vm_mark = True
476 break
477 if mp_vm_mark:
478 break
479 else:
480 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined in VDU "
481 "vdu[id='{}'] or no VDU exists with this id"
482 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
483 mp["vdu-metric"]["vdu-ref"]),
484 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
485
486 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
487 for sp in get_iterable(sgd.get("scaling-policy")):
488 for sc in get_iterable(sp.get("scaling-criteria")):
489 for mp in get_iterable(indata.get("monitoring-param")):
490 if mp["id"] == sc.get("vnf-monitoring-param-ref"):
491 break
492 else:
493 raise EngineException("scaling-group-descriptor:name='{}':scaling-criteria:name='{}':"
494 "vnf-monitoring-param-ref='{}' not defined in any 'monitoring-param"
495 "['id']'".format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
496 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
497 for sgd_vdu in get_iterable(sgd.get("vdu")):
498 sgd_vdu_mark = False
499 for vdu in get_iterable(indata.get("vdu")):
500 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
501 sgd_vdu_mark = True
502 break
503 if sgd_vdu_mark:
504 break
505 else:
506 raise EngineException("scaling-group-descriptor[name='{}']:vdu is not defined and "
507 "it is mandatory in any 'scaling-group-descriptor'"
508 .format(sgd["name"]),
509 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
510 for sca in get_iterable(sgd.get("scaling-config-action")):
511 sca_vcp_mark = False
512 if indata.get("vnf-configuration"):
513 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
514 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
515 sca_vcp_mark = True
516 break
517 else:
518 raise EngineException("scaling-group-descriptor:name='{}':scaling-config-action:vnf-config-"
519 "primitive-name-ref='{}' not defined in "
520 "'vnf-configuration:config-primitive'"
521 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
522 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
523 else:
524 raise EngineException("'vnf-configuration' is not defined in the descriptor but it is referenced by a scaling-config-action",
525 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
526 if sca_vcp_mark:
527 break
528 return indata
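# Illustration of the cross references validated above (minimal VNFD fragment, shown as a comment):
#   mgmt-interface:
#     cp: mgmt-cp                        # must name one of the connection-point entries
#   connection-point:
#     - name: mgmt-cp
#   vdu:
#     - id: vdu-1
#       interface:
#         - name: eth0
#           external-connection-point-ref: mgmt-cp   # must also resolve to a connection-point
#       monitoring-param:
#         - id: vdu-cpu
#   monitoring-param:
#     - id: vnf-cpu
#       vdu-monitoring-param:
#         vdu-ref: vdu-1                 # must be an existing vdu:id
#         vdu-monitoring-param-ref: vdu-cpu   # must exist inside that VDU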
529
530 def _validate_input_edit(self, indata, force=False):
531 # TODO validate with pyangbind, serialize
532 return indata
533
534
535 class NsdTopic(DescriptorTopic):
536 topic = "nsds"
537 topic_msg = "nsd"
538
539 def __init__(self, db, fs, msg):
540 DescriptorTopic.__init__(self, db, fs, msg)
541
542 @staticmethod
543 def _remove_envelop(indata=None):
544 if not indata:
545 return {}
546 clean_indata = indata
547
548 if clean_indata.get('nsd:nsd-catalog'):
549 clean_indata = clean_indata['nsd:nsd-catalog']
550 elif clean_indata.get('nsd-catalog'):
551 clean_indata = clean_indata['nsd-catalog']
552 if clean_indata.get('nsd'):
553 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
554 raise EngineException("'nsd' must be a list with exactly one element")
555 clean_indata = clean_indata['nsd'][0]
556 return clean_indata
557
558 def _validate_input_new(self, indata, force=False):
559 # transform constituent-vnfd:member-vnf-index to string
560 if indata.get("constituent-vnfd"):
561 for constituent_vnfd in indata["constituent-vnfd"]:
562 if "member-vnf-index" in constituent_vnfd:
563 constituent_vnfd["member-vnf-index"] = str(constituent_vnfd["member-vnf-index"])
564
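# Illustration: {"constituent-vnfd": [{"vnfd-id-ref": "my-vnf", "member-vnf-index": 1}]}
# becomes {"constituent-vnfd": [{"vnfd-id-ref": "my-vnf", "member-vnf-index": "1"}]}, so the
# index is always handled as a string from here on.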
565 # TODO validate with pyangbind, serialize
566 return indata
567
568 def _validate_input_edit(self, indata, force=False):
569 # TODO validate with pyangbind, serialize
570 return indata
571
572 def _check_descriptor_dependencies(self, session, descriptor):
573 """
574 Check that the dependent descriptors exist on a new descriptor or edition
575 :param session: client session information
576 :param descriptor: descriptor to be inserted or edit
577 :return: None or raises exception
578 """
579 if not descriptor.get("constituent-vnfd"):
580 return
581 for vnf in descriptor["constituent-vnfd"]:
582 vnfd_id = vnf["vnfd-id-ref"]
583 filter_q = self._get_project_filter(session, write=False, show_all=True)
584 filter_q["id"] = vnfd_id
585 if not self.db.get_list("vnfds", filter_q):
586 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non-"
587 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
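# Illustration: for {"constituent-vnfd": [{"vnfd-id-ref": "my-vnf", "member-vnf-index": "1"}]}
# this issues db.get_list("vnfds", {<project filter>, "id": "my-vnf"}) and raises a CONFLICT
# if no such VNFD is visible to the project (own or public).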
588
589 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
590 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
591
592 self._check_descriptor_dependencies(session, final_content)
593
594 def check_conflict_on_del(self, session, _id, force=False):
595 """
596 Check that no NSR uses this NSD. Only NSRs belonging to this project are considered. Note
597 that the NSD can be public and used by other projects.
598 :param session:
599 :param _id: nsd internal id
600 :param force: Avoid this checking
601 :return: None or raises EngineException with the conflict
602 """
603 if force:
604 return
605 _filter = self._get_project_filter(session, write=False, show_all=False)
606 _filter["nsdId"] = _id
607 if self.db.get_list("nsrs", _filter):
608 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
609
610
611 class PduTopic(BaseTopic):
612 topic = "pdus"
613 topic_msg = "pdu"
614 schema_new = pdu_new_schema
615 schema_edit = pdu_edit_schema
616
617 def __init__(self, db, fs, msg):
618 BaseTopic.__init__(self, db, fs, msg)
619
620 @staticmethod
621 def format_on_new(content, project_id=None, make_public=False):
622 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
623 content["_admin"]["onboardingState"] = "CREATED"
624 content["_admin"]["operationalState"] = "DISABLED"
625 content["_admin"]["usageState"] = "NOT_IN_USE"
626
627 def check_conflict_on_del(self, session, _id, force=False):
628 if force:
629 return
630 # TODO Is it needed to check descriptors _admin.project_read/project_write??
631 _filter = {"vdur.pdu-id": _id}
632 if self.db.get_list("vnfrs", _filter):
633 raise EngineException("There is some VNFR that uses this PDU", http_code=HTTPStatus.CONFLICT)
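# Illustration: a PDU is considered in use when some VNFR contains a VDU record that references it,
# e.g. {"vdur": [{"pdu-id": "<this pdu _id>", ...}]}, which the "vdur.pdu-id" filter above matches.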