adding pyangbind validation at VNFD/NSD editing
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 import tarfile
4 import yaml
5 import json
6 # import logging
7 from hashlib import md5
8 from osm_common.dbbase import DbException, deep_update_rfc7396
9 from http import HTTPStatus
10 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
11 from base_topic import BaseTopic, EngineException, get_iterable
12 from osm_im.vnfd import vnfd as vnfd_im
13 from osm_im.nsd import nsd as nsd_im
14 from pyangbind.lib.serialise import pybindJSONDecoder
15 import pyangbind.lib.pybindJSON as pybindJSON
16
17 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
18
19
20 class DescriptorTopic(BaseTopic):
21
22 def __init__(self, db, fs, msg):
23 BaseTopic.__init__(self, db, fs, msg)
24
25 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
26 # 1. validate again with pyangbind
27 # 1.1. remove internal keys
28 internal_keys = {}
29 for k in ("_id", "_admin"):
30 if k in final_content:
31 internal_keys[k] = final_content.pop(k)
32 serialized = self._validate_input_new(final_content, force)
33 # 1.2. modify final_content with a serialized version
34 final_content.clear()
35 final_content.update(serialized)
36 # 1.3. restore internal keys
37 for k, v in internal_keys.items():
38 final_content[k] = v
39
40 # 2. check that this id is not present
41 if "id" in edit_content:
42 _filter = self._get_project_filter(session, write=False, show_all=False)
43 _filter["id"] = final_content["id"]
44 _filter["_id.neq"] = _id
45 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
46 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
47 final_content["id"]),
48 HTTPStatus.CONFLICT)
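# Illustrative note (not part of the original code): the duplicate-id query built above is roughly
#     {<project-ownership keys from _get_project_filter()>, "id": final_content["id"], "_id.neq": _id}
# i.e. "another descriptor in this project with the same human-readable 'id' but a different internal '_id'".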
49
50 @staticmethod
51 def format_on_new(content, project_id=None, make_public=False):
52 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
53 content["_admin"]["onboardingState"] = "CREATED"
54 content["_admin"]["operationalState"] = "DISABLED"
55 content["_admin"]["usageSate"] = "NOT_IN_USE"
56
57 def delete(self, session, _id, force=False, dry_run=False):
58 """
59 Delete item by its internal _id
60 :param session: contains the used login username, working project, and admin rights
61 :param _id: server internal id
62 :param force: indicates if deletion must be forced in case of conflict
63 :param dry_run: make checking but do not delete
64 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
65 """
66 # TODO add admin to filter, validate rights
67 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
68 if dry_run:
69 return
70 v = self.db.del_one(self.topic, {"_id": _id})
71 self.fs.file_delete(_id, ignore_non_exist=True)
72 self._send_msg("delete", {"_id": _id})
73 return v
74
75 @staticmethod
76 def get_one_by_id(db, session, topic, id):
77 # find owned by this project
78 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
79 _filter["id"] = id
80 desc_list = db.get_list(topic, _filter)
81 if len(desc_list) == 1:
82 return desc_list[0]
83 elif len(desc_list) > 1:
84 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
85 HTTPStatus.CONFLICT)
86
87 # not found any: try to find public
88 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
89 _filter["id"] = id
90 desc_list = db.get_list(topic, _filter)
91 if not desc_list:
92 raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
93 elif len(desc_list) == 1:
94 return desc_list[0]
95 else:
96 raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
97 topic[:-1], id), HTTPStatus.CONFLICT)
98
99 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
100 """
101 Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal
102 procedure. Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and
103 2) upload the content (self.upload_content)
104 :param rollback: list where the created database items are appended, in case a rollback has to be done
105 :param session: contains the used login username and working project
106 :param indata: data to be inserted
107 :param kwargs: used to override the indata descriptor
108 :param headers: http request headers
109 :param force: If True, avoid some dependency checks
110 :param make_public: Make the created descriptor public to all projects
111 :return: _id: identity of the inserted data.
112 """
113
114 try:
115 # _remove_envelop
116 if indata:
117 if "userDefinedData" in indata:
118 indata = indata['userDefinedData']
119
120 # Override descriptor with query string kwargs
121 self._update_input_with_kwargs(indata, kwargs)
122 # uncomment when this method is implemented;
123 # avoid overriding in this case, as the target is userDefinedData and not the vnfd/nsd descriptor
124 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
125
126 content = {"_admin": {"userDefinedData": indata}}
127 self.format_on_new(content, session["project_id"], make_public=make_public)
128 _id = self.db.create(self.topic, content)
129 rollback.append({"topic": self.topic, "_id": _id})
130 return _id
131 except ValidationError as e:
132 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
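# Illustrative note (comment only): per SOL005 the client first POSTs the (almost empty) descriptor and
# gets back its _id (this method), and then uploads the package content to that _id, which is handled by
# upload_content() below.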
133
134 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
135 """
136 Used for receiving content by chunks (with a transaction_id header and/or a gzip file). It stores and extracts it
137 :param session: session
138 :param _id : the nsd,vnfd is already created, this is the id
139 :param indata: http body request
140 :param kwargs: user query string to override parameters. NOT USED
141 :param headers: http request headers
142 :param force: to be more tolerant with validation
143 :return: True if the package has been completely uploaded, or False if partial content has been uploaded.
144 Raise exception on error
145 """
146 # Check that _id exists and it is valid
147 current_desc = self.show(session, _id)
148
149 content_range_text = headers.get("Content-Range")
150 expected_md5 = headers.get("Content-File-MD5")
151 compressed = None
152 content_type = headers.get("Content-Type")
153 if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
154 "application/zip" in content_type):
155 compressed = "gzip"
156 filename = headers.get("Content-Filename")
157 if not filename:
158 filename = "package.tar.gz" if compressed else "package"
159 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
160 file_pkg = None
161 error_text = ""
162 try:
163 if content_range_text:
164 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
165 if content_range[0] != "bytes": # TODO check x<y not negative < total....
166 raise IndexError()
167 start = int(content_range[1])
168 end = int(content_range[2]) + 1
169 total = int(content_range[3])
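# Example (comment only, assuming the RFC 7233 style header): "Content-Range: bytes 1000-1999/10000"
# is parsed here into start=1000, end=2000 (exclusive) and total=10000.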
170 else:
171 start = 0
172
173 if start:
174 if not self.fs.file_exists(_id, 'dir'):
175 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
176 else:
177 self.fs.file_delete(_id, ignore_non_exist=True)
178 self.fs.mkdir(_id)
179
180 storage = self.fs.get_params()
181 storage["folder"] = _id
182
183 file_path = (_id, filename)
184 if self.fs.file_exists(file_path, 'file'):
185 file_size = self.fs.file_size(file_path)
186 else:
187 file_size = 0
188 if file_size != start:
189 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
190 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
191 file_pkg = self.fs.file_open(file_path, 'a+b')
192 if isinstance(indata, dict):
193 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
194 file_pkg.write(indata_text.encode(encoding="utf-8"))
195 else:
196 indata_len = 0
197 while True:
198 indata_text = indata.read(4096)
199 indata_len += len(indata_text)
200 if not indata_text:
201 break
202 file_pkg.write(indata_text)
203 if content_range_text:
204 if indata_len != end-start:
205 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
206 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
207 if end != total:
208 # TODO update to UPLOADING
209 return False
210
211 # PACKAGE UPLOADED
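# Illustrative note (comment only): the optional "Content-File-MD5" header carries the hex digest of the
# whole package; it is recomputed below over the reassembled file and a mismatch is rejected with 409 CONFLICT.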
212 if expected_md5:
213 file_pkg.seek(0, 0)
214 file_md5 = md5()
215 chunk_data = file_pkg.read(1024)
216 while chunk_data:
217 file_md5.update(chunk_data)
218 chunk_data = file_pkg.read(1024)
219 if expected_md5 != file_md5.hexdigest():
220 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
221 file_pkg.seek(0, 0)
222 if compressed == "gzip":
223 tar = tarfile.open(mode='r', fileobj=file_pkg)
224 descriptor_file_name = None
225 for tarinfo in tar:
226 tarname = tarinfo.name
227 tarname_path = tarname.split("/")
228 if not tarname_path[0] or ".." in tarname_path: # if it starts with "/" it is an absolute path
229 raise EngineException("Absolute paths or '..' are not allowed in the package descriptor tar.gz")
230 if len(tarname_path) == 1 and not tarinfo.isdir():
231 raise EngineException("All files must be inside a folder in the package descriptor tar.gz")
232 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
233 storage["pkg-dir"] = tarname_path[0]
234 if len(tarname_path) == 2:
235 if descriptor_file_name:
236 raise EngineException(
237 "Found more than one descriptor file at package descriptor tar.gz")
238 descriptor_file_name = tarname
239 if not descriptor_file_name:
240 raise EngineException("Not found any descriptor file at package descriptor tar.gz")
241 storage["descriptor"] = descriptor_file_name
242 storage["zipfile"] = filename
243 self.fs.file_extract(tar, _id)
244 with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
245 content = descriptor_file.read()
246 else:
247 content = file_pkg.read()
248 storage["descriptor"] = descriptor_file_name = filename
249
250 if descriptor_file_name.endswith(".json"):
251 error_text = "Invalid json format "
252 indata = json.loads(content)
253 else:
254 error_text = "Invalid yaml format "
255 indata = yaml.safe_load(content)
256
257 current_desc["_admin"]["storage"] = storage
258 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
259 current_desc["_admin"]["operationalState"] = "ENABLED"
260
261 indata = self._remove_envelop(indata)
262
263 # Override descriptor with query string kwargs
264 if kwargs:
265 self._update_input_with_kwargs(indata, kwargs)
266 # it will call overrides method at VnfdTopic or NsdTopic
267 indata = self._validate_input_new(indata, force=force)
268
269 deep_update_rfc7396(current_desc, indata)
270 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
271 self.db.replace(self.topic, _id, current_desc)
272
273 indata["_id"] = _id
274 self._send_msg("created", indata)
275
276 # TODO if descriptor has changed because kwargs update content and remove cached zip
277 # TODO if zip is not present creates one
278 return True
279
280 except EngineException:
281 raise
282 except IndexError:
283 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
284 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
285 except IOError as e:
286 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
287 except tarfile.ReadError as e:
288 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
289 except (ValueError, yaml.YAMLError) as e:
290 raise EngineException(error_text + str(e))
291 except ValidationError as e:
292 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
293 finally:
294 if file_pkg:
295 file_pkg.close()
296
297 def get_file(self, session, _id, path=None, accept_header=None):
298 """
299 Return the file content of a vnfd or nsd
300 :param session: contains the used login username and working project
301 :param _id: Identity of the vnfd, nsd
302 :param path: artifact path or "$DESCRIPTOR" or None
303 :param accept_header: Content of the Accept header. Must contain application/zip and/or text/plain
304 :return: opened file plus Accept format or raises an exception
305 """
306 accept_text = accept_zip = False
307 if accept_header:
308 if 'text/plain' in accept_header or '*/*' in accept_header:
309 accept_text = True
310 if 'application/zip' in accept_header or '*/*' in accept_header:
311 accept_zip = 'application/zip'
312 elif 'application/gzip' in accept_header:
313 accept_zip = 'application/gzip'
314
315 if not accept_text and not accept_zip:
316 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
317 http_code=HTTPStatus.NOT_ACCEPTABLE)
318
319 content = self.show(session, _id)
320 if content["_admin"]["onboardingState"] != "ONBOARDED":
321 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
322 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
323 http_code=HTTPStatus.CONFLICT)
324 storage = content["_admin"]["storage"]
325 if path is not None and path != "$DESCRIPTOR": # artifacts
326 if not storage.get('pkg-dir'):
327 raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
328 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
329 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
330 return folder_content, "text/plain"
331 # TODO manage folders in http
332 else:
333 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
334 "application/octet-stream"
335
336 # pkgtype     accept ZIP   accept TEXT   -> result
337 # manyfiles   yes          X             -> zip
338 #             no           yes           -> error
339 # onefile     yes          no            -> zip
340 #             X            yes           -> text
341
342 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
343 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
344 elif storage.get('pkg-dir') and not accept_zip:
345 raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
346 "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
347 else:
348 if not storage.get('zipfile'):
349 # TODO generate zipfile if not present
350 raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
351 "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
352 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
353
354 def pyangbind_validation(self, item, data, force=False):
355 try:
356 if item == "vnfds":
357 myvnfd = vnfd_im()
358 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
359 path_helper=True, skip_unknown=force)
360 out = pybindJSON.dumps(myvnfd, mode="ietf")
361 elif item == "nsds":
362 mynsd = nsd_im()
363 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
364 path_helper=True, skip_unknown=force)
365 out = pybindJSON.dumps(mynsd, mode="ietf")
366 else:
367 raise EngineException("Not possible to validate '{}' item".format(item),
368 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
369
370 desc_out = self._remove_envelop(yaml.safe_load(out))
371 return desc_out
372
373 except Exception as e:
374 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
375 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
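# Illustrative sketch (comment only, not executed; "cirros_vnfd" is just an example id): validating a VNFD
# amounts to
#     myvnfd = vnfd_im()
#     pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [{"id": "cirros_vnfd", ...}]}},
#                                      None, None, obj=myvnfd, path_helper=True, skip_unknown=force)
#     out = pybindJSON.dumps(myvnfd, mode="ietf")   # serialized copy used as the stored descriptor
# Unknown leafs make load_ietf_json raise, unless skip_unknown (the 'force' flag) is set.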
376
377
378 class VnfdTopic(DescriptorTopic):
379 topic = "vnfds"
380 topic_msg = "vnfd"
381
382 def __init__(self, db, fs, msg):
383 DescriptorTopic.__init__(self, db, fs, msg)
384
385 @staticmethod
386 def _remove_envelop(indata=None):
387 if not indata:
388 return {}
389 clean_indata = indata
390 if clean_indata.get('vnfd:vnfd-catalog'):
391 clean_indata = clean_indata['vnfd:vnfd-catalog']
392 elif clean_indata.get('vnfd-catalog'):
393 clean_indata = clean_indata['vnfd-catalog']
394 if clean_indata.get('vnfd'):
395 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
396 raise EngineException("'vnfd' must be a list of only one element")
397 clean_indata = clean_indata['vnfd'][0]
398 elif clean_indata.get('vnfd:vnfd'):
399 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
400 raise EngineException("'vnfd:vnfd' must be a list of only one element")
401 clean_indata = clean_indata['vnfd:vnfd'][0]
402 return clean_indata
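# Illustrative example (comment only): an input such as
#     {'vnfd:vnfd-catalog': {'vnfd': [{'id': 'my_vnfd', ...}]}}
# is reduced by _remove_envelop to the inner descriptor
#     {'id': 'my_vnfd', ...}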
403
404 def check_conflict_on_del(self, session, _id, force=False):
405 """
406 Check that no NSD uses this VNFD. Only NSDs belonging to this project are considered. Note that the VNFD
407 can be public and be used by NSDs of other projects. Also check that there are no deployments (VNFRs)
408 that use this VNFD
409 :param session:
410 :param _id: vnfd internal id
411 :param force: Avoid this checking
412 :return: None or raises EngineException with the conflict
413 """
414 if force:
415 return
416 descriptor = self.db.get_one("vnfds", {"_id": _id})
417 descriptor_id = descriptor.get("id")
418 if not descriptor_id: # empty vnfd not uploaded
419 return
420
421 _filter = self._get_project_filter(session, write=False, show_all=False)
422 # check vnfrs using this vnfd
423 _filter["vnfd-id"] = _id
424 if self.db.get_list("vnfrs", _filter):
425 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
426 del _filter["vnfd-id"]
427 # check NSD using this VNFD
428 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
429 if self.db.get_list("nsds", _filter):
430 raise EngineException("There is soame NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
431
432 def _validate_input_new(self, indata, force=False):
433 indata = self.pyangbind_validation("vnfds", indata, force)
434 # Cross references validation in the descriptor
435 if not indata.get("mgmt-interface"):
436 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
437 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
438 if indata["mgmt-interface"].get("cp"):
439 for cp in get_iterable(indata.get("connection-point")):
440 if cp["name"] == indata["mgmt-interface"]["cp"]:
441 break
442 else:
443 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
444 .format(indata["mgmt-interface"]["cp"]),
445 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
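# Illustrative example (comment only, example values): with mgmt-interface: {cp: "eth0"} but
# connection-point: [{name: "mgmt0"}], the loop above finds no match and a 422 UNPROCESSABLE_ENTITY is raised.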
446
447 for vdu in get_iterable(indata.get("vdu")):
448 for interface in get_iterable(vdu.get("interface")):
449 if interface.get("external-connection-point-ref"):
450 for cp in get_iterable(indata.get("connection-point")):
451 if cp["name"] == interface["external-connection-point-ref"]:
452 break
453 else:
454 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
455 "must match an existing connection-point"
456 .format(vdu["id"], interface["name"],
457 interface["external-connection-point-ref"]),
458 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
459
460 elif interface.get("internal-connection-point-ref"):
461 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
462 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
463 break
464 else:
465 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
466 "must match an existing vdu:internal-connection-point"
467 .format(vdu["id"], interface["name"],
468 interface["internal-connection-point-ref"]),
469 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
470 for ivld in get_iterable(indata.get("internal-vld")):
471 for icp in get_iterable(ivld.get("internal-connection-point")):
472 icp_mark = False
473 for vdu in get_iterable(indata.get("vdu")):
474 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
475 if icp["id-ref"] == internal_cp["id"]:
476 icp_mark = True
477 break
478 if icp_mark:
479 break
480 else:
481 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
482 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
483 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
484 if ivld.get("ip-profile-ref"):
485 for ip_prof in get_iterable(indata.get("ip-profiles")):
486 if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")):
487 break
488 else:
489 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
490 ivld["id"], ivld["ip-profile-ref"]),
491 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
492 for mp in get_iterable(indata.get("monitoring-param")):
493 if mp.get("vdu-monitoring-param"):
494 mp_vmp_mark = False
495 for vdu in get_iterable(indata.get("vdu")):
496 for vmp in get_iterable(vdu.get("monitoring-param")):
497 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
498 mp["vdu-monitoring-param"]["vdu-ref"]:
499 mp_vmp_mark = True
500 break
501 if mp_vmp_mark:
502 break
503 else:
504 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
505 "defined at vdu[id='{}'] or vdu does not exist"
506 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
507 mp["vdu-monitoring-param"]["vdu-ref"]),
508 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
509 elif mp.get("vdu-metric"):
510 mp_vm_mark = False
511 for vdu in get_iterable(indata.get("vdu")):
512 if vdu.get("vdu-configuration"):
513 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
514 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
515 mp["vdu-metric"]["vdu-ref"]:
516 mp_vm_mark = True
517 break
518 if mp_vm_mark:
519 break
520 else:
521 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
522 "vdu[id='{}'] or vdu does not exist"
523 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
524 mp["vdu-metric"]["vdu-ref"]),
525 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
526
527 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
528 for sp in get_iterable(sgd.get("scaling-policy")):
529 for sc in get_iterable(sp.get("scaling-criteria")):
530 for mp in get_iterable(indata.get("monitoring-param")):
531 if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
532 break
533 else:
534 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
535 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
536 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
537 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
538 for sgd_vdu in get_iterable(sgd.get("vdu")):
539 sgd_vdu_mark = False
540 for vdu in get_iterable(indata.get("vdu")):
541 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
542 sgd_vdu_mark = True
543 break
544 if sgd_vdu_mark:
545 break
546 else:
547 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
548 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
549 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
550 for sca in get_iterable(sgd.get("scaling-config-action")):
551 if not indata.get("vnf-configuration"):
552 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
553 "scaling-group-descriptor[name='{}']:scaling-config-action"
554 .format(sgd["name"]),
555 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
556 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
557 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
558 break
559 else:
560 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
561 "primitive-name-ref='{}' does not match any "
562 "vnf-configuration:config-primitive:name"
563 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
564 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
565 # TODO validate that if it contains cloud-init-file or charms, it has artifacts (_admin.storage."pkg-dir" is not None)
566 return indata
567
568 def _validate_input_edit(self, indata, force=False):
569 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
570 return indata
571
572
573 class NsdTopic(DescriptorTopic):
574 topic = "nsds"
575 topic_msg = "nsd"
576
577 def __init__(self, db, fs, msg):
578 DescriptorTopic.__init__(self, db, fs, msg)
579
580 @staticmethod
581 def _remove_envelop(indata=None):
582 if not indata:
583 return {}
584 clean_indata = indata
585
586 if clean_indata.get('nsd:nsd-catalog'):
587 clean_indata = clean_indata['nsd:nsd-catalog']
588 elif clean_indata.get('nsd-catalog'):
589 clean_indata = clean_indata['nsd-catalog']
590 if clean_indata.get('nsd'):
591 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
592 raise EngineException("'nsd' must be a list of only one element")
593 clean_indata = clean_indata['nsd'][0]
594 elif clean_indata.get('nsd:nsd'):
595 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
596 raise EngineException("'nsd:nsd' must be a list of only one element")
597 clean_indata = clean_indata['nsd:nsd'][0]
598 return clean_indata
599
600 def _validate_input_new(self, indata, force=False):
601 indata = self.pyangbind_validation("nsds", indata, force)
602 # TODO validate that if it contains cloud-init-file or charms, it has artifacts (_admin.storage."pkg-dir" is not None)
603 return indata
604
605 def _validate_input_edit(self, indata, force=False):
606 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
607 return indata
608
609 def _check_descriptor_dependencies(self, session, descriptor):
610 """
611 Check that the dependent descriptors exist when a descriptor is created or edited
612 :param session: client session information
613 :param descriptor: descriptor to be inserted or edited
614 :return: None or raises exception
615 """
616 if not descriptor.get("constituent-vnfd"):
617 return
618 for vnf in descriptor["constituent-vnfd"]:
619 vnfd_id = vnf["vnfd-id-ref"]
620 filter_q = self._get_project_filter(session, write=False, show_all=True)
621 filter_q["id"] = vnfd_id
622 if not self.db.get_list("vnfds", filter_q):
623 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
624 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
625
626 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
627 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
628
629 if not force:
630 self._check_descriptor_dependencies(session, final_content)
631
632 def check_conflict_on_del(self, session, _id, force=False):
633 """
634 Check that no NSR uses this NSD. Only NSRs belonging to this project are considered. Note that the NSD
635 can be public and be used by other projects.
636 :param session:
637 :param _id: nsd internal id
638 :param force: Avoid this checking
639 :return: None or raises EngineException with the conflict
640 """
641 if force:
642 return
643 _filter = self._get_project_filter(session, write=False, show_all=False)
644 _filter["nsdId"] = _id
645 if self.db.get_list("nsrs", _filter):
646 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
647
648
649 class PduTopic(BaseTopic):
650 topic = "pdus"
651 topic_msg = "pdu"
652 schema_new = pdu_new_schema
653 schema_edit = pdu_edit_schema
654
655 def __init__(self, db, fs, msg):
656 BaseTopic.__init__(self, db, fs, msg)
657
658 @staticmethod
659 def format_on_new(content, project_id=None, make_public=False):
660 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
661 content["_admin"]["onboardingState"] = "CREATED"
662 content["_admin"]["operationalState"] = "DISABLED"
663 content["_admin"]["usageSate"] = "NOT_IN_USE"
664
665 def check_conflict_on_del(self, session, _id, force=False):
666 if force:
667 return
668 # TODO Is it needed to check descriptors _admin.project_read/project_write??
669 _filter = {"vdur.pdu-id": _id}
670 if self.db.get_list("vnfrs", _filter):
671 raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)