fix bug 555 Change extended pci validation schema for sdn port mapping
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 import tarfile
4 import yaml
5 import json
6 # import logging
7 from hashlib import md5
8 from osm_common.dbbase import DbException, deep_update_rfc7396
9 from http import HTTPStatus
10 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
11 from base_topic import BaseTopic, EngineException, get_iterable
12 from osm_im.vnfd import vnfd as vnfd_im
13 from osm_im.nsd import nsd as nsd_im
14 from pyangbind.lib.serialise import pybindJSONDecoder
15 import pyangbind.lib.pybindJSON as pybindJSON
16
17 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
18
19
20 class DescriptorTopic(BaseTopic):
21
22 def __init__(self, db, fs, msg):
23 BaseTopic.__init__(self, db, fs, msg)
24
25 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
26 # check that this id is not present
27 _filter = {"id": final_content["id"]}
28 if _id:
29 _filter["_id.neq"] = _id
30
31 _filter.update(self._get_project_filter(session, write=False, show_all=False))
32 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
33 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
34 final_content["id"]),
35 HTTPStatus.CONFLICT)
36 # TODO validate with pyangbind. Load and dumps to convert data types
37
38 @staticmethod
39 def format_on_new(content, project_id=None, make_public=False):
40 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
41 content["_admin"]["onboardingState"] = "CREATED"
42 content["_admin"]["operationalState"] = "DISABLED"
43 content["_admin"]["usageState"] = "NOT_IN_USE"
44
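# Illustrative note (not part of the original module): after format_on_new() a freshly created
# descriptor entry carries, on top of whatever BaseTopic.format_on_new() fills in, an "_admin"
# section similar to the following ("some-project-id" is a placeholder):
#
#   content = {"_admin": {"userDefinedData": {}}}
#   DescriptorTopic.format_on_new(content, project_id="some-project-id")
#   # content["_admin"] now also contains:
#   #   "onboardingState": "CREATED", "operationalState": "DISABLED", "usageState": "NOT_IN_USE"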
45 def delete(self, session, _id, force=False, dry_run=False):
46 """
47 Delete item by its internal _id
48 :param session: contains the used login username, working project, and admin rights
49 :param _id: server internal id
50 :param force: indicates if deletion must be forced in case of conflict
51 :param dry_run: make checking but do not delete
52 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
53 """
54 # TODO add admin to filter, validate rights
55 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
56 if dry_run:
57 return
58 v = self.db.del_one(self.topic, {"_id": _id})
59 self.fs.file_delete(_id, ignore_non_exist=True)
60 self._send_msg("delete", {"_id": _id})
61 return v
62
63 @staticmethod
64 def get_one_by_id(db, session, topic, id):
65 # find owned by this project
66 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
67 _filter["id"] = id
68 desc_list = db.get_list(topic, _filter)
69 if len(desc_list) == 1:
70 return desc_list[0]
71 elif len(desc_list) > 1:
72 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
73 HTTPStatus.CONFLICT)
74
75 # not found any: try to find public
76 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
77 _filter["id"] = id
78 desc_list = db.get_list(topic, _filter)
79 if not desc_list:
80 raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
81 elif len(desc_list) == 1:
82 return desc_list[0]
83 else:
84 raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
85 topic[:-1], id), HTTPStatus.CONFLICT)
86
87 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
88 """
89 Creates a new, almost empty, DISABLED entry in the database. Following SOL005, it does not use the usual
90 single-step procedure: creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step)
91 and 2) upload the content (self.upload_content)
92 :param rollback: list where created database items are appended, in case a rollback needs to be done
93 :param session: contains the used login username and working project
94 :param indata: data to be inserted
95 :param kwargs: used to override the indata descriptor
96 :param headers: http request headers
97 :param force: If True avoid some dependence checks
98 :param make_public: Make the created descriptor public to all projects
99 :return: _id: identity of the inserted data.
100 """
101
102 try:
103 # _remove_envelop
104 if indata:
105 if "userDefinedData" in indata:
106 indata = indata['userDefinedData']
107
108 # Override descriptor with query string kwargs
109 self._update_input_with_kwargs(indata, kwargs)
110 # uncomment when this method is implemented.
111 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
112 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
113
114 content = {"_admin": {"userDefinedData": indata}}
115 self.format_on_new(content, session["project_id"], make_public=make_public)
116 _id = self.db.create(self.topic, content)
117 rollback.append({"topic": self.topic, "_id": _id})
118 return _id
119 except ValidationError as e:
120 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
121
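# Illustrative sketch (not part of the original module) of the two-step SOL005 flow implemented by
# new() + upload_content(); "vnfd_topic", "session" and the package file name are assumptions:
#
#   rollback = []
#   _id = vnfd_topic.new(rollback, session, indata={"userDefinedData": {"owner": "test"}})
#   with open("my_vnfd.tar.gz", "rb") as pkg:
#       headers = {"Content-Type": "application/gzip", "Content-Filename": "my_vnfd.tar.gz"}
#       completed = vnfd_topic.upload_content(session, _id, pkg, kwargs=None, headers=headers)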
122 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
123 """
124 Used for receiving content by chunks (with a transaction_id header and/or a gzip file). It will store and extract the package.
125 :param session: session
126 :param _id: internal id of the already created nsd/vnfd
127 :param indata: http body request
128 :param kwargs: user query string to override parameters. NOT USED
129 :param headers: http request headers
130 :param force: to be more tolerant with validation
131 :return: True if the package has been completely uploaded, or False if only partial content has been uploaded.
132 Raise exception on error
133 """
134 # Check that _id exists and it is valid
135 current_desc = self.show(session, _id)
136
137 content_range_text = headers.get("Content-Range")
138 expected_md5 = headers.get("Content-File-MD5")
139 compressed = None
140 content_type = headers.get("Content-Type")
141 if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
142 "application/zip" in content_type):
143 compressed = "gzip"
144 filename = headers.get("Content-Filename")
145 if not filename:
146 filename = "package.tar.gz" if compressed else "package"
147 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
148 file_pkg = None
149 error_text = ""
150 try:
151 if content_range_text:
152 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
153 if content_range[0] != "bytes": # TODO check x<y not negative < total....
154 raise IndexError()
155 start = int(content_range[1])
156 end = int(content_range[2]) + 1
157 total = int(content_range[3])
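# Worked example (illustrative values): "Content-Range: bytes 0-1023/4096" is split into
# ["bytes", "0", "1023", "4096"], giving start=0, end=1024, total=4096; the next chunk would
# then be sent with "Content-Range: bytes 1024-4095/4096".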
158 else:
159 start = 0
160
161 if start:
162 if not self.fs.file_exists(_id, 'dir'):
163 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
164 else:
165 self.fs.file_delete(_id, ignore_non_exist=True)
166 self.fs.mkdir(_id)
167
168 storage = self.fs.get_params()
169 storage["folder"] = _id
170
171 file_path = (_id, filename)
172 if self.fs.file_exists(file_path, 'file'):
173 file_size = self.fs.file_size(file_path)
174 else:
175 file_size = 0
176 if file_size != start:
177 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
178 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
179 file_pkg = self.fs.file_open(file_path, 'a+b')
180 if isinstance(indata, dict):
181 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
182 file_pkg.write(indata_text.encode(encoding="utf-8"))
183 else:
184 indata_len = 0
185 while True:
186 indata_text = indata.read(4096)
187 indata_len += len(indata_text)
188 if not indata_text:
189 break
190 file_pkg.write(indata_text)
191 if content_range_text:
192 if indata_len != end-start:
193 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
194 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
195 if end != total:
196 # TODO update to UPLOADING
197 return False
198
199 # PACKAGE UPLOADED
200 if expected_md5:
201 file_pkg.seek(0, 0)
202 file_md5 = md5()
203 chunk_data = file_pkg.read(1024)
204 while chunk_data:
205 file_md5.update(chunk_data)
206 chunk_data = file_pkg.read(1024)
207 if expected_md5 != file_md5.hexdigest():
208 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
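# Client-side sketch (not part of the original module): the Content-File-MD5 header checked above
# can be generated with the standard library before uploading, e.g.:
#
#   from hashlib import md5
#   with open("my_vnfd.tar.gz", "rb") as f:
#       headers["Content-File-MD5"] = md5(f.read()).hexdigest()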
209 file_pkg.seek(0, 0)
210 if compressed == "gzip":
211 tar = tarfile.open(mode='r', fileobj=file_pkg)
212 descriptor_file_name = None
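# Expected package layout enforced by the loop below (illustrative file names):
#
#   cirros_vnf/                      <- single top-level folder, stored as storage["pkg-dir"]
#   cirros_vnf/cirros_vnfd.yaml      <- descriptor file at depth 2, stored as storage["descriptor"]
#   cirros_vnf/icons/cirros-64.png   <- optional artifacts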
213 for tarinfo in tar:
214 tarname = tarinfo.name
215 tarname_path = tarname.split("/")
216 if not tarname_path[0] or ".." in tarname_path: # if start with "/" means absolute path
217 raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
218 if len(tarname_path) == 1 and not tarinfo.isdir():
219 raise EngineException("All files must be inside a dir for package descriptor tar.gz")
220 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
221 storage["pkg-dir"] = tarname_path[0]
222 if len(tarname_path) == 2:
223 if descriptor_file_name:
224 raise EngineException(
225 "Found more than one descriptor file at package descriptor tar.gz")
226 descriptor_file_name = tarname
227 if not descriptor_file_name:
228 raise EngineException("Not found any descriptor file at package descriptor tar.gz")
229 storage["descriptor"] = descriptor_file_name
230 storage["zipfile"] = filename
231 self.fs.file_extract(tar, _id)
232 with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
233 content = descriptor_file.read()
234 else:
235 content = file_pkg.read()
236 storage["descriptor"] = descriptor_file_name = filename
237
238 if descriptor_file_name.endswith(".json"):
239 error_text = "Invalid json format "
240 indata = json.loads(content)
241 else:
242 error_text = "Invalid yaml format "
243 indata = yaml.safe_load(content)
244
245 current_desc["_admin"]["storage"] = storage
246 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
247 current_desc["_admin"]["operationalState"] = "ENABLED"
248
249 indata = self._remove_envelop(indata)
250
251 # Override descriptor with query string kwargs
252 if kwargs:
253 self._update_input_with_kwargs(indata, kwargs)
254 # it will call overrides method at VnfdTopic or NsdTopic
255 indata = self._validate_input_new(indata, force=force)
256
257 deep_update_rfc7396(current_desc, indata)
258 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
259 self.db.replace(self.topic, _id, current_desc)
260
261 indata["_id"] = _id
262 self._send_msg("created", indata)
263
264 # TODO if descriptor has changed because kwargs update content and remove cached zip
265 # TODO if zip is not present creates one
266 return True
267
268 except EngineException:
269 raise
270 except IndexError:
271 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
272 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
273 except IOError as e:
274 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
275 except tarfile.ReadError as e:
276 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
277 except (ValueError, yaml.YAMLError) as e:
278 raise EngineException(error_text + str(e))
279 except ValidationError as e:
280 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
281 finally:
282 if file_pkg:
283 file_pkg.close()
284
285 def get_file(self, session, _id, path=None, accept_header=None):
286 """
287 Return the file content of a vnfd or nsd
288 :param session: contains the used login username and working project
289 :param _id: Identity of the vnfd, nsd
290 :param path: artifact path or "$DESCRIPTOR" or None
291 :param accept_header: Content of Accept header. Must contain application/zip and/or text/plain
292 :return: opened file plus Accept format or raises an exception
293 """
294 accept_text = accept_zip = False
295 if accept_header:
296 if 'text/plain' in accept_header or '*/*' in accept_header:
297 accept_text = True
298 if 'application/zip' in accept_header or '*/*' in accept_header:
299 accept_zip = 'application/zip'
300 elif 'application/gzip' in accept_header:
301 accept_zip = 'application/gzip'
302
303 if not accept_text and not accept_zip:
304 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
305 http_code=HTTPStatus.NOT_ACCEPTABLE)
306
307 content = self.show(session, _id)
308 if content["_admin"]["onboardingState"] != "ONBOARDED":
309 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
310 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
311 http_code=HTTPStatus.CONFLICT)
312 storage = content["_admin"]["storage"]
313 if path is not None and path != "$DESCRIPTOR": # artifacts
314 if not storage.get('pkg-dir'):
315 raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
316 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
317 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
318 return folder_content, "text/plain"
319 # TODO manage folders in http
320 else:
321 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
322 "application/octet-stream"
323
324 # pkgtype    ZIP accepted  TEXT accepted  -> result
325 # manyfiles  yes           X              -> zip
326 # manyfiles  no            yes            -> error
327 # onefile    yes           no             -> zip
328 # onefile    X             yes            -> text
329
330 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
331 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
332 elif storage.get('pkg-dir') and not accept_zip:
333 raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
334 "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
335 else:
336 if not storage.get('zipfile'):
337 # TODO generate zipfile if not present
338 raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
339 "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
340 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
341
342 def pyangbind_validation(self, item, data, force=False):
343 try:
344 if item == "vnfds":
345 myvnfd = vnfd_im()
346 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
347 path_helper=True, skip_unknown=force)
348 out = pybindJSON.dumps(myvnfd, mode="ietf")
349 elif item == "nsds":
350 mynsd = nsd_im()
351 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
352 path_helper=True, skip_unknown=force)
353 out = pybindJSON.dumps(mynsd, mode="ietf")
354 else:
355 raise EngineException("Not possible to validate '{}' item".format(item),
356 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
357
358 desc_out = self._remove_envelop(yaml.safe_load(out))
359 return desc_out
360
361 except Exception as e:
362 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
363 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
364
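# Illustrative sketch (not part of the original module): pyangbind_validation() loads the plain
# dictionary into the osm_im generated classes and returns the normalized descriptor; "topic" is an
# assumed VnfdTopic instance and the file name is an assumption:
#
#   with open("cirros_vnfd.yaml") as f:
#       raw = yaml.safe_load(f)                        # descriptor without the catalog envelope
#   clean = topic.pyangbind_validation("vnfds", raw)   # raises EngineException (422) if the data
#                                                      # does not conform to the VNFD YANG model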
365
366 class VnfdTopic(DescriptorTopic):
367 topic = "vnfds"
368 topic_msg = "vnfd"
369
370 def __init__(self, db, fs, msg):
371 DescriptorTopic.__init__(self, db, fs, msg)
372
373 @staticmethod
374 def _remove_envelop(indata=None):
375 if not indata:
376 return {}
377 clean_indata = indata
378 if clean_indata.get('vnfd:vnfd-catalog'):
379 clean_indata = clean_indata['vnfd:vnfd-catalog']
380 elif clean_indata.get('vnfd-catalog'):
381 clean_indata = clean_indata['vnfd-catalog']
382 if clean_indata.get('vnfd'):
383 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
384 raise EngineException("'vnfd' must be a list of only one element")
385 clean_indata = clean_indata['vnfd'][0]
386 elif clean_indata.get('vnfd:vnfd'):
387 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
388 raise EngineException("'vnfd:vnfd' must be a list of only one element")
389 clean_indata = clean_indata['vnfd:vnfd'][0]
390 return clean_indata
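# Illustrative examples: _remove_envelop() strips any of the accepted envelopes and returns the
# bare descriptor (values assumed):
#
#   VnfdTopic._remove_envelop({"vnfd:vnfd-catalog": {"vnfd": [{"id": "cirros_vnfd"}]}})
#   # -> {"id": "cirros_vnfd"}
#   VnfdTopic._remove_envelop({"vnfd-catalog": {"vnfd": [{"id": "cirros_vnfd"}]}})
#   # -> {"id": "cirros_vnfd"}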
391
392 def check_conflict_on_del(self, session, _id, force=False):
393 """
394 Check that there is no NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
395 that a VNFD can be public and used by NSDs of other projects. Also check that there is no deployment (vnfr)
396 that uses this VNFD
397 :param session:
398 :param _id: vnfd internal id
399 :param force: Avoid this checking
400 :return: None or raises EngineException with the conflict
401 """
402 if force:
403 return
404 descriptor = self.db.get_one("vnfds", {"_id": _id})
405 descriptor_id = descriptor.get("id")
406 if not descriptor_id: # empty vnfd not uploaded
407 return
408
409 _filter = self._get_project_filter(session, write=False, show_all=False)
410 # check vnfrs using this vnfd
411 _filter["vnfd-id"] = _id
412 if self.db.get_list("vnfrs", _filter):
413 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
414 del _filter["vnfd-id"]
415 # check NSD using this VNFD
416 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
417 if self.db.get_list("nsds", _filter):
418 raise EngineException("There is soame NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
419
420 def _validate_input_new(self, indata, force=False):
421 # TODO validate with pyangbind, serialize
422 indata = self.pyangbind_validation("vnfds", indata, force)
423 # Cross references validation in the descriptor
424 if not indata.get("mgmt-interface"):
425 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
426 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
427 if indata["mgmt-interface"].get("cp"):
428 for cp in get_iterable(indata.get("connection-point")):
429 if cp["name"] == indata["mgmt-interface"]["cp"]:
430 break
431 else:
432 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
433 .format(indata["mgmt-interface"]["cp"]),
434 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
435
436 for vdu in get_iterable(indata.get("vdu")):
437 for interface in get_iterable(vdu.get("interface")):
438 if interface.get("external-connection-point-ref"):
439 for cp in get_iterable(indata.get("connection-point")):
440 if cp["name"] == interface["external-connection-point-ref"]:
441 break
442 else:
443 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
444 "must match an existing connection-point"
445 .format(vdu["id"], interface["name"],
446 interface["external-connection-point-ref"]),
447 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
448
449 elif interface.get("internal-connection-point-ref"):
450 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
451 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
452 break
453 else:
454 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
455 "must match an existing vdu:internal-connection-point"
456 .format(vdu["id"], interface["name"],
457 interface["internal-connection-point-ref"]),
458 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
459 for ivld in get_iterable(indata.get("internal-vld")):
460 for icp in get_iterable(ivld.get("internal-connection-point")):
461 icp_mark = False
462 for vdu in get_iterable(indata.get("vdu")):
463 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
464 if icp["id-ref"] == internal_cp["id"]:
465 icp_mark = True
466 break
467 if icp_mark:
468 break
469 else:
470 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
471 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
472 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
473 if ivld.get("ip-profile-ref"):
474 for ip_prof in get_iterable(indata.get("ip-profiles")):
475 if ip_prof["name"] == ivld["ip-profile-ref"]:
476 break
477 else:
478 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
479 ivld["id"], ivld["ip-profile-ref"]),
480 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
481 for mp in get_iterable(indata.get("monitoring-param")):
482 if mp.get("vdu-monitoring-param"):
483 mp_vmp_mark = False
484 for vdu in get_iterable(indata.get("vdu")):
485 for vmp in get_iterable(vdu.get("monitoring-param")):
486 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
487 mp["vdu-monitoring-param"]["vdu-ref"]:
488 mp_vmp_mark = True
489 break
490 if mp_vmp_mark:
491 break
492 else:
493 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
494 "defined at vdu[id='{}'] or vdu does not exist"
495 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
496 mp["vdu-monitoring-param"]["vdu-ref"]),
497 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
498 elif mp.get("vdu-metric"):
499 mp_vm_mark = False
500 for vdu in get_iterable(indata.get("vdu")):
501 if vdu.get("vdu-configuration"):
502 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
503 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
504 mp["vdu-metric"]["vdu-ref"]:
505 mp_vm_mark = True
506 break
507 if mp_vm_mark:
508 break
509 else:
510 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
511 "vdu[id='{}'] or vdu does not exist"
512 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
513 mp["vdu-metric"]["vdu-ref"]),
514 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
515
516 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
517 for sp in get_iterable(sgd.get("scaling-policy")):
518 for sc in get_iterable(sp.get("scaling-criteria")):
519 for mp in get_iterable(indata.get("monitoring-param")):
520 if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
521 break
522 else:
523 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
524 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
525 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
526 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
527 for sgd_vdu in get_iterable(sgd.get("vdu")):
528 sgd_vdu_mark = False
529 for vdu in get_iterable(indata.get("vdu")):
530 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
531 sgd_vdu_mark = True
532 break
533 if sgd_vdu_mark:
534 break
535 else:
536 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
537 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
538 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
539 for sca in get_iterable(sgd.get("scaling-config-action")):
540 if not indata.get("vnf-configuration"):
541 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
542 "scaling-group-descriptor[name='{}']:scaling-config-action"
543 .format(sgd["name"]),
544 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
545 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
546 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
547 break
548 else:
549 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
550 "primitive-name-ref='{}' does not match any "
551 "vnf-configuration:config-primitive:name"
552 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
553 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
554 return indata
555
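# Illustrative example (assumed minimal descriptor, and assuming it already passes the YANG model
# validation): the cross-reference checks above reject, for instance, a mgmt-interface that points
# to a connection point that is not declared:
#
#   {"id": "my_vnfd", "mgmt-interface": {"cp": "mgmt"}, "connection-point": [{"name": "eth0"}]}
#   # -> EngineException("mgmt-interface:cp='mgmt' must match an existing connection-point", 422)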
556 def _validate_input_edit(self, indata, force=False):
557 # TODO validate with pyangbind, serialize
558 return indata
559
560
561 class NsdTopic(DescriptorTopic):
562 topic = "nsds"
563 topic_msg = "nsd"
564
565 def __init__(self, db, fs, msg):
566 DescriptorTopic.__init__(self, db, fs, msg)
567
568 @staticmethod
569 def _remove_envelop(indata=None):
570 if not indata:
571 return {}
572 clean_indata = indata
573
574 if clean_indata.get('nsd:nsd-catalog'):
575 clean_indata = clean_indata['nsd:nsd-catalog']
576 elif clean_indata.get('nsd-catalog'):
577 clean_indata = clean_indata['nsd-catalog']
578 if clean_indata.get('nsd'):
579 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
580 raise EngineException("'nsd' must be a list of only one element")
581 clean_indata = clean_indata['nsd'][0]
582 elif clean_indata.get('nsd:nsd'):
583 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
584 raise EngineException("'nsd:nsd' must be a list of only one element")
585 clean_indata = clean_indata['nsd:nsd'][0]
586 return clean_indata
587
588 def _validate_input_new(self, indata, force=False):
589
590 # TODO validate with pyangbind, serialize
591 indata = self.pyangbind_validation("nsds", indata, force)
592 return indata
593
594 def _validate_input_edit(self, indata, force=False):
595 # TODO validate with pyangbind, serialize
596 return indata
597
598 def _check_descriptor_dependencies(self, session, descriptor):
599 """
600 Check that the dependent descriptors exist on a new descriptor or edition
601 :param session: client session information
602 :param descriptor: descriptor to be inserted or edit
603 :return: None or raises exception
604 """
605 if not descriptor.get("constituent-vnfd"):
606 return
607 for vnf in descriptor["constituent-vnfd"]:
608 vnfd_id = vnf["vnfd-id-ref"]
609 filter_q = self._get_project_filter(session, write=False, show_all=True)
610 filter_q["id"] = vnfd_id
611 if not self.db.get_list("vnfds", filter_q):
612 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
613 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
614
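# Illustrative sketch of the query issued above for each constituent-vnfd entry ("cirros_vnfd" is
# an assumed vnfd-id-ref):
#
#   filter_q = self._get_project_filter(session, write=False, show_all=True)
#   filter_q["id"] = "cirros_vnfd"
#   self.db.get_list("vnfds", filter_q)   # an empty result raises EngineException with HTTP 409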
615 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
616 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
617
618 self._check_descriptor_dependencies(session, final_content)
619
620 def check_conflict_on_del(self, session, _id, force=False):
621 """
622 Check that there is no NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
623 that NSD can be public and be used by other projects.
624 :param session:
625 :param _id: nsd internal id
626 :param force: Avoid this checking
627 :return: None or raises EngineException with the conflict
628 """
629 if force:
630 return
631 _filter = self._get_project_filter(session, write=False, show_all=False)
632 _filter["nsdId"] = _id
633 if self.db.get_list("nsrs", _filter):
634 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
635
636
637 class PduTopic(BaseTopic):
638 topic = "pdus"
639 topic_msg = "pdu"
640 schema_new = pdu_new_schema
641 schema_edit = pdu_edit_schema
642
643 def __init__(self, db, fs, msg):
644 BaseTopic.__init__(self, db, fs, msg)
645
646 @staticmethod
647 def format_on_new(content, project_id=None, make_public=False):
648 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
649 content["_admin"]["onboardingState"] = "CREATED"
650 content["_admin"]["operationalState"] = "DISABLED"
651 content["_admin"]["usageState"] = "NOT_IN_USE"
652
653 def check_conflict_on_del(self, session, _id, force=False):
654 if force:
655 return
656 # TODO Is it needed to check descriptors _admin.project_read/project_write??
657 _filter = {"vdur.pdu-id": _id}
658 if self.db.get_list("vnfrs", _filter):
659 raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)