Merge branch 'netslice'
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 import tarfile
4 import yaml
5 import json
6 # import logging
7 from hashlib import md5
8 from osm_common.dbbase import DbException, deep_update_rfc7396
9 from http import HTTPStatus
10 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
11 from base_topic import BaseTopic, EngineException, get_iterable
12 from osm_im.vnfd import vnfd as vnfd_im
13 from osm_im.nsd import nsd as nsd_im
14 from pyangbind.lib.serialise import pybindJSONDecoder
15 import pyangbind.lib.pybindJSON as pybindJSON
16
17 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
18
19
20 class DescriptorTopic(BaseTopic):
21
22 def __init__(self, db, fs, msg):
23 BaseTopic.__init__(self, db, fs, msg)
24
25 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
26 # 1. validate again with pyangbind
27 # 1.1. remove internal keys
28 internal_keys = {}
29 for k in ("_id", "_admin"):
30 if k in final_content:
31 internal_keys[k] = final_content.pop(k)
32 serialized = self._validate_input_new(final_content, force)
33 # 1.2. modify final_content with a serialized version
34 final_content.clear()
35 final_content.update(serialized)
36 # 1.3. restore internal keys
37 for k, v in internal_keys.items():
38 final_content[k] = v
39
40 # 2. check that this id is not present
41 if "id" in edit_content:
42 _filter = self._get_project_filter(session, write=False, show_all=False)
43 _filter["id"] = final_content["id"]
44 _filter["_id.neq"] = _id
45 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
46 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
47 final_content["id"]),
48 HTTPStatus.CONFLICT)
49
50 @staticmethod
51 def format_on_new(content, project_id=None, make_public=False):
52 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
53 content["_admin"]["onboardingState"] = "CREATED"
54 content["_admin"]["operationalState"] = "DISABLED"
55 content["_admin"]["usageState"] = "NOT_IN_USE"
56
57 def delete(self, session, _id, force=False, dry_run=False):
58 """
59 Delete item by its internal _id
60 :param session: contains the used login username, working project, and admin rights
61 :param _id: server internal id
62 :param force: indicates if deletion must be forced in case of conflict
63 :param dry_run: perform the checks but do not delete
64 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
65 """
66 # TODO add admin to filter, validate rights
67 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
68 if dry_run:
69 return
70 v = self.db.del_one(self.topic, {"_id": _id})
71 self.fs.file_delete(_id, ignore_non_exist=True)
72 self._send_msg("delete", {"_id": _id})
73 return v
74
75 @staticmethod
76 def get_one_by_id(db, session, topic, id):
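# Resolution order: a descriptor owned by the working project is returned first; failing that,
# exactly one public descriptor with that id is accepted; anything else raises NOT_FOUND/CONFLICT.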
77 # find owned by this project
78 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
79 _filter["id"] = id
80 desc_list = db.get_list(topic, _filter)
81 if len(desc_list) == 1:
82 return desc_list[0]
83 elif len(desc_list) > 1:
84 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
85 HTTPStatus.CONFLICT)
86
87 # not found any: try to find public
88 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
89 _filter["id"] = id
90 desc_list = db.get_list(topic, _filter)
91 if not desc_list:
92 raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
93 elif len(desc_list) == 1:
94 return desc_list[0]
95 else:
96 raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
97 topic[:-1], id), HTTPStatus.CONFLICT)
98
99 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
100 """
101 Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal
102 procedure: creating a VNFD or NSD is done in two steps, 1) create an empty descriptor (this step) and
103 2) upload the content (self.upload_content)
104 :param rollback: list where created database items are appended in case a rollback needs to be done
105 :param session: contains the used login username and working project
106 :param indata: data to be inserted
107 :param kwargs: used to override the indata descriptor
108 :param headers: http request headers
109 :param force: If True, skip some dependency checks
110 :param make_public: Make the created descriptor public to all projects
111 :return: _id: identity of the inserted data.
112 """
113
114 try:
115 # _remove_envelop
116 if indata:
117 if "userDefinedData" in indata:
118 indata = indata['userDefinedData']
119
120 # Override descriptor with query string kwargs
121 self._update_input_with_kwargs(indata, kwargs)
122 # uncomment when this method is implemented.
123 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
124 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
125
126 content = {"_admin": {"userDefinedData": indata}}
127 self.format_on_new(content, session["project_id"], make_public=make_public)
128 _id = self.db.create(self.topic, content)
129 rollback.append({"topic": self.topic, "_id": _id})
130 return _id
131 except ValidationError as e:
132 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
133
134 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
135 """
136 Used for receiving content by chunks (with a transaction_id header and/or a gzip file). It stores the chunks and extracts the package when complete.
137 :param session: session
138 :param _id: id of the nsd/vnfd entry, which must have been created beforehand (see self.new)
139 :param indata: http body request
140 :param kwargs: user query string to override parameters. NOT USED
141 :param headers: http request headers
142 :param force: to be more tolerant with validation
143 :return: True if the package is completely uploaded, or False if only partial content has been uploaded.
144 Raise exception on error
145 """
146 # Check that _id exists and it is valid
147 current_desc = self.show(session, _id)
148
149 content_range_text = headers.get("Content-Range")
150 expected_md5 = headers.get("Content-File-MD5")
151 compressed = None
152 content_type = headers.get("Content-Type")
153 if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
154 "application/zip" in content_type):
155 compressed = "gzip"
156 filename = headers.get("Content-Filename")
157 if not filename:
158 filename = "package.tar.gz" if compressed else "package"
159 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
160 file_pkg = None
161 error_text = ""
162 try:
163 if content_range_text:
164 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
165 if content_range[0] != "bytes": # TODO check x<y not negative < total....
166 raise IndexError()
167 start = int(content_range[1])
168 end = int(content_range[2]) + 1
169 total = int(content_range[3])
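# e.g. "Content-Range: bytes 0-1023/4096" -> start=0, end=1024, total=4096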
170 else:
171 start = 0
172
173 if start:
174 if not self.fs.file_exists(_id, 'dir'):
175 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
176 else:
177 self.fs.file_delete(_id, ignore_non_exist=True)
178 self.fs.mkdir(_id)
179
180 storage = self.fs.get_params()
181 storage["folder"] = _id
182
183 file_path = (_id, filename)
184 if self.fs.file_exists(file_path, 'file'):
185 file_size = self.fs.file_size(file_path)
186 else:
187 file_size = 0
188 if file_size != start:
189 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
190 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
191 file_pkg = self.fs.file_open(file_path, 'a+b')
192 if isinstance(indata, dict):
193 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
194 file_pkg.write(indata_text.encode(encoding="utf-8"))
195 else:
196 indata_len = 0
197 while True:
198 indata_text = indata.read(4096)
199 indata_len += len(indata_text)
200 if not indata_text:
201 break
202 file_pkg.write(indata_text)
203 if content_range_text:
204 if indata_len != end-start:
205 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
206 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
207 if end != total:
208 # TODO update to UPLOADING
209 return False
210
211 # PACKAGE UPLOADED
212 if expected_md5:
213 file_pkg.seek(0, 0)
214 file_md5 = md5()
215 chunk_data = file_pkg.read(1024)
216 while chunk_data:
217 file_md5.update(chunk_data)
218 chunk_data = file_pkg.read(1024)
219 if expected_md5 != file_md5.hexdigest():
220 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
221 file_pkg.seek(0, 0)
222 if compressed == "gzip":
223 tar = tarfile.open(mode='r', fileobj=file_pkg)
224 descriptor_file_name = None
225 for tarinfo in tar:
226 tarname = tarinfo.name
227 tarname_path = tarname.split("/")
228 if not tarname_path[0] or ".." in tarname_path: # an empty first component means an absolute path ("/...")
229 raise EngineException("Absolute paths or '..' are not allowed in the package descriptor tar.gz")
230 if len(tarname_path) == 1 and not tarinfo.isdir():
231 raise EngineException("All files must be inside a dir for package descriptor tar.gz")
232 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
233 storage["pkg-dir"] = tarname_path[0]
234 if len(tarname_path) == 2:
235 if descriptor_file_name:
236 raise EngineException(
237 "Found more than one descriptor file at package descriptor tar.gz")
238 descriptor_file_name = tarname
239 if not descriptor_file_name:
240 raise EngineException("Not found any descriptor file at package descriptor tar.gz")
241 storage["descriptor"] = descriptor_file_name
242 storage["zipfile"] = filename
243 self.fs.file_extract(tar, _id)
244 with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
245 content = descriptor_file.read()
246 else:
247 content = file_pkg.read()
248 storage["descriptor"] = descriptor_file_name = filename
249
250 if descriptor_file_name.endswith(".json"):
251 error_text = "Invalid json format "
252 indata = json.loads(content)  # content is a str/bytes buffer, not a file object
253 else:
254 error_text = "Invalid yaml format "
255 indata = yaml.safe_load(content)
256
257 current_desc["_admin"]["storage"] = storage
258 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
259 current_desc["_admin"]["operationalState"] = "ENABLED"
260
261 indata = self._remove_envelop(indata)
262
263 # Override descriptor with query string kwargs
264 if kwargs:
265 self._update_input_with_kwargs(indata, kwargs)
266 # it will call overrides method at VnfdTopic or NsdTopic
267 indata = self._validate_input_new(indata, force=force)
268
269 deep_update_rfc7396(current_desc, indata)
270 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
271 self.db.replace(self.topic, _id, current_desc)
272
273 indata["_id"] = _id
274 self._send_msg("created", indata)
275
276 # TODO if descriptor has changed because kwargs update content and remove cached zip
277 # TODO if zip is not present creates one
278 return True
279
280 except EngineException:
281 raise
282 except IndexError:
283 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
284 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
285 except IOError as e:
286 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
287 except tarfile.ReadError as e:
288 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
289 except (ValueError, yaml.YAMLError) as e:
290 raise EngineException(error_text + str(e))
291 except ValidationError as e:
292 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
293 finally:
294 if file_pkg:
295 file_pkg.close()
296
297 def get_file(self, session, _id, path=None, accept_header=None):
298 """
299 Return the file content of a vnfd or nsd
300 :param session: contains the used login username and working project
301 :param _id: Identity of the vnfd, nsd
302 :param path: artifact path or "$DESCRIPTOR" or None
303 :param accept_header: Content of the Accept header. Must contain application/zip and/or text/plain
304 :return: opened file plus Accept format or raises an exception
305 """
306 accept_text = accept_zip = False
307 if accept_header:
308 if 'text/plain' in accept_header or '*/*' in accept_header:
309 accept_text = True
310 if 'application/zip' in accept_header or '*/*' in accept_header:
311 accept_zip = 'application/zip'
312 elif 'application/gzip' in accept_header:
313 accept_zip = 'application/gzip'
314
315 if not accept_text and not accept_zip:
316 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
317 http_code=HTTPStatus.NOT_ACCEPTABLE)
318
319 content = self.show(session, _id)
320 if content["_admin"]["onboardingState"] != "ONBOARDED":
321 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
322 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
323 http_code=HTTPStatus.CONFLICT)
324 storage = content["_admin"]["storage"]
325 if path is not None and path != "$DESCRIPTOR": # artifacts
326 if not storage.get('pkg-dir'):
327 raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
328 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
329 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
330 return folder_content, "text/plain"
331 # TODO manage folders in http
332 else:
333 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
334 "application/octet-stream"
335
336 # pkgtype      accepts ZIP   accepts TEXT   -> result
337 # many files       yes            X         -> zip
338 # many files       no            yes        -> error
339 # one file         yes            no        -> zip
340 # one file          X            yes        -> text
341
342 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
343 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
344 elif storage.get('pkg-dir') and not accept_zip:
345 raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
346 "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
347 else:
348 if not storage.get('zipfile'):
349 # TODO generate zipfile if not present
350 raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
351 "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
352 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
353
354 def pyangbind_validation(self, item, data, force=False):
355 try:
356 if item == "vnfds":
357 myvnfd = vnfd_im()
358 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
359 path_helper=True, skip_unknown=force)
360 out = pybindJSON.dumps(myvnfd, mode="ietf")
361 elif item == "nsds":
362 mynsd = nsd_im()
363 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
364 path_helper=True, skip_unknown=force)
365 out = pybindJSON.dumps(mynsd, mode="ietf")
366 else:
367 raise EngineException("Not possible to validate '{}' item".format(item),
368 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
369
370 desc_out = self._remove_envelop(yaml.safe_load(out))
371 return desc_out
372
373 except Exception as e:
374 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
375 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
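# Illustrative call (assumes a VnfdTopic instance `topic` and a descriptor file name of your own):
#
#     with open("vnfd.yaml") as f:
#         raw = yaml.safe_load(f)
#     clean = topic._remove_envelop(raw)            # drop the 'vnfd:vnfd-catalog' wrapper, if any
#     validated = topic.pyangbind_validation("vnfds", clean)
#     # unknown leaves make the validation fail unless force=True (skip_unknown) is given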
376
377
378 class VnfdTopic(DescriptorTopic):
379 topic = "vnfds"
380 topic_msg = "vnfd"
381
382 def __init__(self, db, fs, msg):
383 DescriptorTopic.__init__(self, db, fs, msg)
384
385 @staticmethod
386 def _remove_envelop(indata=None):
387 if not indata:
388 return {}
389 clean_indata = indata
390 if clean_indata.get('vnfd:vnfd-catalog'):
391 clean_indata = clean_indata['vnfd:vnfd-catalog']
392 elif clean_indata.get('vnfd-catalog'):
393 clean_indata = clean_indata['vnfd-catalog']
394 if clean_indata.get('vnfd'):
395 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
396 raise EngineException("'vnfd' must be a list of only one element")
397 clean_indata = clean_indata['vnfd'][0]
398 elif clean_indata.get('vnfd:vnfd'):
399 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
400 raise EngineException("'vnfd:vnfd' must be a list of only one element")
401 clean_indata = clean_indata['vnfd:vnfd'][0]
402 return clean_indata
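# Example of the unwrapping above (illustrative input):
#     {"vnfd:vnfd-catalog": {"vnfd": [{"id": "my-vnf", "name": "my-vnf"}]}}
# is reduced to
#     {"id": "my-vnf", "name": "my-vnf"}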
403
404 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
405 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
406
407 # set type of vnfd
408 contains_pdu = False
409 contains_vdu = False
410 for vdu in get_iterable(final_content.get("vdu")):
411 if vdu.get("pdu-type"):
412 contains_pdu = True
413 else:
414 contains_vdu = True
415 if contains_pdu:
416 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
417 elif contains_vdu:
418 final_content["_admin"]["type"] = "vnfd"
419 # if there is neither a vdu nor a pdu, do not set the type
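# e.g. vdus that all declare "pdu-type" yield "pnfd", a mix of pdu and regular vdus yields
# "hnfd", and regular vdus only yield "vnfd"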
420
421 def check_conflict_on_del(self, session, _id, force=False):
422 """
423 Check that no NSD uses this VNFD. Only NSDs belonging to this project are considered. Note that the VNFD can
424 be public and used by NSDs of other projects. Also check that there are no deployments (vnfr) that use this
425 vnfd
426 :param session:
427 :param _id: vnfd internal id
428 :param force: Avoid this checking
429 :return: None or raises EngineException with the conflict
430 """
431 if force:
432 return
433 descriptor = self.db.get_one("vnfds", {"_id": _id})
434 descriptor_id = descriptor.get("id")
435 if not descriptor_id: # empty vnfd not uploaded
436 return
437
438 _filter = self._get_project_filter(session, write=False, show_all=False)
439 # check vnfrs using this vnfd
440 _filter["vnfd-id"] = _id
441 if self.db.get_list("vnfrs", _filter):
442 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
443 del _filter["vnfd-id"]
444 # check NSD using this VNFD
445 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
446 if self.db.get_list("nsds", _filter):
447 raise EngineException("There is soame NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
448
449 def _validate_input_new(self, indata, force=False):
450 indata = self.pyangbind_validation("vnfds", indata, force)
451 # Cross references validation in the descriptor
452 if indata.get("vdu"):
453 if not indata.get("mgmt-interface"):
454 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
455 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
456 if indata["mgmt-interface"].get("cp"):
457 for cp in get_iterable(indata.get("connection-point")):
458 if cp["name"] == indata["mgmt-interface"]["cp"]:
459 break
460 else:
461 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
462 .format(indata["mgmt-interface"]["cp"]),
463 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
464
465 for vdu in get_iterable(indata.get("vdu")):
466 for interface in get_iterable(vdu.get("interface")):
467 if interface.get("external-connection-point-ref"):
468 for cp in get_iterable(indata.get("connection-point")):
469 if cp["name"] == interface["external-connection-point-ref"]:
470 break
471 else:
472 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
473 "must match an existing connection-point"
474 .format(vdu["id"], interface["name"],
475 interface["external-connection-point-ref"]),
476 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
477
478 elif interface.get("internal-connection-point-ref"):
479 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
480 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
481 break
482 else:
483 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
484 "must match an existing vdu:internal-connection-point"
485 .format(vdu["id"], interface["name"],
486 interface["internal-connection-point-ref"]),
487 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
488 for ivld in get_iterable(indata.get("internal-vld")):
489 for icp in get_iterable(ivld.get("internal-connection-point")):
490 icp_mark = False
491 for vdu in get_iterable(indata.get("vdu")):
492 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
493 if icp["id-ref"] == internal_cp["id"]:
494 icp_mark = True
495 break
496 if icp_mark:
497 break
498 else:
499 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
500 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
501 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
502 if ivld.get("ip-profile-ref"):
503 for ip_prof in get_iterable(indata.get("ip-profiles")):
504 if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")):
505 break
506 else:
507 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
508 ivld["id"], ivld["ip-profile-ref"]),
509 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
510 for mp in get_iterable(indata.get("monitoring-param")):
511 if mp.get("vdu-monitoring-param"):
512 mp_vmp_mark = False
513 for vdu in get_iterable(indata.get("vdu")):
514 for vmp in get_iterable(vdu.get("monitoring-param")):
515 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
516 mp["vdu-monitoring-param"]["vdu-ref"]:
517 mp_vmp_mark = True
518 break
519 if mp_vmp_mark:
520 break
521 else:
522 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
523 "defined at vdu[id='{}'] or vdu does not exist"
524 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
525 mp["vdu-monitoring-param"]["vdu-ref"]),
526 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
527 elif mp.get("vdu-metric"):
528 mp_vm_mark = False
529 for vdu in get_iterable(indata.get("vdu")):
530 if vdu.get("vdu-configuration"):
531 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
532 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
533 mp["vdu-metric"]["vdu-ref"]:
534 mp_vm_mark = True
535 break
536 if mp_vm_mark:
537 break
538 else:
539 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
540 "vdu[id='{}'] or vdu does not exist"
541 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
542 mp["vdu-metric"]["vdu-ref"]),
543 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
544
545 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
546 for sp in get_iterable(sgd.get("scaling-policy")):
547 for sc in get_iterable(sp.get("scaling-criteria")):
548 for mp in get_iterable(indata.get("monitoring-param")):
549 if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
550 break
551 else:
552 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
553 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
554 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
555 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
556 for sgd_vdu in get_iterable(sgd.get("vdu")):
557 sgd_vdu_mark = False
558 for vdu in get_iterable(indata.get("vdu")):
559 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
560 sgd_vdu_mark = True
561 break
562 if sgd_vdu_mark:
563 break
564 else:
565 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
566 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
567 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
568 for sca in get_iterable(sgd.get("scaling-config-action")):
569 if not indata.get("vnf-configuration"):
570 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
571 "scaling-group-descriptor[name='{}']:scaling-config-action"
572 .format(sgd["name"]),
573 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
574 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
575 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
576 break
577 else:
578 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
579 "primitive-name-ref='{}' does not match any "
580 "vnf-configuration:config-primitive:name"
581 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
582 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
583 # TODO validate that, if it contains cloud-init-file or charms, the package has artifacts and _admin.storage."pkg-dir" is not None
584 return indata
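# Illustrative fragment (an assumed minimal example, not a full descriptor) that satisfies the
# cross-reference checks above:
#
#     vnfd = {
#         "id": "example-vnf", "name": "example-vnf",
#         "mgmt-interface": {"cp": "vnf-mgmt"},
#         "connection-point": [{"name": "vnf-mgmt"}],
#         "vdu": [{"id": "vdu-1", "name": "vdu-1",
#                  "interface": [{"name": "eth0",
#                                 "external-connection-point-ref": "vnf-mgmt"}]}],
#     }
#
# mgmt-interface:cp and the vdu interface both resolve to the declared connection-point
# "vnf-mgmt", so _validate_input_new() accepts the references.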
585
586 def _validate_input_edit(self, indata, force=False):
587 # no need to validate with pyangbind here because it will be validated at check_conflict_on_edit
588 return indata
589
590
591 class NsdTopic(DescriptorTopic):
592 topic = "nsds"
593 topic_msg = "nsd"
594
595 def __init__(self, db, fs, msg):
596 DescriptorTopic.__init__(self, db, fs, msg)
597
598 @staticmethod
599 def _remove_envelop(indata=None):
600 if not indata:
601 return {}
602 clean_indata = indata
603
604 if clean_indata.get('nsd:nsd-catalog'):
605 clean_indata = clean_indata['nsd:nsd-catalog']
606 elif clean_indata.get('nsd-catalog'):
607 clean_indata = clean_indata['nsd-catalog']
608 if clean_indata.get('nsd'):
609 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
610 raise EngineException("'nsd' must be a list of only one element")
611 clean_indata = clean_indata['nsd'][0]
612 elif clean_indata.get('nsd:nsd'):
613 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
614 raise EngineException("'nsd:nsd' must be a list of only one element")
615 clean_indata = clean_indata['nsd:nsd'][0]
616 return clean_indata
617
618 def _validate_input_new(self, indata, force=False):
619 indata = self.pyangbind_validation("nsds", indata, force)
620 # TODO validate that, if it contains cloud-init-file or charms, the package has artifacts and _admin.storage."pkg-dir" is not None
621 return indata
622
623 def _validate_input_edit(self, indata, force=False):
624 # no need to validate with pyangbind here because it will be validated at check_conflict_on_edit
625 return indata
626
627 def _check_descriptor_dependencies(self, session, descriptor):
628 """
629 Check that the descriptors this one depends on exist when a descriptor is created or edited
630 :param session: client session information
631 :param descriptor: descriptor to be inserted or edited
632 :return: None or raises exception
633 """
634 if not descriptor.get("constituent-vnfd"):
635 return
636 for vnf in descriptor["constituent-vnfd"]:
637 vnfd_id = vnf["vnfd-id-ref"]
638 filter_q = self._get_project_filter(session, write=False, show_all=True)
639 filter_q["id"] = vnfd_id
640 if not self.db.get_list("vnfds", filter_q):
641 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
642 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
643
644 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
645 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
646
647 if not force:
648 self._check_descriptor_dependencies(session, final_content)
649
650 def check_conflict_on_del(self, session, _id, force=False):
651 """
652 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
653 that NSD can be public and be used by other projects.
654 :param session:
655 :param _id: nsd internal id
656 :param force: Avoid this checking
657 :return: None or raises EngineException with the conflict
658 """
659 if force:
660 return
661 _filter = self._get_project_filter(session, write=False, show_all=False)
662 _filter["nsdId"] = _id
663 if self.db.get_list("nsrs", _filter):
664 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
665
666
667 class NstTopic(DescriptorTopic):
668 topic = "nsts"
669 topic_msg = "nst"
670
671 def __init__(self, db, fs, msg):
672 DescriptorTopic.__init__(self, db, fs, msg)
673
674 @staticmethod
675 def _remove_envelop(indata=None):
676 if not indata:
677 return {}
678 clean_indata = indata
679
680 if clean_indata.get('nst:nst'):
681 clean_indata = clean_indata['nst:nst']
682 elif clean_indata.get('nst'):
683 clean_indata = clean_indata['nst']
684 if clean_indata.get('nst'):
685 if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
686 raise EngineException("'nst' must be a list only one element")
687 clean_indata = clean_indata['nst'][0]
688 return clean_indata
689
690 def _validate_input_edit(self, indata, force=False):
691 # TODO validate with pyangbind, serialize
692 return indata
693
694 def _check_descriptor_dependencies(self, session, descriptor):
695 """
696 Check that the descriptors this one depends on exist when a descriptor is created or edited
697 :param session: client session information
698 :param descriptor: descriptor to be inserted or edited
699 :return: None or raises exception
700 """
701 if not descriptor.get("netslice-subnet"):
702 return
703 for nsd in descriptor["netslice-subnet"]:
704 nsd_id = nsd["nsd-ref"]
705 filter_q = self._get_project_filter(session, write=False, show_all=True)
706 filter_q["id"] = nsd_id
707 if not self.db.get_list("nsds", filter_q):
708 raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
709 "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
710
711 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
712 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
713
714 self._check_descriptor_dependencies(session, final_content)
715
716 def check_conflict_on_del(self, session, _id, force=False):
717 """
718 Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
719 that NST can be public and be used by other projects.
720 :param session:
721 :param _id: nst internal id
722 :param force: Avoid this checking
723 :return: None or raises EngineException with the conflict
724 """
725 # TODO: Check this method
726 if force:
727 return
728 # Get Network Slice Template from Database
729 _filter = self._get_project_filter(session, write=False, show_all=False)
730 _filter["_id"] = _id
731 nst = self.db.get_one("nst", _filter)
732
733 # Search NSIs using NST via nst-ref
734 _filter = self._get_project_filter(session, write=False, show_all=False)
735 _filter["nst-ref"] = nst["id"]
736 if self.db.get_list("nsis", _filter):
737 raise EngineException("There is some NSIS that depends on this NST", http_code=HTTPStatus.CONFLICT)
738
739
740 class PduTopic(BaseTopic):
741 topic = "pdus"
742 topic_msg = "pdu"
743 schema_new = pdu_new_schema
744 schema_edit = pdu_edit_schema
745
746 def __init__(self, db, fs, msg):
747 BaseTopic.__init__(self, db, fs, msg)
748
749 @staticmethod
750 def format_on_new(content, project_id=None, make_public=False):
751 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
752 content["_admin"]["onboardingState"] = "CREATED"
753 content["_admin"]["operationalState"] = "ENABLED"
754 content["_admin"]["usageState"] = "NOT_IN_USE"
755
756 def check_conflict_on_del(self, session, _id, force=False):
757 if force:
758 return
759 # TODO Is it needed to check descriptors _admin.project_read/project_write??
760 _filter = {"vdur.pdu-id": _id}
761 if self.db.get_list("vnfrs", _filter):
762 raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)