fd3e3f492472d527f3aa7c0d2dba9a94944219da
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 import tarfile
4 import yaml
5 import json
6 # import logging
7 from hashlib import md5
8 from osm_common.dbbase import DbException, deep_update_rfc7396
9 from http import HTTPStatus
10 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
11 from base_topic import BaseTopic, EngineException, get_iterable
12 from osm_im.vnfd import vnfd as vnfd_im
13 from osm_im.nsd import nsd as nsd_im
14 from pyangbind.lib.serialise import pybindJSONDecoder
15 import pyangbind.lib.pybindJSON as pybindJSON
16
17 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
18
19
20 class DescriptorTopic(BaseTopic):
21
22 def __init__(self, db, fs, msg):
23 BaseTopic.__init__(self, db, fs, msg)
24
25 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
26 # 1. validate again with pyangbind
27 # 1.1. remove internal keys
28 internal_keys = {}
29 for k in ("_id", "_admin"):
30 if k in final_content:
31 internal_keys[k] = final_content.pop(k)
32 serialized = self._validate_input_new(final_content, force)
33 # 1.2. modify final_content with a serialized version
34 final_content.clear()
35 final_content.update(serialized)
36 # 1.3. restore internal keys
37 for k, v in internal_keys.items():
38 final_content[k] = v
39
40 # 2. check that this id is not present
41 if "id" in edit_content:
42 _filter = self._get_project_filter(session, write=False, show_all=False)
43 _filter["id"] = final_content["id"]
44 _filter["_id.neq"] = _id
45 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
46 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
47 final_content["id"]),
48 HTTPStatus.CONFLICT)
49
50 @staticmethod
51 def format_on_new(content, project_id=None, make_public=False):
52 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
53 content["_admin"]["onboardingState"] = "CREATED"
54 content["_admin"]["operationalState"] = "DISABLED"
55 content["_admin"]["usageState"] = "NOT_IN_USE"
56
57 def delete(self, session, _id, force=False, dry_run=False):
58 """
59 Delete item by its internal _id
60 :param session: contains the used login username, working project, and admin rights
61 :param _id: server internal id
62 :param force: indicates if deletion must be forced in case of conflict
63 :param dry_run: perform the checks but do not delete
64 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
65 """
66 # TODO add admin to filter, validate rights
67 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
68 if dry_run:
69 return
70 v = self.db.del_one(self.topic, {"_id": _id})
71 self.fs.file_delete(_id, ignore_non_exist=True)
72 self._send_msg("delete", {"_id": _id})
73 return v
74
75 @staticmethod
76 def get_one_by_id(db, session, topic, id):
77 # find owned by this project
78 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
79 _filter["id"] = id
80 desc_list = db.get_list(topic, _filter)
81 if len(desc_list) == 1:
82 return desc_list[0]
83 elif len(desc_list) > 1:
84 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
85 HTTPStatus.CONFLICT)
86
87 # not found any: try to find public
88 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
89 _filter["id"] = id
90 desc_list = db.get_list(topic, _filter)
91 if not desc_list:
92 raise DbException("No {} found with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
93 elif len(desc_list) == 1:
94 return desc_list[0]
95 else:
96 raise DbException("Found more than one public {} with id='{}' and none belonging to this project".format(
97 topic[:-1], id), HTTPStatus.CONFLICT)
98
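    # Illustrative note (not part of the original module): get_one_by_id() resolves a SOL005 "id" in two phases.
    # A minimal sketch of the lookup order, assuming the filter keys produced by BaseTopic._get_project_filter()
    # (defined in base_topic, not shown here):
    #   1. descriptors owned by the working project:  {"id": id, <project filter with show_all=False>}
    #   2. if none is found, public descriptors:      {"id": id, <project filter with show_all=True>}
    # Exactly one match is returned; zero matches raise NOT_FOUND and several matches raise CONFLICT.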
99 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
100 """
101 Creates a new, almost empty, DISABLED entry in the database. Following SOL005, it does not use the normal procedure:
102 creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
103 (self.upload_content)
104 :param rollback: list where created database items are appended in case a rollback has to be done
105 :param session: contains the used login username and working project
106 :param indata: data to be inserted
107 :param kwargs: used to override the indata descriptor
108 :param headers: http request headers
109 :param force: If True, avoid some dependency checks
110 :param make_public: Make the created descriptor public to all projects
111 :return: _id: identity of the inserted data.
112 """
113
114 try:
115 # _remove_envelop
116 if indata:
117 if "userDefinedData" in indata:
118 indata = indata['userDefinedData']
119
120 # Override descriptor with query string kwargs
121 self._update_input_with_kwargs(indata, kwargs)
122 # uncomment when this method is implemented.
123 # Do not override/validate here: the target is userDefinedData, not a vnfd/nsd descriptor
124 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
125
126 content = {"_admin": {"userDefinedData": indata}}
127 self.format_on_new(content, session["project_id"], make_public=make_public)
128 _id = self.db.create(self.topic, content)
129 rollback.append({"topic": self.topic, "_id": _id})
130 return _id
131 except ValidationError as e:
132 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
133
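    # Illustrative note (not part of the original module): new() only creates the placeholder record; the
    # descriptor itself arrives later through upload_content(). A sketch of the data flow, assuming a SOL005
    # style creation body:
    #   request body:  {"userDefinedData": {"key": "value"}}
    #   stored record: {"_id": <uuid>, "_admin": {"userDefinedData": {"key": "value"},
    #                   "onboardingState": "CREATED", "operationalState": "DISABLED", "usageState": "NOT_IN_USE"}}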
134 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
135 """
136 Used for receiving the package content in chunks (with a transaction_id header and/or a gzip file); it stores and extracts it
137 :param session: session
138 :param _id: id of the already created nsd/vnfd
139 :param indata: http body request
140 :param kwargs: user query string to override parameters. NOT USED
141 :param headers: http request headers
142 :param force: to be more tolerant with validation
143 :return: True if the package has been completely uploaded, or False if only partial content has been uploaded.
144 Raises an exception on error
145 """
146 # Check that _id exists and it is valid
147 current_desc = self.show(session, _id)
148
149 content_range_text = headers.get("Content-Range")
150 expected_md5 = headers.get("Content-File-MD5")
151 compressed = None
152 content_type = headers.get("Content-Type")
153 if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
154 "application/zip" in content_type):
155 compressed = "gzip"
156 filename = headers.get("Content-Filename")
157 if not filename:
158 filename = "package.tar.gz" if compressed else "package"
159 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
160 file_pkg = None
161 error_text = ""
162 try:
163 if content_range_text:
164 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
165 if content_range[0] != "bytes":  # TODO check that start < end <= total and that values are not negative
166 raise IndexError()
167 start = int(content_range[1])
168 end = int(content_range[2]) + 1
169 total = int(content_range[3])
170 else:
171 start = 0
172
173 if start:
174 if not self.fs.file_exists(_id, 'dir'):
175 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
176 else:
177 self.fs.file_delete(_id, ignore_non_exist=True)
178 self.fs.mkdir(_id)
179
180 storage = self.fs.get_params()
181 storage["folder"] = _id
182
183 file_path = (_id, filename)
184 if self.fs.file_exists(file_path, 'file'):
185 file_size = self.fs.file_size(file_path)
186 else:
187 file_size = 0
188 if file_size != start:
189 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
190 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
191 file_pkg = self.fs.file_open(file_path, 'a+b')
192 if isinstance(indata, dict):
193 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
194 file_pkg.write(indata_text.encode(encoding="utf-8"))
195 else:
196 indata_len = 0
197 while True:
198 indata_text = indata.read(4096)
199 indata_len += len(indata_text)
200 if not indata_text:
201 break
202 file_pkg.write(indata_text)
203 if content_range_text:
204 if indata_len != end-start:
205 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
206 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
207 if end != total:
208 # TODO update to UPLOADING
209 return False
210
211 # PACKAGE UPLOADED
212 if expected_md5:
213 file_pkg.seek(0, 0)
214 file_md5 = md5()
215 chunk_data = file_pkg.read(1024)
216 while chunk_data:
217 file_md5.update(chunk_data)
218 chunk_data = file_pkg.read(1024)
219 if expected_md5 != file_md5.hexdigest():
220 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
221 file_pkg.seek(0, 0)
222 if compressed == "gzip":
223 tar = tarfile.open(mode='r', fileobj=file_pkg)
224 descriptor_file_name = None
225 for tarinfo in tar:
226 tarname = tarinfo.name
227 tarname_path = tarname.split("/")
228 if not tarname_path[0] or ".." in tarname_path:  # an empty first component means a name starting with "/"
229 raise EngineException("Absolute paths or '..' are not allowed in the package descriptor tar.gz")
230 if len(tarname_path) == 1 and not tarinfo.isdir():
231 raise EngineException("All files must be inside a folder in the package descriptor tar.gz")
232 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
233 storage["pkg-dir"] = tarname_path[0]
234 if len(tarname_path) == 2:
235 if descriptor_file_name:
236 raise EngineException(
237 "Found more than one descriptor file in the package descriptor tar.gz")
238 descriptor_file_name = tarname
239 if not descriptor_file_name:
240 raise EngineException("No descriptor file found in the package descriptor tar.gz")
241 storage["descriptor"] = descriptor_file_name
242 storage["zipfile"] = filename
243 self.fs.file_extract(tar, _id)
244 with self.fs.file_open((_id, descriptor_file_name), "r") as descriptor_file:
245 content = descriptor_file.read()
246 else:
247 content = file_pkg.read()
248 storage["descriptor"] = descriptor_file_name = filename
249
250 if descriptor_file_name.endswith(".json"):
251 error_text = "Invalid json format "
252 indata = json.loads(content)
253 else:
254 error_text = "Invalid yaml format "
255 indata = yaml.safe_load(content)
256
257 current_desc["_admin"]["storage"] = storage
258 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
259 current_desc["_admin"]["operationalState"] = "ENABLED"
260
261 indata = self._remove_envelop(indata)
262
263 # Override descriptor with query string kwargs
264 if kwargs:
265 self._update_input_with_kwargs(indata, kwargs)
266 # it will call overrides method at VnfdTopic or NsdTopic
267 indata = self._validate_input_new(indata, force=force)
268
269 deep_update_rfc7396(current_desc, indata)
270 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
271 self.db.replace(self.topic, _id, current_desc)
272
273 indata["_id"] = _id
274 self._send_msg("created", indata)
275
276 # TODO if the descriptor has changed because of kwargs, update the content and remove the cached zip
277 # TODO if the zip is not present, create one
278 return True
279
280 except EngineException:
281 raise
282 except IndexError:
283 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
284 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
285 except IOError as e:
286 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
287 except tarfile.ReadError as e:
288 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
289 except (ValueError, yaml.YAMLError) as e:
290 raise EngineException(error_text + str(e))
291 except ValidationError as e:
292 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
293 finally:
294 if file_pkg:
295 file_pkg.close()
296
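    # Illustrative note (not part of the original module): the chunked-upload protocol handled above. A
    # hypothetical three-chunk upload of a 9000-byte package could look like this (header names as read by this
    # method; the URL path is an assumption):
    #   PUT .../<_id>/package_content   Content-Range: bytes 0-2999/9000      -> returns False (partial)
    #   PUT .../<_id>/package_content   Content-Range: bytes 3000-5999/9000   -> returns False (partial)
    #   PUT .../<_id>/package_content   Content-Range: bytes 6000-8999/9000
    #                                   Content-File-MD5: <md5 of whole file> -> returns True (onboarded)
    # Without a Content-Range header the whole body is treated as a single, complete upload.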
297 def get_file(self, session, _id, path=None, accept_header=None):
298 """
299 Return the file content of a vnfd or nsd
300 :param session: contains the used login username and working project
301 :param _id: Identity of the vnfd, nsd
302 :param path: artifact path or "$DESCRIPTOR" or None
303 :param accept_header: Content of Accept header. Must contain application/zip and/or text/plain
304 :return: opened file plus Accept format or raises an exception
305 """
306 accept_text = accept_zip = False
307 if accept_header:
308 if 'text/plain' in accept_header or '*/*' in accept_header:
309 accept_text = True
310 if 'application/zip' in accept_header or '*/*' in accept_header:
311 accept_zip = 'application/zip'
312 elif 'application/gzip' in accept_header:
313 accept_zip = 'application/gzip'
314
315 if not accept_text and not accept_zip:
316 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
317 http_code=HTTPStatus.NOT_ACCEPTABLE)
318
319 content = self.show(session, _id)
320 if content["_admin"]["onboardingState"] != "ONBOARDED":
321 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
322 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
323 http_code=HTTPStatus.CONFLICT)
324 storage = content["_admin"]["storage"]
325 if path is not None and path != "$DESCRIPTOR": # artifacts
326 if not storage.get('pkg-dir'):
327 raise EngineException("Package does not contain artifacts", http_code=HTTPStatus.BAD_REQUEST)
328 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
329 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
330 return folder_content, "text/plain"
331 # TODO manage folders in http
332 else:
333 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
334 "application/octet-stream"
335
336 # pkgtype      accept ZIP  accept TEXT  -> result
337 # manyfiles    yes         X            -> zip
338 #              no          yes          -> error
339 # onefile      yes         no           -> zip
340 #              X           yes          -> text
341
342 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
343 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
344 elif storage.get('pkg-dir') and not accept_zip:
345 raise EngineException("Packages that contain several files must be retrieved with the 'application/zip' "
346 "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
347 else:
348 if not storage.get('zipfile'):
349 # TODO generate zipfile if not present
350 raise EngineException("Only the 'text/plain' Accept header is allowed for this descriptor. To be solved "
351 "in future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
352 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
353
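    # Illustrative note (not part of the original module): typical outcomes of get_file(), assuming the storage
    # metadata written by upload_content() above:
    #   path=None or "$DESCRIPTOR", Accept: text/plain         -> the descriptor file, as text
    #   path=None, multi-file package, Accept: application/zip -> the cached package zipfile
    #   path=<artifact path components>                        -> that file under _admin.storage["pkg-dir"],
    #                                                              as application/octet-stream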
354 def pyangbind_validation(self, item, data, force=False):
355 try:
356 if item == "vnfds":
357 myvnfd = vnfd_im()
358 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
359 path_helper=True, skip_unknown=force)
360 out = pybindJSON.dumps(myvnfd, mode="ietf")
361 elif item == "nsds":
362 mynsd = nsd_im()
363 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
364 path_helper=True, skip_unknown=force)
365 out = pybindJSON.dumps(mynsd, mode="ietf")
366 else:
367 raise EngineException("Not possible to validate '{}' item".format(item),
368 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
369
370 desc_out = self._remove_envelop(yaml.safe_load(out))
371 return desc_out
372
373 except Exception as e:
374 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
375 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
376
377
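# Illustrative helper (not part of the original module): computes the value a client would send in the
# Content-File-MD5 header, mirroring the 1024-byte chunked md5 verification done in upload_content() above.
def _example_content_file_md5(package_path):
    file_md5 = md5()
    with open(package_path, "rb") as package_file:
        chunk_data = package_file.read(1024)
        while chunk_data:
            file_md5.update(chunk_data)
            chunk_data = package_file.read(1024)
    return file_md5.hexdigest()
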
378 class VnfdTopic(DescriptorTopic):
379 topic = "vnfds"
380 topic_msg = "vnfd"
381
382 def __init__(self, db, fs, msg):
383 DescriptorTopic.__init__(self, db, fs, msg)
384
385 @staticmethod
386 def _remove_envelop(indata=None):
387 if not indata:
388 return {}
389 clean_indata = indata
390 if clean_indata.get('vnfd:vnfd-catalog'):
391 clean_indata = clean_indata['vnfd:vnfd-catalog']
392 elif clean_indata.get('vnfd-catalog'):
393 clean_indata = clean_indata['vnfd-catalog']
394 if clean_indata.get('vnfd'):
395 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
396 raise EngineException("'vnfd' must be a list of only one element")
397 clean_indata = clean_indata['vnfd'][0]
398 elif clean_indata.get('vnfd:vnfd'):
399 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
400 raise EngineException("'vnfd:vnfd' must be a list of only one element")
401 clean_indata = clean_indata['vnfd:vnfd'][0]
402 return clean_indata
403
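    # Illustrative note (not part of the original module): all of these hypothetical inputs reduce to the same
    # bare descriptor {"id": "example-vnf"}:
    #   {"vnfd:vnfd-catalog": {"vnfd": [{"id": "example-vnf"}]}}
    #   {"vnfd-catalog": {"vnfd": [{"id": "example-vnf"}]}}
    #   {"vnfd": [{"id": "example-vnf"}]}
    #   {"vnfd:vnfd": [{"id": "example-vnf"}]}
    # A 'vnfd' list with zero or more than one element is rejected with an EngineException.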
404 def check_conflict_on_del(self, session, _id, force=False):
405 """
406 Check that no NSD uses this VNFD. Only NSDs belonging to this project are considered. Note that the VNFD
407 can be public and be used by NSDs of other projects. Also check that there are no deployments (VNFRs)
408 that use this VNFD
409 :param session:
410 :param _id: vnfd internal id
411 :param force: Avoid this checking
412 :return: None or raises EngineException with the conflict
413 """
414 if force:
415 return
416 descriptor = self.db.get_one("vnfds", {"_id": _id})
417 descriptor_id = descriptor.get("id")
418 if not descriptor_id: # empty vnfd not uploaded
419 return
420
421 _filter = self._get_project_filter(session, write=False, show_all=False)
422 # check vnfrs using this vnfd
423 _filter["vnfd-id"] = _id
424 if self.db.get_list("vnfrs", _filter):
425 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
426 del _filter["vnfd-id"]
427 # check NSD using this VNFD
428 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
429 if self.db.get_list("nsds", _filter):
430 raise EngineException("There is some NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
431
432 def _validate_input_new(self, indata, force=False):
433 indata = self.pyangbind_validation("vnfds", indata, force)
434 # Cross references validation in the descriptor
435 if indata.get("vdu"):
436 if not indata.get("mgmt-interface"):
437 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
438 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
439 if indata["mgmt-interface"].get("cp"):
440 for cp in get_iterable(indata.get("connection-point")):
441 if cp["name"] == indata["mgmt-interface"]["cp"]:
442 break
443 else:
444 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
445 .format(indata["mgmt-interface"]["cp"]),
446 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
447
448 for vdu in get_iterable(indata.get("vdu")):
449 for interface in get_iterable(vdu.get("interface")):
450 if interface.get("external-connection-point-ref"):
451 for cp in get_iterable(indata.get("connection-point")):
452 if cp["name"] == interface["external-connection-point-ref"]:
453 break
454 else:
455 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
456 "must match an existing connection-point"
457 .format(vdu["id"], interface["name"],
458 interface["external-connection-point-ref"]),
459 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
460
461 elif interface.get("internal-connection-point-ref"):
462 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
463 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
464 break
465 else:
466 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
467 "must match an existing vdu:internal-connection-point"
468 .format(vdu["id"], interface["name"],
469 interface["internal-connection-point-ref"]),
470 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
471 for ivld in get_iterable(indata.get("internal-vld")):
472 for icp in get_iterable(ivld.get("internal-connection-point")):
473 icp_mark = False
474 for vdu in get_iterable(indata.get("vdu")):
475 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
476 if icp["id-ref"] == internal_cp["id"]:
477 icp_mark = True
478 break
479 if icp_mark:
480 break
481 else:
482 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
483 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
484 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
485 if ivld.get("ip-profile-ref"):
486 for ip_prof in get_iterable(indata.get("ip-profiles")):
487 if ip_prof["name"] == ivld.get("ip-profile-ref"):
488 break
489 else:
490 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
491 ivld["id"], ivld["ip-profile-ref"]),
492 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
493 for mp in get_iterable(indata.get("monitoring-param")):
494 if mp.get("vdu-monitoring-param"):
495 mp_vmp_mark = False
496 for vdu in get_iterable(indata.get("vdu")):
497 for vmp in get_iterable(vdu.get("monitoring-param")):
498 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
499 mp["vdu-monitoring-param"]["vdu-ref"]:
500 mp_vmp_mark = True
501 break
502 if mp_vmp_mark:
503 break
504 else:
505 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
506 "defined at vdu[id='{}'] or vdu does not exist"
507 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
508 mp["vdu-monitoring-param"]["vdu-ref"]),
509 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
510 elif mp.get("vdu-metric"):
511 mp_vm_mark = False
512 for vdu in get_iterable(indata.get("vdu")):
513 if vdu.get("vdu-configuration"):
514 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
515 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
516 mp["vdu-metric"]["vdu-ref"]:
517 mp_vm_mark = True
518 break
519 if mp_vm_mark:
520 break
521 else:
522 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
523 "vdu[id='{}'] or vdu does not exist"
524 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
525 mp["vdu-metric"]["vdu-ref"]),
526 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
527
528 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
529 for sp in get_iterable(sgd.get("scaling-policy")):
530 for sc in get_iterable(sp.get("scaling-criteria")):
531 for mp in get_iterable(indata.get("monitoring-param")):
532 if mp["id"] == sc.get("vnf-monitoring-param-ref"):
533 break
534 else:
535 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
536 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
537 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
538 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
539 for sgd_vdu in get_iterable(sgd.get("vdu")):
540 sgd_vdu_mark = False
541 for vdu in get_iterable(indata.get("vdu")):
542 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
543 sgd_vdu_mark = True
544 break
545 # every vdu-id-ref of the scaling group must match an existing vdu
546 if not sgd_vdu_mark:
547 raise EngineException("scaling-group-descriptor[name='{}']:vdu:vdu-id-ref='{}' does not match "
548 "any vdu"
549 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
550 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
551 for sca in get_iterable(sgd.get("scaling-config-action")):
552 if not indata.get("vnf-configuration"):
553 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
554 "scaling-group-descriptor[name='{}']:scaling-config-action"
555 .format(sgd["name"]),
556 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
557 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
558 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
559 break
560 else:
561 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
562 "primitive-name-ref='{}' does not match any "
563 "vnf-configuration:config-primitive:name"
564 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
565 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
566 # TODO validate that if it contains cloud-init-file or charms, the artifacts exist and _admin.storage."pkg-dir" is not None
567 return indata
568
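    # Illustrative note (not part of the original module): a minimal, hypothetical vnfd fragment that satisfies
    # the cross-reference checks above:
    #   mgmt-interface: {cp: mgmt-cp}
    #   connection-point: [{name: mgmt-cp}]
    #   vdu:
    #   - id: vdu-1
    #     interface: [{name: eth0, external-connection-point-ref: mgmt-cp}]
    # mgmt-interface:cp and every external-connection-point-ref must name an entry of connection-point, while
    # internal-connection-point-ref values must name an internal-connection-point of the same vdu.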
569 def _validate_input_edit(self, indata, force=False):
570 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
571 return indata
572
573
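# Illustrative helper (not part of the original module): the for/else pattern used repeatedly in
# VnfdTopic._validate_input_new() above, extracted as a generic reference check. The names 'reference',
# 'items' and 'key' are hypothetical and exist only for this sketch.
def _example_check_reference(reference, items, key="name"):
    """Pass silently if some element of 'items' has item[key] == reference, otherwise raise EngineException."""
    for item in get_iterable(items):
        if item.get(key) == reference:
            break
    else:
        raise EngineException("'{}' does not match any existing '{}'".format(reference, key),
                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
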
574 class NsdTopic(DescriptorTopic):
575 topic = "nsds"
576 topic_msg = "nsd"
577
578 def __init__(self, db, fs, msg):
579 DescriptorTopic.__init__(self, db, fs, msg)
580
581 @staticmethod
582 def _remove_envelop(indata=None):
583 if not indata:
584 return {}
585 clean_indata = indata
586
587 if clean_indata.get('nsd:nsd-catalog'):
588 clean_indata = clean_indata['nsd:nsd-catalog']
589 elif clean_indata.get('nsd-catalog'):
590 clean_indata = clean_indata['nsd-catalog']
591 if clean_indata.get('nsd'):
592 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
593 raise EngineException("'nsd' must be a list of only one element")
594 clean_indata = clean_indata['nsd'][0]
595 elif clean_indata.get('nsd:nsd'):
596 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
597 raise EngineException("'nsd:nsd' must be a list of only one element")
598 clean_indata = clean_indata['nsd:nsd'][0]
599 return clean_indata
600
601 def _validate_input_new(self, indata, force=False):
602 indata = self.pyangbind_validation("nsds", indata, force)
603 # TODO validate that if it contains cloud-init-file or charms, the artifacts exist and _admin.storage."pkg-dir" is not None
604 return indata
605
606 def _validate_input_edit(self, indata, force=False):
607 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
608 return indata
609
610 def _check_descriptor_dependencies(self, session, descriptor):
611 """
612 Check that the descriptors this one depends on exist, on creation or edition
613 :param session: client session information
614 :param descriptor: descriptor to be inserted or edited
615 :return: None or raises exception
616 """
617 if not descriptor.get("constituent-vnfd"):
618 return
619 for vnf in descriptor["constituent-vnfd"]:
620 vnfd_id = vnf["vnfd-id-ref"]
621 filter_q = self._get_project_filter(session, write=False, show_all=True)
622 filter_q["id"] = vnfd_id
623 if not self.db.get_list("vnfds", filter_q):
624 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
625 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
626
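    # Illustrative note (not part of the original module): for a hypothetical NSD fragment
    #   constituent-vnfd: [{member-vnf-index: 1, vnfd-id-ref: example-vnf}]
    # the method above issues, per entry, a query such as {"id": "example-vnf", <project filter, show_all=True>}
    # against the "vnfds" collection and raises a CONFLICT if nothing is found.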
627 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
628 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
629
630 if not force:
631 self._check_descriptor_dependencies(session, final_content)
632
633 def check_conflict_on_del(self, session, _id, force=False):
634 """
635 Check that no NSR uses this NSD. Only NSRs belonging to this project are considered. Note
636 that the NSD can be public and be used by other projects.
637 :param session:
638 :param _id: nsd internal id
639 :param force: Avoid this checking
640 :return: None or raises EngineException with the conflict
641 """
642 if force:
643 return
644 _filter = self._get_project_filter(session, write=False, show_all=False)
645 _filter["nsdId"] = _id
646 if self.db.get_list("nsrs", _filter):
647 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
648
649
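# Illustrative helper (not part of the original module): shows how the catalog envelope is stripped by
# NsdTopic._remove_envelop(). The sample descriptor content is hypothetical.
def _example_remove_nsd_envelop():
    wrapped = {"nsd:nsd-catalog": {"nsd": [{"id": "example-ns", "name": "example"}]}}
    bare = NsdTopic._remove_envelop(wrapped)
    assert bare == {"id": "example-ns", "name": "example"}
    return bare
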
650 class PduTopic(BaseTopic):
651 topic = "pdus"
652 topic_msg = "pdu"
653 schema_new = pdu_new_schema
654 schema_edit = pdu_edit_schema
655
656 def __init__(self, db, fs, msg):
657 BaseTopic.__init__(self, db, fs, msg)
658
659 @staticmethod
660 def format_on_new(content, project_id=None, make_public=False):
661 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
662 content["_admin"]["onboardingState"] = "CREATED"
663 content["_admin"]["operationalState"] = "DISABLED"
664 content["_admin"]["usageState"] = "NOT_IN_USE"
665
666 def check_conflict_on_del(self, session, _id, force=False):
667 if force:
668 return
669 # TODO Is it needed to check descriptors _admin.project_read/project_write??
670 _filter = {"vdur.pdu-id": _id}
671 if self.db.get_list("vnfrs", _filter):
672 raise EngineException("There is some VNFR that uses this PDU", http_code=HTTPStatus.CONFLICT)
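
# Illustrative client-side sketch (not part of the original module): the two-step onboarding flow served by
# DescriptorTopic.new() and DescriptorTopic.upload_content() above, using the 'requests' library. The base URL,
# the SOL005 paths, the token handling and the response field names are assumptions made only for this example.
def _example_onboard_vnf_package(base_url, token, package_path):
    import requests

    headers = {"Authorization": "Bearer {}".format(token)}
    # step 1: create the almost empty, DISABLED package entry (served by DescriptorTopic.new)
    create_resp = requests.post("{}/vnfpkgm/v1/vnf_packages".format(base_url),
                                json={"userDefinedData": {"uploaded-by": "example"}},
                                headers=headers)
    create_resp.raise_for_status()
    pkg_id = create_resp.json()["id"]

    # step 2: upload the package content in one request (served by DescriptorTopic.upload_content),
    # letting the server verify the md5 computed with the _example_content_file_md5 sketch above
    upload_headers = dict(headers)
    upload_headers["Content-Type"] = "application/gzip"
    upload_headers["Content-Filename"] = "package.tar.gz"
    upload_headers["Content-File-MD5"] = _example_content_file_md5(package_path)
    with open(package_path, "rb") as package_file:
        upload_resp = requests.put("{}/vnfpkgm/v1/vnf_packages/{}/package_content".format(base_url, pkg_id),
                                   data=package_file, headers=upload_headers)
    upload_resp.raise_for_status()
    return pkg_id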