[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 # import logging
20 from hashlib import md5
21 from osm_common.dbbase import DbException, deep_update_rfc7396
22 from http import HTTPStatus
23 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
24 from base_topic import BaseTopic, EngineException, get_iterable
25 from osm_im.vnfd import vnfd as vnfd_im
26 from osm_im.nsd import nsd as nsd_im
27 from osm_im.nst import nst as nst_im
28 from pyangbind.lib.serialise import pybindJSONDecoder
29 import pyangbind.lib.pybindJSON as pybindJSON
30
31 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
32
33
34 class DescriptorTopic(BaseTopic):
35
36 def __init__(self, db, fs, msg):
37 BaseTopic.__init__(self, db, fs, msg)
38
39 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
40 # 1. validate again with pyangbind
41 # 1.1. remove internal keys
42 internal_keys = {}
43 for k in ("_id", "_admin"):
44 if k in final_content:
45 internal_keys[k] = final_content.pop(k)
46 storage_params = internal_keys["_admin"].get("storage")
47 serialized = self._validate_input_new(final_content, storage_params, force)
48 # 1.2. modify final_content with a serialized version
49 final_content.clear()
50 final_content.update(serialized)
51 # 1.3. restore internal keys
52 for k, v in internal_keys.items():
53 final_content[k] = v
54
55 if force:
56 return
57 # 2. check that this id is not present
58 if "id" in edit_content:
59 _filter = self._get_project_filter(session, write=False, show_all=False)
60 _filter["id"] = final_content["id"]
61 _filter["_id.neq"] = _id
62 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
63 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
64 final_content["id"]),
65 HTTPStatus.CONFLICT)
66
67 @staticmethod
68 def format_on_new(content, project_id=None, make_public=False):
69 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
70 content["_admin"]["onboardingState"] = "CREATED"
71 content["_admin"]["operationalState"] = "DISABLED"
72 content["_admin"]["usageState"] = "NOT_IN_USE"
73
74 def delete(self, session, _id, force=False, dry_run=False):
75 """
76 Delete item by its internal _id
77 :param session: contains the used login username, working project, and admin rights
78 :param _id: server internal id
79 :param force: indicates if deletion must be forced in case of conflict
80 :param dry_run: make checking but do not delete
81 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
82 """
83 # TODO add admin to filter, validate rights
84 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
85 if dry_run:
86 return
87 v = self.db.del_one(self.topic, {"_id": _id})
88 self.fs.file_delete(_id, ignore_non_exist=True)
89 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
90 self._send_msg("delete", {"_id": _id})
91 return v
92
93 @staticmethod
94 def get_one_by_id(db, session, topic, id):
95 # find owned by this project
96 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
97 _filter["id"] = id
98 desc_list = db.get_list(topic, _filter)
99 if len(desc_list) == 1:
100 return desc_list[0]
101 elif len(desc_list) > 1:
102 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
103 HTTPStatus.CONFLICT)
104
105 # not found any: try to find public
106 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
107 _filter["id"] = id
108 desc_list = db.get_list(topic, _filter)
109 if not desc_list:
110 raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
111 elif len(desc_list) == 1:
112 return desc_list[0]
113 else:
114             raise DbException("Found more than one public {} with id='{}'; and none belonging to this project".format(
115 topic[:-1], id), HTTPStatus.CONFLICT)
116
117 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
118 """
119         Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal procedure.
120         Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
121         (self.upload_content)
122         :param rollback: list where items created in the database are appended, in case a rollback needs to be done
123 :param session: contains the used login username and working project
124 :param indata: data to be inserted
125 :param kwargs: used to override the indata descriptor
126 :param headers: http request headers
127         :param force: if True, avoid some dependency checks
128 :param make_public: Make the created descriptor public to all projects
129 :return: _id: identity of the inserted data.
130 """
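        # Illustrative two-step SOL005 onboarding flow (the endpoint paths below are assumptions, shown only
        # to place this method in context):
        #   1) POST .../vnf_packages  {"userDefinedData": {...}}   -> this method creates the empty entry
        #   2) PUT  .../vnf_packages/<_id>/package_content         -> handled by self.upload_content()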
131
132 try:
133 # _remove_envelop
134 if indata:
135 if "userDefinedData" in indata:
136 indata = indata['userDefinedData']
137
138 # Override descriptor with query string kwargs
139 self._update_input_with_kwargs(indata, kwargs)
140 # uncomment when this method is implemented.
141 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
142 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
143
144 content = {"_admin": {"userDefinedData": indata}}
145 self.format_on_new(content, session["project_id"], make_public=make_public)
146 _id = self.db.create(self.topic, content)
147 rollback.append({"topic": self.topic, "_id": _id})
148 return _id
149 except ValidationError as e:
150 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
151
152 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
153 """
154         Used for receiving the package content in chunks (with a transaction_id header and/or a gzip file); it stores and extracts it
155 :param session: session
156 :param _id : the nsd,vnfd is already created, this is the id
157 :param indata: http body request
158 :param kwargs: user query string to override parameters. NOT USED
159 :param headers: http request headers
160 :param force: to be more tolerant with validation
161         :return: True if the package is completely uploaded, or False if partial content has been uploaded
162 Raise exception on error
163 """
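        # Illustrative request headers for a chunked upload (example values only, not normative):
        #   Content-Filename: vnfd_pkg.tar.gz
        #   Content-Range:    bytes 0-1048575/2097152    (start-end/total, parsed below)
        #   Content-File-MD5: <hex md5 of the complete file, verified once the last chunk arrives>
        #   Content-Type:     application/gzip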
164 # Check that _id exists and it is valid
165 current_desc = self.show(session, _id)
166
167 content_range_text = headers.get("Content-Range")
168 expected_md5 = headers.get("Content-File-MD5")
169 compressed = None
170 content_type = headers.get("Content-Type")
171         if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
172                              "application/zip" in content_type):
173 compressed = "gzip"
174 filename = headers.get("Content-Filename")
175 if not filename:
176 filename = "package.tar.gz" if compressed else "package"
177 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
178 file_pkg = None
179 error_text = ""
180 try:
181 if content_range_text:
182 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
183 if content_range[0] != "bytes": # TODO check x<y not negative < total....
184 raise IndexError()
185 start = int(content_range[1])
186 end = int(content_range[2]) + 1
187 total = int(content_range[3])
188 else:
189 start = 0
190             temp_folder = _id + "_"  # all the content is uploaded here and, if ok, the folder is renamed from '<id>_' to '<id>'
191
192 if start:
193 if not self.fs.file_exists(temp_folder, 'dir'):
194 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
195 else:
196 self.fs.file_delete(temp_folder, ignore_non_exist=True)
197 self.fs.mkdir(temp_folder)
198
199 storage = self.fs.get_params()
200 storage["folder"] = _id
201
202 file_path = (temp_folder, filename)
203 if self.fs.file_exists(file_path, 'file'):
204 file_size = self.fs.file_size(file_path)
205 else:
206 file_size = 0
207 if file_size != start:
208 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
209 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
210 file_pkg = self.fs.file_open(file_path, 'a+b')
211 if isinstance(indata, dict):
212 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
213 file_pkg.write(indata_text.encode(encoding="utf-8"))
214 else:
215 indata_len = 0
216 while True:
217 indata_text = indata.read(4096)
218 indata_len += len(indata_text)
219 if not indata_text:
220 break
221 file_pkg.write(indata_text)
222 if content_range_text:
223 if indata_len != end-start:
224 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
225 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
226 if end != total:
227 # TODO update to UPLOADING
228 return False
229
230 # PACKAGE UPLOADED
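            # The block below verifies the optional Content-File-MD5 header against the whole stored file.
            # A client could compute the expected value like this (illustrative sketch, not part of this module):
            #   import hashlib
            #   with open("vnfd_pkg.tar.gz", "rb") as f:
            #       expected_md5 = hashlib.md5(f.read()).hexdigest()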
231 if expected_md5:
232 file_pkg.seek(0, 0)
233 file_md5 = md5()
234 chunk_data = file_pkg.read(1024)
235 while chunk_data:
236 file_md5.update(chunk_data)
237 chunk_data = file_pkg.read(1024)
238 if expected_md5 != file_md5.hexdigest():
239 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
240 file_pkg.seek(0, 0)
241 if compressed == "gzip":
242 tar = tarfile.open(mode='r', fileobj=file_pkg)
243 descriptor_file_name = None
244 for tarinfo in tar:
245 tarname = tarinfo.name
246 tarname_path = tarname.split("/")
247                     if not tarname_path[0] or ".." in tarname_path:  # an empty first element means an absolute path
248 raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
249 if len(tarname_path) == 1 and not tarinfo.isdir():
250 raise EngineException("All files must be inside a dir for package descriptor tar.gz")
251 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
252 storage["pkg-dir"] = tarname_path[0]
253 if len(tarname_path) == 2:
254 if descriptor_file_name:
255 raise EngineException(
256 "Found more than one descriptor file at package descriptor tar.gz")
257 descriptor_file_name = tarname
258 if not descriptor_file_name:
259 raise EngineException("Not found any descriptor file at package descriptor tar.gz")
260 storage["descriptor"] = descriptor_file_name
261 storage["zipfile"] = filename
262 self.fs.file_extract(tar, temp_folder)
263 with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
264 content = descriptor_file.read()
265 else:
266 content = file_pkg.read()
267 storage["descriptor"] = descriptor_file_name = filename
268
269 if descriptor_file_name.endswith(".json"):
270 error_text = "Invalid json format "
271                 indata = json.loads(content)
272 else:
273 error_text = "Invalid yaml format "
274                 indata = yaml.safe_load(content)
275
276 current_desc["_admin"]["storage"] = storage
277 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
278 current_desc["_admin"]["operationalState"] = "ENABLED"
279
280 indata = self._remove_envelop(indata)
281
282 # Override descriptor with query string kwargs
283 if kwargs:
284 self._update_input_with_kwargs(indata, kwargs)
285 # it will call overrides method at VnfdTopic or NsdTopic
286 # indata = self._validate_input_edit(indata, force=force)
287
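            # deep_update_rfc7396 applies RFC 7396 (JSON Merge Patch) semantics: nested dicts are merged
            # recursively, a None value removes the key, and any other value replaces it. Tiny illustrative example:
            #   base  = {"name": "old", "_admin": {"operationalState": "DISABLED"}}
            #   patch = {"name": "new"}
            #   deep_update_rfc7396(base, patch)  # now base["name"] == "new" and base["_admin"] is kept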
288 deep_update_rfc7396(current_desc, indata)
289 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
290 self.db.replace(self.topic, _id, current_desc)
291 self.fs.dir_rename(temp_folder, _id)
292
293 indata["_id"] = _id
294 self._send_msg("created", indata)
295
296 # TODO if descriptor has changed because kwargs update content and remove cached zip
297 # TODO if zip is not present creates one
298 return True
299
300 except EngineException:
301 raise
302 except IndexError:
303 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
304 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
305 except IOError as e:
306 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
307 except tarfile.ReadError as e:
308 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
309 except (ValueError, yaml.YAMLError) as e:
310 raise EngineException(error_text + str(e))
311 except ValidationError as e:
312 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
313 finally:
314 if file_pkg:
315 file_pkg.close()
316
317 def get_file(self, session, _id, path=None, accept_header=None):
318 """
319 Return the file content of a vnfd or nsd
320 :param session: contains the used login username and working project
321 :param _id: Identity of the vnfd, nsd
322 :param path: artifact path or "$DESCRIPTOR" or None
323         :param accept_header: Content of the Accept header. Must contain application/zip and/or text/plain
324 :return: opened file plus Accept format or raises an exception
325 """
326 accept_text = accept_zip = False
327 if accept_header:
328 if 'text/plain' in accept_header or '*/*' in accept_header:
329 accept_text = True
330 if 'application/zip' in accept_header or '*/*' in accept_header:
331 accept_zip = 'application/zip'
332 elif 'application/gzip' in accept_header:
333 accept_zip = 'application/gzip'
334
335 if not accept_text and not accept_zip:
336 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
337 http_code=HTTPStatus.NOT_ACCEPTABLE)
338
339 content = self.show(session, _id)
340 if content["_admin"]["onboardingState"] != "ONBOARDED":
341 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
342 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
343 http_code=HTTPStatus.CONFLICT)
344 storage = content["_admin"]["storage"]
345 if path is not None and path != "$DESCRIPTOR": # artifacts
346 if not storage.get('pkg-dir'):
347                 raise EngineException("Package does not contain artifacts", http_code=HTTPStatus.BAD_REQUEST)
348 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
349 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
350 return folder_content, "text/plain"
351 # TODO manage folders in http
352 else:
353 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
354 "application/octet-stream"
355
356         #  pkgtype      accepts ZIP   accepts TEXT   -> result
357         #  many files   yes           X              -> zip
358         #               no            yes            -> error
359         #  one file     yes           no             -> zip
360         #               X             yes            -> text
361
362 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
363 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
364 elif storage.get('pkg-dir') and not accept_zip:
365             raise EngineException("Packages that contain several files need to be retrieved with an "
366                                   "'application/zip' Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
367 else:
368 if not storage.get('zipfile'):
369 # TODO generate zipfile if not present
370 raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
371 "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
372 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
373
374 def pyangbind_validation(self, item, data, force=False):
375 try:
376 if item == "vnfds":
377 myvnfd = vnfd_im()
378 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
379 path_helper=True, skip_unknown=force)
380 out = pybindJSON.dumps(myvnfd, mode="ietf")
381 elif item == "nsds":
382 mynsd = nsd_im()
383 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
384 path_helper=True, skip_unknown=force)
385 out = pybindJSON.dumps(mynsd, mode="ietf")
386 elif item == "nsts":
387 mynst = nst_im()
388 pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
389 path_helper=True, skip_unknown=force)
390 out = pybindJSON.dumps(mynst, mode="ietf")
391 else:
392 raise EngineException("Not possible to validate '{}' item".format(item),
393 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
394
395 desc_out = self._remove_envelop(yaml.safe_load(out))
396 return desc_out
397
398 except Exception as e:
399 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
400 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
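    # Rough usage sketch of the validation above (the descriptor content is a made-up minimal example):
    #   topic = VnfdTopic(db, fs, msg)
    #   clean = topic.pyangbind_validation("vnfds", {"id": "cirros_vnfd", "name": "cirros", ...})
    # The data is wrapped into its catalog envelope, loaded into the osm_im model (skip_unknown=force
    # relaxes strictness), serialized back as IETF JSON and returned with the envelope removed.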
401
402
403 class VnfdTopic(DescriptorTopic):
404 topic = "vnfds"
405 topic_msg = "vnfd"
406
407 def __init__(self, db, fs, msg):
408 DescriptorTopic.__init__(self, db, fs, msg)
409
410 @staticmethod
411 def _remove_envelop(indata=None):
412 if not indata:
413 return {}
414 clean_indata = indata
415 if clean_indata.get('vnfd:vnfd-catalog'):
416 clean_indata = clean_indata['vnfd:vnfd-catalog']
417 elif clean_indata.get('vnfd-catalog'):
418 clean_indata = clean_indata['vnfd-catalog']
419 if clean_indata.get('vnfd'):
420 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
421 raise EngineException("'vnfd' must be a list of only one element")
422 clean_indata = clean_indata['vnfd'][0]
423 elif clean_indata.get('vnfd:vnfd'):
424 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
425 raise EngineException("'vnfd:vnfd' must be a list of only one element")
426 clean_indata = clean_indata['vnfd:vnfd'][0]
427 return clean_indata
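    # Examples of the envelopes accepted above (illustrative; <descriptor> stands for the plain vnfd dict):
    #   {"vnfd:vnfd-catalog": {"vnfd": [<descriptor>]}}  ->  <descriptor>
    #   {"vnfd-catalog": {"vnfd": [<descriptor>]}}       ->  <descriptor>
    #   {"vnfd:vnfd": [<descriptor>]}                    ->  <descriptor>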
428
429 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
430 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
431
432 # set type of vnfd
433 contains_pdu = False
434 contains_vdu = False
435 for vdu in get_iterable(final_content.get("vdu")):
436 if vdu.get("pdu-type"):
437 contains_pdu = True
438 else:
439 contains_vdu = True
440 if contains_pdu:
441 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
442 elif contains_vdu:
443 final_content["_admin"]["type"] = "vnfd"
444         # if it contains neither vdu nor pdu, do not set the type
445
446 def check_conflict_on_del(self, session, _id, force=False):
447 """
448         Check that there is no NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
449         that the VNFD can be public and used by NSDs of other projects. Also check that there are no deployments
450         (VNFRs) that use this VNFD
451         :param session:
452         :param _id: vnfd internal id
453 :param force: Avoid this checking
454 :return: None or raises EngineException with the conflict
455 """
456 if force:
457 return
458 descriptor = self.db.get_one("vnfds", {"_id": _id})
459 descriptor_id = descriptor.get("id")
460 if not descriptor_id: # empty vnfd not uploaded
461 return
462
463 _filter = self._get_project_filter(session, write=False, show_all=False)
464 # check vnfrs using this vnfd
465 _filter["vnfd-id"] = _id
466 if self.db.get_list("vnfrs", _filter):
467 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
468 del _filter["vnfd-id"]
469 # check NSD using this VNFD
470 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
471 if self.db.get_list("nsds", _filter):
472             raise EngineException("There is some NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
473
474 def _validate_input_new(self, indata, storage_params, force=False):
475 indata = self.pyangbind_validation("vnfds", indata, force)
476 # Cross references validation in the descriptor
477 if indata.get("vdu"):
478 if not indata.get("mgmt-interface"):
479 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
480 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
481 if indata["mgmt-interface"].get("cp"):
482 for cp in get_iterable(indata.get("connection-point")):
483 if cp["name"] == indata["mgmt-interface"]["cp"]:
484 break
485 else:
486 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
487 .format(indata["mgmt-interface"]["cp"]),
488 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
489
490 for vdu in get_iterable(indata.get("vdu")):
491 for interface in get_iterable(vdu.get("interface")):
492 if interface.get("external-connection-point-ref"):
493 for cp in get_iterable(indata.get("connection-point")):
494 if cp["name"] == interface["external-connection-point-ref"]:
495 break
496 else:
497 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
498 "must match an existing connection-point"
499 .format(vdu["id"], interface["name"],
500 interface["external-connection-point-ref"]),
501 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
502
503 elif interface.get("internal-connection-point-ref"):
504 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
505 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
506 break
507 else:
508 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
509 "must match an existing vdu:internal-connection-point"
510 .format(vdu["id"], interface["name"],
511 interface["internal-connection-point-ref"]),
512 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
513             # Validate that if the descriptor contains charms, the artifact folder _admin.storage."pkg-dir" is not None
514 if vdu.get("vdu-configuration"):
515 if vdu["vdu-configuration"].get("juju"):
516 if not self._validate_package_folders(storage_params, 'charms'):
517 raise EngineException("Charm defined in vnf[id={}]:vdu[id={}] but not present in "
518 "package".format(indata["id"], vdu["id"]))
519             # Validate that if the descriptor contains cloud-init, the artifact folder _admin.storage."pkg-dir" is not None
520 if vdu.get("cloud-init-file"):
521 if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
522 raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
523 "package".format(indata["id"], vdu["id"]))
524         # Validate that if the descriptor contains charms, the artifact folder _admin.storage."pkg-dir" is not None
525 if indata.get("vnf-configuration"):
526 if indata["vnf-configuration"].get("juju"):
527 if not self._validate_package_folders(storage_params, 'charms'):
528 raise EngineException("Charm defined in vnf[id={}] but not present in "
529 "package".format(indata["id"]))
530 vld_names = [] # For detection of duplicated VLD names
531 for ivld in get_iterable(indata.get("internal-vld")):
532 # BEGIN Detection of duplicated VLD names
533 ivld_name = ivld["name"]
534 if ivld_name in vld_names:
535 raise EngineException("Duplicated VLD name '{}' in vnfd[id={}]:internal-vld[id={}]"
536 .format(ivld["name"], indata["id"], ivld["id"]),
537 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
538 else:
539 vld_names.append(ivld_name)
540 # END Detection of duplicated VLD names
541 for icp in get_iterable(ivld.get("internal-connection-point")):
542 icp_mark = False
543 for vdu in get_iterable(indata.get("vdu")):
544 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
545 if icp["id-ref"] == internal_cp["id"]:
546 icp_mark = True
547 break
548 if icp_mark:
549 break
550 else:
551 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
552 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
553 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
554 if ivld.get("ip-profile-ref"):
555 for ip_prof in get_iterable(indata.get("ip-profiles")):
556 if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")):
557 break
558 else:
559 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
560 ivld["id"], ivld["ip-profile-ref"]),
561 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
562 for mp in get_iterable(indata.get("monitoring-param")):
563 if mp.get("vdu-monitoring-param"):
564 mp_vmp_mark = False
565 for vdu in get_iterable(indata.get("vdu")):
566 for vmp in get_iterable(vdu.get("monitoring-param")):
567 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
568 mp["vdu-monitoring-param"]["vdu-ref"]:
569 mp_vmp_mark = True
570 break
571 if mp_vmp_mark:
572 break
573 else:
574 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
575 "defined at vdu[id='{}'] or vdu does not exist"
576 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
577 mp["vdu-monitoring-param"]["vdu-ref"]),
578 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
579 elif mp.get("vdu-metric"):
580 mp_vm_mark = False
581 for vdu in get_iterable(indata.get("vdu")):
582 if vdu.get("vdu-configuration"):
583 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
584 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
585 mp["vdu-metric"]["vdu-ref"]:
586 mp_vm_mark = True
587 break
588 if mp_vm_mark:
589 break
590 else:
591 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
592 "vdu[id='{}'] or vdu does not exist"
593 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
594 mp["vdu-metric"]["vdu-ref"]),
595 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
596
597 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
598 for sp in get_iterable(sgd.get("scaling-policy")):
599 for sc in get_iterable(sp.get("scaling-criteria")):
600 for mp in get_iterable(indata.get("monitoring-param")):
601 if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
602 break
603 else:
604 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
605 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
606 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
607 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
608 for sgd_vdu in get_iterable(sgd.get("vdu")):
609 sgd_vdu_mark = False
610 for vdu in get_iterable(indata.get("vdu")):
611 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
612 sgd_vdu_mark = True
613 break
614 if sgd_vdu_mark:
615 break
616 else:
617 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
618 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
619 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
620 for sca in get_iterable(sgd.get("scaling-config-action")):
621 if not indata.get("vnf-configuration"):
622 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
623 "scaling-group-descriptor[name='{}']:scaling-config-action"
624 .format(sgd["name"]),
625 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
626 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
627 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
628 break
629 else:
630 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
631 "primitive-name-ref='{}' does not match any "
632 "vnf-configuration:config-primitive:name"
633 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
634 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
635 return indata
636
637 def _validate_input_edit(self, indata, force=False):
638         # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
639 return indata
640
641 def _validate_package_folders(self, storage_params, folder, file=None):
642 if not storage_params or not storage_params.get("pkg-dir"):
643 return False
644 else:
645 if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
646 f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
647 else:
648 f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
649 if file:
650 return self.fs.file_exists("{}/{}".format(f, file), 'file')
651 else:
652 if self.fs.file_exists(f, 'dir'):
653 if self.fs.dir_ls(f):
654 return True
655 return False
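    # Package layout assumed by the folder checks above (illustrative example, the names are made up):
    #   <pkg-dir>/                    e.g. "cirros_vnf"
    #       cirros_vnfd.yaml          the descriptor file
    #       charms/<charm-name>/      required when vnf-configuration or vdu-configuration uses juju
    #       cloud_init/<file>         required when a vdu declares cloud-init-file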
656
657
658 class NsdTopic(DescriptorTopic):
659 topic = "nsds"
660 topic_msg = "nsd"
661
662 def __init__(self, db, fs, msg):
663 DescriptorTopic.__init__(self, db, fs, msg)
664
665 @staticmethod
666 def _remove_envelop(indata=None):
667 if not indata:
668 return {}
669 clean_indata = indata
670
671 if clean_indata.get('nsd:nsd-catalog'):
672 clean_indata = clean_indata['nsd:nsd-catalog']
673 elif clean_indata.get('nsd-catalog'):
674 clean_indata = clean_indata['nsd-catalog']
675 if clean_indata.get('nsd'):
676 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
677 raise EngineException("'nsd' must be a list of only one element")
678 clean_indata = clean_indata['nsd'][0]
679 elif clean_indata.get('nsd:nsd'):
680 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
681 raise EngineException("'nsd:nsd' must be a list of only one element")
682 clean_indata = clean_indata['nsd:nsd'][0]
683 return clean_indata
684
685 def _validate_input_new(self, indata, storage_params, force=False):
686 indata = self.pyangbind_validation("nsds", indata, force)
687 # Cross references validation in the descriptor
688         # TODO validate that if it contains cloud-init-file or charms, the artifact _admin.storage."pkg-dir" is not None
689 for vld in get_iterable(indata.get("vld")):
690 if vld.get("mgmt-network") and vld.get("ip-profile-ref"):
691 raise EngineException("Error at vld[id='{}']:ip-profile-ref"
692 " You cannot set an ip-profile when mgmt-network is True"
693 .format(vld["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
694 for vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
695 for constituent_vnfd in get_iterable(indata.get("constituent-vnfd")):
696 if vnfd_cp["member-vnf-index-ref"] == constituent_vnfd["member-vnf-index"]:
697 if vnfd_cp.get("vnfd-id-ref") and vnfd_cp["vnfd-id-ref"] != constituent_vnfd["vnfd-id-ref"]:
698 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[vnfd-id-ref='{}'] "
699 "does not match constituent-vnfd[member-vnf-index='{}']:vnfd-id-ref"
700 " '{}'".format(vld["id"], vnfd_cp["vnfd-id-ref"],
701 constituent_vnfd["member-vnf-index"],
702 constituent_vnfd["vnfd-id-ref"]),
703 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
704 break
705 else:
706 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
707 "does not match any constituent-vnfd:member-vnf-index"
708 .format(vld["id"], vnfd_cp["member-vnf-index-ref"]),
709 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
710 return indata
711
712 def _validate_input_edit(self, indata, force=False):
713         # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
714 return indata
715
716 def _check_descriptor_dependencies(self, session, descriptor, force=False):
717 """
718         Check that the dependent descriptors exist on a new descriptor or edit. Also checks that references to vnfd
719         connection points are valid
720         :param session: client session information
721         :param descriptor: descriptor to be inserted or edited
722         :param force: if True, skip dependency checking
723 :return: None or raises exception
724 """
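        # Minimal NSD fragment exercising these checks (illustrative values):
        #   constituent-vnfd: [{member-vnf-index: "1", vnfd-id-ref: "cirros_vnfd"}]
        #   vld: [{id: "mgmt", vnfd-connection-point-ref: [{member-vnf-index-ref: "1",
        #                                                   vnfd-connection-point-ref: "eth0"}]}]
        # "cirros_vnfd" must exist in the "vnfds" collection and expose a connection-point named "eth0".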
725 if force:
726 return
727 member_vnfd_index = {}
728 if descriptor.get("constituent-vnfd") and not force:
729 for vnf in descriptor["constituent-vnfd"]:
730 vnfd_id = vnf["vnfd-id-ref"]
731 filter_q = self._get_project_filter(session, write=False, show_all=True)
732 filter_q["id"] = vnfd_id
733 vnf_list = self.db.get_list("vnfds", filter_q)
734 if not vnf_list:
735 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
736 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
737 # elif len(vnf_list) > 1:
738 # raise EngineException("More than one vnfd found for id='{}'".format(vnfd_id),
739 # http_code=HTTPStatus.CONFLICT)
740 member_vnfd_index[vnf["member-vnf-index"]] = vnf_list[0]
741
742 # Cross references validation in the descriptor and vnfd connection point validation
743 for vld in get_iterable(descriptor.get("vld")):
744 for referenced_vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
745 # look if this vnfd contains this connection point
746 vnfd = member_vnfd_index.get(referenced_vnfd_cp["member-vnf-index-ref"])
747 if not vnfd:
748 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
749 "does not match any constituent-vnfd:member-vnf-index"
750 .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"]),
751 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
752 for vnfd_cp in get_iterable(vnfd.get("connection-point")):
753 if referenced_vnfd_cp.get("vnfd-connection-point-ref") == vnfd_cp["name"]:
754 break
755 else:
756 raise EngineException(
757 "Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}']:vnfd-"
758                         "connection-point-ref='{}' references a non existing connection-point:name inside vnfd '{}'"
759 .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"],
760 referenced_vnfd_cp["vnfd-connection-point-ref"], vnfd["id"]),
761 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
762
763 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
764 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
765
766 self._check_descriptor_dependencies(session, final_content, force)
767
768 def check_conflict_on_del(self, session, _id, force=False):
769 """
770         Check that there is no NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
771         that the NSD can be public and used by other projects.
772         :param session:
773         :param _id: nsd internal id
774 :param force: Avoid this checking
775 :return: None or raises EngineException with the conflict
776 """
777 if force:
778 return
779 _filter = self._get_project_filter(session, write=False, show_all=False)
780 _filter["nsdId"] = _id
781 if self.db.get_list("nsrs", _filter):
782 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
783
784
785 class NstTopic(DescriptorTopic):
786 topic = "nsts"
787 topic_msg = "nst"
788
789 def __init__(self, db, fs, msg):
790 DescriptorTopic.__init__(self, db, fs, msg)
791
792 @staticmethod
793 def _remove_envelop(indata=None):
794 if not indata:
795 return {}
796 clean_indata = indata
797
798 if clean_indata.get('nst'):
799 if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
800                 raise EngineException("'nst' must be a list of only one element")
801 clean_indata = clean_indata['nst'][0]
802 elif clean_indata.get('nst:nst'):
803 if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
804                 raise EngineException("'nst:nst' must be a list of only one element")
805 clean_indata = clean_indata['nst:nst'][0]
806 return clean_indata
807
808 def _validate_input_edit(self, indata, force=False):
809 # TODO validate with pyangbind, serialize
810 return indata
811
812 def _validate_input_new(self, indata, storage_params, force=False):
813 indata = self.pyangbind_validation("nsts", indata, force)
814 return indata.copy()
815
816 def _check_descriptor_dependencies(self, session, descriptor):
817 """
818 Check that the dependent descriptors exist on a new descriptor or edition
819 :param session: client session information
820 :param descriptor: descriptor to be inserted or edit
821 :return: None or raises exception
822 """
823 if not descriptor.get("netslice-subnet"):
824 return
825 for nsd in descriptor["netslice-subnet"]:
826 nsd_id = nsd["nsd-ref"]
827 filter_q = self._get_project_filter(session, write=False, show_all=True)
828 filter_q["id"] = nsd_id
829 if not self.db.get_list("nsds", filter_q):
830 raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
831 "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
832
833 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
834 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
835
836 self._check_descriptor_dependencies(session, final_content)
837
838 def check_conflict_on_del(self, session, _id, force=False):
839 """
840         Check that there is no NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
841         that the NST can be public and used by other projects.
842 :param session:
843 :param _id: nst internal id
844 :param force: Avoid this checking
845 :return: None or raises EngineException with the conflict
846 """
847 # TODO: Check this method
848 if force:
849 return
850 # Get Network Slice Template from Database
851 _filter = self._get_project_filter(session, write=False, show_all=False)
852 _filter["_id"] = _id
853 nst = self.db.get_one("nsts", _filter)
854
855 # Search NSIs using NST via nst-ref
856 _filter = self._get_project_filter(session, write=False, show_all=False)
857 _filter["nst-ref"] = nst["id"]
858 nsis_list = self.db.get_list("nsis", _filter)
859 for nsi_item in nsis_list:
860 if nsi_item["_admin"].get("nsiState") != "TERMINATED":
861                 raise EngineException("There is some NSI that depends on this NST", http_code=HTTPStatus.CONFLICT)
862
863
864 class PduTopic(BaseTopic):
865 topic = "pdus"
866 topic_msg = "pdu"
867 schema_new = pdu_new_schema
868 schema_edit = pdu_edit_schema
869
870 def __init__(self, db, fs, msg):
871 BaseTopic.__init__(self, db, fs, msg)
872
873 @staticmethod
874 def format_on_new(content, project_id=None, make_public=False):
875 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
876 content["_admin"]["onboardingState"] = "CREATED"
877 content["_admin"]["operationalState"] = "ENABLED"
878 content["_admin"]["usageState"] = "NOT_IN_USE"
879
880 def check_conflict_on_del(self, session, _id, force=False):
881 if force:
882 return
883 # TODO Is it needed to check descriptors _admin.project_read/project_write??
884 _filter = {"vdur.pdu-id": _id}
885 if self.db.get_list("vnfrs", _filter):
886 raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)