Bug 619 - Missing check of VNF package content when the descriptor mentions a charm
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 # import logging
20 from hashlib import md5
21 from osm_common.dbbase import DbException, deep_update_rfc7396
22 from http import HTTPStatus
23 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
24 from base_topic import BaseTopic, EngineException, get_iterable
25 from osm_im.vnfd import vnfd as vnfd_im
26 from osm_im.nsd import nsd as nsd_im
27 from osm_im.nst import nst as nst_im
28 from pyangbind.lib.serialise import pybindJSONDecoder
29 import pyangbind.lib.pybindJSON as pybindJSON
30
31 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
32
33
34 class DescriptorTopic(BaseTopic):
35
36 def __init__(self, db, fs, msg):
37 BaseTopic.__init__(self, db, fs, msg)
38
39 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
40 # 1. validate again with pyangbind
41 # 1.1. remove internal keys
42 internal_keys = {}
43 for k in ("_id", "_admin"):
44 if k in final_content:
45 internal_keys[k] = final_content.pop(k)
46 storage_params = internal_keys["_admin"].get("storage")
47 serialized = self._validate_input_new(final_content, storage_params, force)
48 # 1.2. modify final_content with a serialized version
49 final_content.clear()
50 final_content.update(serialized)
51 # 1.3. restore internal keys
52 for k, v in internal_keys.items():
53 final_content[k] = v
54
55 if force:
56 return
57 # 2. check that this id is not present
58 if "id" in edit_content:
59 _filter = self._get_project_filter(session, write=False, show_all=False)
60 _filter["id"] = final_content["id"]
61 _filter["_id.neq"] = _id
62 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
63 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
64 final_content["id"]),
65 HTTPStatus.CONFLICT)
66
67 @staticmethod
68 def format_on_new(content, project_id=None, make_public=False):
69 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
70 content["_admin"]["onboardingState"] = "CREATED"
71 content["_admin"]["operationalState"] = "DISABLED"
72 content["_admin"]["usageState"] = "NOT_IN_USE"
73
74 def delete(self, session, _id, force=False, dry_run=False):
75 """
76 Delete item by its internal _id
77 :param session: contains the used login username, working project, and admin rights
78 :param _id: server internal id
79 :param force: indicates if deletion must be forced in case of conflict
80 :param dry_run: perform the checks but do not delete
81 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
82 """
83 # TODO add admin to filter, validate rights
84 v = BaseTopic.delete(self, session, _id, force, dry_run=True)
85 if dry_run:
86 return
87 v = self.db.del_one(self.topic, {"_id": _id})
88 self.fs.file_delete(_id, ignore_non_exist=True)
89 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
90 self._send_msg("delete", {"_id": _id})
91 return v
92
93 @staticmethod
94 def get_one_by_id(db, session, topic, id):
95 # find owned by this project
96 _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
97 _filter["id"] = id
98 desc_list = db.get_list(topic, _filter)
99 if len(desc_list) == 1:
100 return desc_list[0]
101 elif len(desc_list) > 1:
102 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
103 HTTPStatus.CONFLICT)
104
105 # not found any: try to find public
106 _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
107 _filter["id"] = id
108 desc_list = db.get_list(topic, _filter)
109 if not desc_list:
110 raise DbException("No {} found with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
111 elif len(desc_list) == 1:
112 return desc_list[0]
113 else:
114 raise DbException("Found more than one public {} with id='{}'; and none belonging to this project".format(
115 topic[:-1], id), HTTPStatus.CONFLICT)
116
117 def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
118 """
119 Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal procedure:
120 creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
121 (self.upload_content)
122 :param rollback: list where created database items are appended, in case a rollback needs to be done
123 :param session: contains the used login username and working project
124 :param indata: data to be inserted
125 :param kwargs: used to override the indata descriptor
126 :param headers: http request headers
127 :param force: If True, avoid some dependency checks
128 :param make_public: Make the created descriptor public to all projects
129 :return: _id: identity of the inserted data.
130 """
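# SOL005-style two-step onboarding (sketch, endpoint names indicative): a first request creates
# this empty entry and returns its _id; a second request uploads the actual package content,
# which is handled below by upload_content().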
131
132 try:
133 # _remove_envelop
134 if indata:
135 if "userDefinedData" in indata:
136 indata = indata['userDefinedData']
137
138 # Override descriptor with query string kwargs
139 self._update_input_with_kwargs(indata, kwargs)
140 # uncomment when this method is implemented.
141 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
142 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
143
144 content = {"_admin": {"userDefinedData": indata}}
145 self.format_on_new(content, session["project_id"], make_public=make_public)
146 _id = self.db.create(self.topic, content)
147 rollback.append({"topic": self.topic, "_id": _id})
148 return _id
149 except ValidationError as e:
150 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
151
152 def upload_content(self, session, _id, indata, kwargs, headers, force=False):
153 """
154 Used for receiving content by chunks (with a transaction_id header and/or a gzip file); it will store and extract it
155 :param session: session
156 :param _id : the nsd,vnfd is already created, this is the id
157 :param indata: http body request
158 :param kwargs: user query string to override parameters. NOT USED
159 :param headers: http request headers
160 :param force: to be more tolerant with validation
161 :return: True if the package is completely uploaded or False if only partial content has been uploaded.
162 Raise exception on error
163 """
164 # Check that _id exists and it is valid
165 current_desc = self.show(session, _id)
166
167 content_range_text = headers.get("Content-Range")
168 expected_md5 = headers.get("Content-File-MD5")
169 compressed = None
170 content_type = headers.get("Content-Type")
171 if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
172 "application/zip" in content_type):
173 compressed = "gzip"
174 filename = headers.get("Content-Filename")
175 if not filename:
176 filename = "package.tar.gz" if compressed else "package"
177 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
178 file_pkg = None
179 error_text = ""
180 try:
181 if content_range_text:
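# Expected header format (see the IndexError handler below): "bytes <start>-<end>/<total>",
# e.g. "bytes 0-1048575/4194304"; replacing "-" and "/" by spaces yields ["bytes", start, end, total].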
182 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
183 if content_range[0] != "bytes": # TODO check x<y not negative < total....
184 raise IndexError()
185 start = int(content_range[1])
186 end = int(content_range[2]) + 1
187 total = int(content_range[3])
188 else:
189 start = 0
190 temp_folder = _id + "_"  # all the content is uploaded here and, if ok, it is renamed from '<id>_' to the '<id>' folder
191
192 if start:
193 if not self.fs.file_exists(temp_folder, 'dir'):
194 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
195 else:
196 self.fs.file_delete(temp_folder, ignore_non_exist=True)
197 self.fs.mkdir(temp_folder)
198
199 storage = self.fs.get_params()
200 storage["folder"] = _id
201
202 file_path = (temp_folder, filename)
203 if self.fs.file_exists(file_path, 'file'):
204 file_size = self.fs.file_size(file_path)
205 else:
206 file_size = 0
207 if file_size != start:
208 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
209 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
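# The package file is opened in append mode, so each chunk must start exactly where the previous
# one ended; this lets an upload be resumed across several requests sharing the same '<id>_' temp folder.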
210 file_pkg = self.fs.file_open(file_path, 'a+b')
211 if isinstance(indata, dict):
212 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
213 file_pkg.write(indata_text.encode(encoding="utf-8"))
214 else:
215 indata_len = 0
216 while True:
217 indata_text = indata.read(4096)
218 indata_len += len(indata_text)
219 if not indata_text:
220 break
221 file_pkg.write(indata_text)
222 if content_range_text:
223 if indata_len != end-start:
224 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
225 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
226 if end != total:
227 # TODO update to UPLOADING
228 return False
229
230 # PACKAGE UPLOADED
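# If the client provided a Content-File-MD5 header, verify it against the md5 of the whole
# assembled file before unpacking it.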
231 if expected_md5:
232 file_pkg.seek(0, 0)
233 file_md5 = md5()
234 chunk_data = file_pkg.read(1024)
235 while chunk_data:
236 file_md5.update(chunk_data)
237 chunk_data = file_pkg.read(1024)
238 if expected_md5 != file_md5.hexdigest():
239 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
240 file_pkg.seek(0, 0)
241 if compressed == "gzip":
242 tar = tarfile.open(mode='r', fileobj=file_pkg)
243 descriptor_file_name = None
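# Expected layout: a single top-level folder containing exactly one descriptor file (*.yaml,
# *.yml or *.json) directly under it, plus optional artifact folders such as 'charms/' or
# 'cloud_init/' that are checked later by _validate_package_folders().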
244 for tarinfo in tar:
245 tarname = tarinfo.name
246 tarname_path = tarname.split("/")
247 if not tarname_path[0] or ".." in tarname_path: # a name starting with "/" means an absolute path
248 raise EngineException("Absolute paths or '..' are not allowed in the package descriptor tar.gz")
249 if len(tarname_path) == 1 and not tarinfo.isdir():
250 raise EngineException("All files must be inside a folder in the package descriptor tar.gz")
251 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
252 storage["pkg-dir"] = tarname_path[0]
253 if len(tarname_path) == 2:
254 if descriptor_file_name:
255 raise EngineException(
256 "Found more than one descriptor file in the package descriptor tar.gz")
257 descriptor_file_name = tarname
258 if not descriptor_file_name:
259 raise EngineException("No descriptor file found in the package descriptor tar.gz")
260 storage["descriptor"] = descriptor_file_name
261 storage["zipfile"] = filename
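# storage now records folder, pkg-dir, descriptor and zipfile; it is persisted below in
# _admin.storage and later used by _validate_package_folders() and get_file().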
262 self.fs.file_extract(tar, temp_folder)
263 with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
264 content = descriptor_file.read()
265 else:
266 content = file_pkg.read()
267 storage["descriptor"] = descriptor_file_name = filename
268
269 if descriptor_file_name.endswith(".json"):
270 error_text = "Invalid json format "
271 indata = json.loads(content)
272 else:
273 error_text = "Invalid yaml format "
274 indata = yaml.safe_load(content)
275
276 current_desc["_admin"]["storage"] = storage
277 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
278 current_desc["_admin"]["operationalState"] = "ENABLED"
279
280 indata = self._remove_envelop(indata)
281
282 # Override descriptor with query string kwargs
283 if kwargs:
284 self._update_input_with_kwargs(indata, kwargs)
285 # it will call overrides method at VnfdTopic or NsdTopic
286 # indata = self._validate_input_edit(indata, force=force)
287
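# Merge the parsed descriptor into the existing (almost empty) entry using JSON merge patch
# (RFC 7396) semantics; check_conflict_on_edit() then re-validates the final content with
# pyangbind and runs the per-topic cross-reference checks.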
288 deep_update_rfc7396(current_desc, indata)
289 self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
290 self.db.replace(self.topic, _id, current_desc)
291 self.fs.dir_rename(temp_folder, _id)
292
293 indata["_id"] = _id
294 self._send_msg("created", indata)
295
296 # TODO if descriptor has changed because kwargs update content and remove cached zip
297 # TODO if zip is not present creates one
298 return True
299
300 except EngineException:
301 raise
302 except IndexError:
303 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
304 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
305 except IOError as e:
306 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
307 except tarfile.ReadError as e:
308 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
309 except (ValueError, yaml.YAMLError) as e:
310 raise EngineException(error_text + str(e))
311 except ValidationError as e:
312 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
313 finally:
314 if file_pkg:
315 file_pkg.close()
316
317 def get_file(self, session, _id, path=None, accept_header=None):
318 """
319 Return the file content of a vnfd or nsd
320 :param session: contains the used login username and working project
321 :param _id: Identity of the vnfd, nsd
322 :param path: artifact path or "$DESCRIPTOR" or None
323 :param accept_header: Content of the Accept header. Must contain 'application/zip' and/or 'text/plain'
324 :return: opened file plus Accept format or raises an exception
325 """
326 accept_text = accept_zip = False
327 if accept_header:
328 if 'text/plain' in accept_header or '*/*' in accept_header:
329 accept_text = True
330 if 'application/zip' in accept_header or '*/*' in accept_header:
331 accept_zip = 'application/zip'
332 elif 'application/gzip' in accept_header:
333 accept_zip = 'application/gzip'
334
335 if not accept_text and not accept_zip:
336 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
337 http_code=HTTPStatus.NOT_ACCEPTABLE)
338
339 content = self.show(session, _id)
340 if content["_admin"]["onboardingState"] != "ONBOARDED":
341 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
342 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
343 http_code=HTTPStatus.CONFLICT)
344 storage = content["_admin"]["storage"]
345 if path is not None and path != "$DESCRIPTOR": # artifacts
346 if not storage.get('pkg-dir'):
347 raise EngineException("Package does not contain artifacts", http_code=HTTPStatus.BAD_REQUEST)
348 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
349 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
350 return folder_content, "text/plain"
351 # TODO manage folders in http
352 else:
353 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
354 "application/octet-stream"
355
356 # pkgtype accept ZIP TEXT -> result
357 # manyfiles yes X -> zip
358 # no yes -> error
359 # onefile yes no -> zip
360 # X yes -> text
361
362 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
363 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
364 elif storage.get('pkg-dir') and not accept_zip:
365 raise EngineException("Packages that contain several files need to be retrieved with an 'application/zip' "
366 "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
367 else:
368 if not storage.get('zipfile'):
369 # TODO generate zipfile if not present
370 raise EngineException("Only the 'text/plain' Accept header is allowed for this descriptor. To be solved in "
371 "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
372 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
373
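# Validate a descriptor against the OSM information model with pyangbind: the data is wrapped in
# its catalog envelope, decoded into the generated model classes (skip_unknown=force relaxes
# unknown elements), serialized back to IETF JSON and returned without the envelope.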
374 def pyangbind_validation(self, item, data, force=False):
375 try:
376 if item == "vnfds":
377 myvnfd = vnfd_im()
378 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
379 path_helper=True, skip_unknown=force)
380 out = pybindJSON.dumps(myvnfd, mode="ietf")
381 elif item == "nsds":
382 mynsd = nsd_im()
383 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
384 path_helper=True, skip_unknown=force)
385 out = pybindJSON.dumps(mynsd, mode="ietf")
386 elif item == "nsts":
387 mynst = nst_im()
388 pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
389 path_helper=True, skip_unknown=force)
390 out = pybindJSON.dumps(mynst, mode="ietf")
391 else:
392 raise EngineException("Not possible to validate '{}' item".format(item),
393 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
394
395 desc_out = self._remove_envelop(yaml.safe_load(out))
396 return desc_out
397
398 except Exception as e:
399 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
400 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
401
402
403 class VnfdTopic(DescriptorTopic):
404 topic = "vnfds"
405 topic_msg = "vnfd"
406
407 def __init__(self, db, fs, msg):
408 DescriptorTopic.__init__(self, db, fs, msg)
409
410 @staticmethod
411 def _remove_envelop(indata=None):
412 if not indata:
413 return {}
414 clean_indata = indata
415 if clean_indata.get('vnfd:vnfd-catalog'):
416 clean_indata = clean_indata['vnfd:vnfd-catalog']
417 elif clean_indata.get('vnfd-catalog'):
418 clean_indata = clean_indata['vnfd-catalog']
419 if clean_indata.get('vnfd'):
420 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
421 raise EngineException("'vnfd' must be a list of only one element")
422 clean_indata = clean_indata['vnfd'][0]
423 elif clean_indata.get('vnfd:vnfd'):
424 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
425 raise EngineException("'vnfd:vnfd' must be a list of only one element")
426 clean_indata = clean_indata['vnfd:vnfd'][0]
427 return clean_indata
428
429 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
430 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
431
432 # set type of vnfd
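# _admin.type: "pnfd" when only PDUs are declared, "vnfd" when only VDUs, "hnfd" (hybrid) when both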
433 contains_pdu = False
434 contains_vdu = False
435 for vdu in get_iterable(final_content.get("vdu")):
436 if vdu.get("pdu-type"):
437 contains_pdu = True
438 else:
439 contains_vdu = True
440 if contains_pdu:
441 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
442 elif contains_vdu:
443 final_content["_admin"]["type"] = "vnfd"
444 # if there is neither a vdu nor a pdu, do not set the type
445
446 def check_conflict_on_del(self, session, _id, force=False):
447 """
448 Check that there is no NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
449 that a VNFD can be public and be used by NSDs of other projects. Also check that there are no deployments
450 (VNFRs) that use this VNFD
451 :param session:
452 :param _id: vnfd internal id
453 :param force: Avoid this checking
454 :return: None or raises EngineException with the conflict
455 """
456 if force:
457 return
458 descriptor = self.db.get_one("vnfds", {"_id": _id})
459 descriptor_id = descriptor.get("id")
460 if not descriptor_id: # empty vnfd not uploaded
461 return
462
463 _filter = self._get_project_filter(session, write=False, show_all=False)
464 # check vnfrs using this vnfd
465 _filter["vnfd-id"] = _id
466 if self.db.get_list("vnfrs", _filter):
467 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
468 del _filter["vnfd-id"]
469 # check NSD using this VNFD
470 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
471 if self.db.get_list("nsds", _filter):
472 raise EngineException("There is some NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
473
474 def _validate_input_new(self, indata, storage_params, force=False):
475 indata = self.pyangbind_validation("vnfds", indata, force)
476 # Cross references validation in the descriptor
477 if indata.get("vdu"):
478 if not indata.get("mgmt-interface"):
479 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
480 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
481 if indata["mgmt-interface"].get("cp"):
482 for cp in get_iterable(indata.get("connection-point")):
483 if cp["name"] == indata["mgmt-interface"]["cp"]:
484 break
485 else:
486 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
487 .format(indata["mgmt-interface"]["cp"]),
488 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
489
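# Note: the cross-reference checks below rely on Python's for/else: the else branch (raising the
# exception) only runs when the loop finishes without hitting 'break', i.e. no match was found.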
490 for vdu in get_iterable(indata.get("vdu")):
491 for interface in get_iterable(vdu.get("interface")):
492 if interface.get("external-connection-point-ref"):
493 for cp in get_iterable(indata.get("connection-point")):
494 if cp["name"] == interface["external-connection-point-ref"]:
495 break
496 else:
497 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
498 "must match an existing connection-point"
499 .format(vdu["id"], interface["name"],
500 interface["external-connection-point-ref"]),
501 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
502
503 elif interface.get("internal-connection-point-ref"):
504 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
505 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
506 break
507 else:
508 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
509 "must match an existing vdu:internal-connection-point"
510 .format(vdu["id"], interface["name"],
511 interface["internal-connection-point-ref"]),
512 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
513 # Validate that, if the descriptor defines a charm, the package contains the charms folder
514 if vdu.get("vdu-configuration"):
515 if vdu["vdu-configuration"].get("juju"):
516 if not self._validate_package_folders(storage_params, 'charms'):
517 raise EngineException("Charm defined in vnf[id={}]:vdu[id={}] but not present in "
518 "package".format(indata["id"], vdu["id"]))
519 # Validate that, if the descriptor defines a cloud-init file, the package contains it
520 if vdu.get("cloud-init-file"):
521 if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
522 raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
523 "package".format(indata["id"], vdu["id"]))
524 # Validate that, if the descriptor defines a charm at vnf level, the package contains the charms folder
525 if indata.get("vnf-configuration"):
526 if indata["vnf-configuration"].get("juju"):
527 if not self._validate_package_folders(storage_params, 'charms'):
528 raise EngineException("Charm defined in vnf[id={}] but not present in "
529 "package".format(indata["id"]))
530 for ivld in get_iterable(indata.get("internal-vld")):
531 for icp in get_iterable(ivld.get("internal-connection-point")):
532 icp_mark = False
533 for vdu in get_iterable(indata.get("vdu")):
534 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
535 if icp["id-ref"] == internal_cp["id"]:
536 icp_mark = True
537 break
538 if icp_mark:
539 break
540 else:
541 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
542 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
543 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
544 if ivld.get("ip-profile-ref"):
545 for ip_prof in get_iterable(indata.get("ip-profiles")):
546 if ip_prof["name"] == ivld["ip-profile-ref"]:
547 break
548 else:
549 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
550 ivld["id"], ivld["ip-profile-ref"]),
551 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
552 for mp in get_iterable(indata.get("monitoring-param")):
553 if mp.get("vdu-monitoring-param"):
554 mp_vmp_mark = False
555 for vdu in get_iterable(indata.get("vdu")):
556 for vmp in get_iterable(vdu.get("monitoring-param")):
557 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
558 mp["vdu-monitoring-param"]["vdu-ref"]:
559 mp_vmp_mark = True
560 break
561 if mp_vmp_mark:
562 break
563 else:
564 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
565 "defined at vdu[id='{}'] or vdu does not exist"
566 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
567 mp["vdu-monitoring-param"]["vdu-ref"]),
568 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
569 elif mp.get("vdu-metric"):
570 mp_vm_mark = False
571 for vdu in get_iterable(indata.get("vdu")):
572 if vdu.get("vdu-configuration"):
573 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
574 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
575 mp["vdu-metric"]["vdu-ref"]:
576 mp_vm_mark = True
577 break
578 if mp_vm_mark:
579 break
580 else:
581 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
582 "vdu[id='{}'] or vdu does not exist"
583 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
584 mp["vdu-metric"]["vdu-ref"]),
585 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
586
587 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
588 for sp in get_iterable(sgd.get("scaling-policy")):
589 for sc in get_iterable(sp.get("scaling-criteria")):
590 for mp in get_iterable(indata.get("monitoring-param")):
591 if mp["id"] == sc.get("vnf-monitoring-param-ref"):
592 break
593 else:
594 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
595 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
596 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
597 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
598 for sgd_vdu in get_iterable(sgd.get("vdu")):
599 sgd_vdu_mark = False
600 for vdu in get_iterable(indata.get("vdu")):
601 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
602 sgd_vdu_mark = True
603 break
604 if sgd_vdu_mark:
605 break
606 else:
607 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
608 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
609 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
610 for sca in get_iterable(sgd.get("scaling-config-action")):
611 if not indata.get("vnf-configuration"):
612 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
613 "scaling-group-descriptor[name='{}']:scaling-config-action"
614 .format(sgd["name"]),
615 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
616 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
617 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
618 break
619 else:
620 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
621 "primitive-name-ref='{}' does not match any "
622 "vnf-configuration:config-primitive:name"
623 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
624 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
625 return indata
626
627 def _validate_input_edit(self, indata, force=False):
628 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
629 return indata
630
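# Bug 619: when the descriptor references a charm or a cloud-init file, make sure the uploaded
# package actually contains the corresponding folder/file. While the package is still being
# onboarded its content lives under the temporary '<folder>_' directory, hence both locations are checked.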
631 def _validate_package_folders(self, storage_params, folder, file=None):
632 if not storage_params or not storage_params.get("pkg-dir"):
633 return False
634 else:
635 if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
636 f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
637 else:
638 f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
639 if file:
640 return self.fs.file_exists("{}/{}".format(f, file), 'file')
641 else:
642 if self.fs.file_exists(f, 'dir'):
643 if self.fs.dir_ls(f):
644 return True
645 return False
646
647
648 class NsdTopic(DescriptorTopic):
649 topic = "nsds"
650 topic_msg = "nsd"
651
652 def __init__(self, db, fs, msg):
653 DescriptorTopic.__init__(self, db, fs, msg)
654
655 @staticmethod
656 def _remove_envelop(indata=None):
657 if not indata:
658 return {}
659 clean_indata = indata
660
661 if clean_indata.get('nsd:nsd-catalog'):
662 clean_indata = clean_indata['nsd:nsd-catalog']
663 elif clean_indata.get('nsd-catalog'):
664 clean_indata = clean_indata['nsd-catalog']
665 if clean_indata.get('nsd'):
666 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
667 raise EngineException("'nsd' must be a list of only one element")
668 clean_indata = clean_indata['nsd'][0]
669 elif clean_indata.get('nsd:nsd'):
670 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
671 raise EngineException("'nsd:nsd' must be a list of only one element")
672 clean_indata = clean_indata['nsd:nsd'][0]
673 return clean_indata
674
675 def _validate_input_new(self, indata, storage_params, force=False):
676 indata = self.pyangbind_validation("nsds", indata, force)
677 # Cross references validation in the descriptor
678 # TODO validate that, if it contains cloud-init-file or charms, the package provides them (_admin.storage."pkg-dir" is not none)
679 for vld in get_iterable(indata.get("vld")):
680 for vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
681 for constituent_vnfd in get_iterable(indata.get("constituent-vnfd")):
682 if vnfd_cp["member-vnf-index-ref"] == constituent_vnfd["member-vnf-index"]:
683 if vnfd_cp.get("vnfd-id-ref") and vnfd_cp["vnfd-id-ref"] != constituent_vnfd["vnfd-id-ref"]:
684 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[vnfd-id-ref='{}'] "
685 "does not match constituent-vnfd[member-vnf-index='{}']:vnfd-id-ref"
686 " '{}'".format(vld["id"], vnfd_cp["vnfd-id-ref"],
687 constituent_vnfd["member-vnf-index"],
688 constituent_vnfd["vnfd-id-ref"]),
689 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
690 break
691 else:
692 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
693 "does not match any constituent-vnfd:member-vnf-index"
694 .format(vld["id"], vnfd_cp["member-vnf-index-ref"]),
695 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
696 return indata
697
698 def _validate_input_edit(self, indata, force=False):
699 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
700 return indata
701
702 def _check_descriptor_dependencies(self, session, descriptor, force=False):
703 """
704 Check that the descriptors referenced by a new or edited descriptor exist. Also check that references to vnfd
705 connection points are correct
706 :param session: client session information
707 :param descriptor: descriptor to be inserted or edit
708 :param force: if true skip dependencies checking
709 :return: None or raises exception
710 """
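# member_vnfd_index maps each member-vnf-index to its VNFD document, so that the vld
# connection-point references below can be checked against the actual VNFD content.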
711 if force:
712 return
713 member_vnfd_index = {}
714 if descriptor.get("constituent-vnfd") and not force:
715 for vnf in descriptor["constituent-vnfd"]:
716 vnfd_id = vnf["vnfd-id-ref"]
717 filter_q = self._get_project_filter(session, write=False, show_all=True)
718 filter_q["id"] = vnfd_id
719 vnf_list = self.db.get_list("vnfds", filter_q)
720 if not vnf_list:
721 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
722 "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
723 # elif len(vnf_list) > 1:
724 # raise EngineException("More than one vnfd found for id='{}'".format(vnfd_id),
725 # http_code=HTTPStatus.CONFLICT)
726 member_vnfd_index[vnf["member-vnf-index"]] = vnf_list[0]
727
728 # Cross references validation in the descriptor and vnfd connection point validation
729 for vld in get_iterable(descriptor.get("vld")):
730 for referenced_vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
731 # look if this vnfd contains this connection point
732 vnfd = member_vnfd_index.get(referenced_vnfd_cp["member-vnf-index-ref"])
733 if not vnfd:
734 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
735 "does not match any constituent-vnfd:member-vnf-index"
736 .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"]),
737 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
738 for vnfd_cp in get_iterable(vnfd.get("connection-point")):
739 if referenced_vnfd_cp.get("vnfd-connection-point-ref") == vnfd_cp["name"]:
740 break
741 else:
742 raise EngineException(
743 "Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}']:vnfd-"
744 "connection-point-ref='{}' references a non existing connection-point:name inside vnfd '{}'"
745 .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"],
746 referenced_vnfd_cp["vnfd-connection-point-ref"], vnfd["id"]),
747 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
748
749 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
750 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
751
752 self._check_descriptor_dependencies(session, final_content, force)
753
754 def check_conflict_on_del(self, session, _id, force=False):
755 """
756 Check that there is no NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
757 that an NSD can be public and be used by other projects.
758 :param session:
759 :param _id: nsd internal id
760 :param force: Avoid this checking
761 :return: None or raises EngineException with the conflict
762 """
763 if force:
764 return
765 _filter = self._get_project_filter(session, write=False, show_all=False)
766 _filter["nsdId"] = _id
767 if self.db.get_list("nsrs", _filter):
768 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
769
770
771 class NstTopic(DescriptorTopic):
772 topic = "nsts"
773 topic_msg = "nst"
774
775 def __init__(self, db, fs, msg):
776 DescriptorTopic.__init__(self, db, fs, msg)
777
778 @staticmethod
779 def _remove_envelop(indata=None):
780 if not indata:
781 return {}
782 clean_indata = indata
783
784 if clean_indata.get('nst'):
785 if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
786 raise EngineException("'nst' must be a list of only one element")
787 clean_indata = clean_indata['nst'][0]
788 elif clean_indata.get('nst:nst'):
789 if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
790 raise EngineException("'nst:nst' must be a list of only one element")
791 clean_indata = clean_indata['nst:nst'][0]
792 return clean_indata
793
794 def _validate_input_edit(self, indata, force=False):
795 # TODO validate with pyangbind, serialize
796 return indata
797
798 def _validate_input_new(self, indata, storage_params, force=False):
799 indata = self.pyangbind_validation("nsts", indata, force)
800 return indata.copy()
801
802 def _check_descriptor_dependencies(self, session, descriptor):
803 """
804 Check that the descriptors referenced by a new or edited descriptor exist
805 :param session: client session information
806 :param descriptor: descriptor to be inserted or edit
807 :return: None or raises exception
808 """
809 if not descriptor.get("netslice-subnet"):
810 return
811 for nsd in descriptor["netslice-subnet"]:
812 nsd_id = nsd["nsd-ref"]
813 filter_q = self._get_project_filter(session, write=False, show_all=True)
814 filter_q["id"] = nsd_id
815 if not self.db.get_list("nsds", filter_q):
816 raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
817 "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
818
819 def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
820 super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)
821
822 self._check_descriptor_dependencies(session, final_content)
823
824 def check_conflict_on_del(self, session, _id, force=False):
825 """
826 Check that there is no NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
827 that an NST can be public and be used by other projects.
828 :param session:
829 :param _id: nst internal id
830 :param force: Avoid this checking
831 :return: None or raises EngineException with the conflict
832 """
833 # TODO: Check this method
834 if force:
835 return
836 # Get Network Slice Template from Database
837 _filter = self._get_project_filter(session, write=False, show_all=False)
838 _filter["_id"] = _id
839 nst = self.db.get_one("nsts", _filter)
840
841 # Search NSIs using NST via nst-ref
842 _filter = self._get_project_filter(session, write=False, show_all=False)
843 _filter["nst-ref"] = nst["id"]
844 nsis_list = self.db.get_list("nsis", _filter)
845 for nsi_item in nsis_list:
846 if nsi_item["_admin"].get("nsiState") != "TERMINATED":
847 raise EngineException("There is some NSI that depends on this NST", http_code=HTTPStatus.CONFLICT)
848
849
850 class PduTopic(BaseTopic):
851 topic = "pdus"
852 topic_msg = "pdu"
853 schema_new = pdu_new_schema
854 schema_edit = pdu_edit_schema
855
856 def __init__(self, db, fs, msg):
857 BaseTopic.__init__(self, db, fs, msg)
858
859 @staticmethod
860 def format_on_new(content, project_id=None, make_public=False):
861 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
862 content["_admin"]["onboardingState"] = "CREATED"
863 content["_admin"]["operationalState"] = "ENABLED"
864 content["_admin"]["usageState"] = "NOT_IN_USE"
865
866 def check_conflict_on_del(self, session, _id, force=False):
867 if force:
868 return
869 # TODO Is it needed to check descriptors _admin.project_read/project_write??
870 _filter = {"vdur.pdu-id": _id}
871 if self.db.get_list("vnfrs", _filter):
872 raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)