Adding filter to UserTopicAuth
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 # import logging
20 from hashlib import md5
21 from osm_common.dbbase import DbException, deep_update_rfc7396
22 from http import HTTPStatus
23 from validation import ValidationError, pdu_new_schema, pdu_edit_schema
24 from base_topic import BaseTopic, EngineException, get_iterable
25 from osm_im.vnfd import vnfd as vnfd_im
26 from osm_im.nsd import nsd as nsd_im
27 from osm_im.nst import nst as nst_im
28 from pyangbind.lib.serialise import pybindJSONDecoder
29 import pyangbind.lib.pybindJSON as pybindJSON
30
31 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
32
33
34 class DescriptorTopic(BaseTopic):
35
36 def __init__(self, db, fs, msg):
37 BaseTopic.__init__(self, db, fs, msg)
38
39 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
40 super().check_conflict_on_edit(session, final_content, edit_content, _id)
41 # 1. validate again with pyangbind
42 # 1.1. remove internal keys
43 internal_keys = {}
44 for k in ("_id", "_admin"):
45 if k in final_content:
46 internal_keys[k] = final_content.pop(k)
47 storage_params = internal_keys["_admin"].get("storage")
48 serialized = self._validate_input_new(final_content, storage_params, session["force"])
49 # 1.2. modify final_content with a serialized version
50 final_content.clear()
51 final_content.update(serialized)
52 # 1.3. restore internal keys
53 for k, v in internal_keys.items():
54 final_content[k] = v
55
56 if session["force"]:
57 return
58 # 2. check that this id is not present
59 if "id" in edit_content:
60 _filter = self._get_project_filter(session)
61 _filter["id"] = final_content["id"]
62 _filter["_id.neq"] = _id
63 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
64 raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
65 final_content["id"]),
66 HTTPStatus.CONFLICT)
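        # Illustrative shape of the uniqueness filter built above (project visibility keys come from
        # _get_project_filter and are omitted here):
        #     {"id": final_content["id"], "_id.neq": _id, ...project filter keys...}
        # so a CONFLICT is only raised when another descriptor visible to this project reuses the same "id".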
67
68 @staticmethod
69 def format_on_new(content, project_id=None, make_public=False):
70 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
71 content["_admin"]["onboardingState"] = "CREATED"
72 content["_admin"]["operationalState"] = "DISABLED"
73 content["_admin"]["usageState"] = "NOT_IN_USE"
74
75 def delete_extra(self, session, _id):
76 self.fs.file_delete(_id, ignore_non_exist=True)
77 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
78
79 @staticmethod
80 def get_one_by_id(db, session, topic, id):
81 # find owned by this project
82 _filter = BaseTopic._get_project_filter(session)
83 _filter["id"] = id
84 desc_list = db.get_list(topic, _filter)
85 if len(desc_list) == 1:
86 return desc_list[0]
87 elif len(desc_list) > 1:
88 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
89 HTTPStatus.CONFLICT)
90
91 # not found any: try to find public
92 _filter = BaseTopic._get_project_filter(session)
93 _filter["id"] = id
94 desc_list = db.get_list(topic, _filter)
95 if not desc_list:
96             raise DbException("No {} found with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
97 elif len(desc_list) == 1:
98 return desc_list[0]
99 else:
100             raise DbException("Found more than one public {} with id='{}'; and none belonging to this project".format(
101 topic[:-1], id), HTTPStatus.CONFLICT)
102
103 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
104 """
105         Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal
106         procedure: creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and
107         2) upload its content (self.upload_content)
108         :param rollback: list where created database items are appended, so that a rollback can remove them if needed
109 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
110 :param indata: data to be inserted
111 :param kwargs: used to override the indata descriptor
112 :param headers: http request headers
113 :return: _id: identity of the inserted data.
114 """
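        # Rough client-side sketch of the two-step SOL005 flow described above (endpoint paths are indicative
        # assumptions, not defined in this module):
        #   POST /vnfpkgm/v1/vnf_packages  {"userDefinedData": {...}}               -> this method, returns _id
        #   PUT  /vnfpkgm/v1/vnf_packages/<_id>/package_content  <package.tar.gz>   -> self.upload_content()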
115
116 try:
117 # _remove_envelop
118 if indata:
119 if "userDefinedData" in indata:
120 indata = indata['userDefinedData']
121
122 # Override descriptor with query string kwargs
123 self._update_input_with_kwargs(indata, kwargs)
124 # uncomment when this method is implemented.
125 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
126 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
127
128 content = {"_admin": {"userDefinedData": indata}}
129 self.format_on_new(content, session["project_id"], make_public=session["public"])
130 _id = self.db.create(self.topic, content)
131 rollback.append({"topic": self.topic, "_id": _id})
132 return _id
133 except ValidationError as e:
134 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
135
136 def upload_content(self, session, _id, indata, kwargs, headers):
137 """
138         Used for receiving package content by chunks (with a transaction_id header and/or a gzip file). It stores and extracts it
139 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
140         :param _id: id of the already created nsd/vnfd whose content is being uploaded
141 :param indata: http body request
142 :param kwargs: user query string to override parameters. NOT USED
143 :param headers: http request headers
144         :return: True if package is completely uploaded or False if partial content has been uploaded
145 Raise exception on error
146 """
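        # Illustrative example of the headers handled below (values are made up):
        #   Content-Filename: cirros_vnf.tar.gz
        #   Content-Type:     application/gzip
        #   Content-Range:    bytes 0-1048575/2621440    -> start=0, end=1048576, total=2621440
        #   Content-File-MD5: <md5 of the complete file, verified once the last chunk arrives>
        # Each chunk is appended to the temporary folder "<_id>_"; when end == total the package is extracted.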
147 # Check that _id exists and it is valid
148 current_desc = self.show(session, _id)
149
150 content_range_text = headers.get("Content-Range")
151 expected_md5 = headers.get("Content-File-MD5")
152 compressed = None
153 content_type = headers.get("Content-Type")
154         if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
155                              "application/zip" in content_type):
156 compressed = "gzip"
157 filename = headers.get("Content-Filename")
158 if not filename:
159 filename = "package.tar.gz" if compressed else "package"
160 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
161 file_pkg = None
162 error_text = ""
163 try:
164 if content_range_text:
165 content_range = content_range_text.replace("-", " ").replace("/", " ").split()
166                 if content_range[0] != "bytes":  # TODO check start < end <= total and that values are not negative
167 raise IndexError()
168 start = int(content_range[1])
169 end = int(content_range[2]) + 1
170 total = int(content_range[3])
171 else:
172 start = 0
173             temp_folder = _id + "_"  # all the content is uploaded here and, if ok, it is renamed from "<id>_" to the "<id>" folder
174
175 if start:
176 if not self.fs.file_exists(temp_folder, 'dir'):
177 raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
178 else:
179 self.fs.file_delete(temp_folder, ignore_non_exist=True)
180 self.fs.mkdir(temp_folder)
181
182 storage = self.fs.get_params()
183 storage["folder"] = _id
184
185 file_path = (temp_folder, filename)
186 if self.fs.file_exists(file_path, 'file'):
187 file_size = self.fs.file_size(file_path)
188 else:
189 file_size = 0
190 if file_size != start:
191 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
192 file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
193 file_pkg = self.fs.file_open(file_path, 'a+b')
194 if isinstance(indata, dict):
195 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
196 file_pkg.write(indata_text.encode(encoding="utf-8"))
197 else:
198 indata_len = 0
199 while True:
200 indata_text = indata.read(4096)
201 indata_len += len(indata_text)
202 if not indata_text:
203 break
204 file_pkg.write(indata_text)
205 if content_range_text:
206 if indata_len != end-start:
207 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
208 start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
209 if end != total:
210 # TODO update to UPLOADING
211 return False
212
213 # PACKAGE UPLOADED
214 if expected_md5:
215 file_pkg.seek(0, 0)
216 file_md5 = md5()
217 chunk_data = file_pkg.read(1024)
218 while chunk_data:
219 file_md5.update(chunk_data)
220 chunk_data = file_pkg.read(1024)
221 if expected_md5 != file_md5.hexdigest():
222 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
223 file_pkg.seek(0, 0)
224 if compressed == "gzip":
225 tar = tarfile.open(mode='r', fileobj=file_pkg)
226 descriptor_file_name = None
227 for tarinfo in tar:
228 tarname = tarinfo.name
229 tarname_path = tarname.split("/")
230                     if not tarname_path[0] or ".." in tarname_path:  # starting with "/" means an absolute path
231                         raise EngineException("Absolute paths or '..' are not allowed in the package descriptor tar.gz")
232                     if len(tarname_path) == 1 and not tarinfo.isdir():
233                         raise EngineException("All files must be inside a directory in the package descriptor tar.gz")
234 if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
235 storage["pkg-dir"] = tarname_path[0]
236 if len(tarname_path) == 2:
237 if descriptor_file_name:
238 raise EngineException(
239                                 "Found more than one descriptor file in the package descriptor tar.gz")
240 descriptor_file_name = tarname
241 if not descriptor_file_name:
242                     raise EngineException("No descriptor file found in the package descriptor tar.gz")
243 storage["descriptor"] = descriptor_file_name
244 storage["zipfile"] = filename
245 self.fs.file_extract(tar, temp_folder)
246 with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
247 content = descriptor_file.read()
248 else:
249 content = file_pkg.read()
250 storage["descriptor"] = descriptor_file_name = filename
251
252 if descriptor_file_name.endswith(".json"):
253 error_text = "Invalid json format "
254                 indata = json.loads(content)  # content is a str/bytes buffer, not a file object
255 else:
256 error_text = "Invalid yaml format "
257                 indata = yaml.safe_load(content)  # safe_load is enough for plain descriptor documents
258
259 current_desc["_admin"]["storage"] = storage
260 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
261 current_desc["_admin"]["operationalState"] = "ENABLED"
262
263 indata = self._remove_envelop(indata)
264
265 # Override descriptor with query string kwargs
266 if kwargs:
267 self._update_input_with_kwargs(indata, kwargs)
268 # it will call overrides method at VnfdTopic or NsdTopic
269 # indata = self._validate_input_edit(indata, force=session["force"])
270
271 deep_update_rfc7396(current_desc, indata)
272 self.check_conflict_on_edit(session, current_desc, indata, _id=_id)
273 self.db.replace(self.topic, _id, current_desc)
274 self.fs.dir_rename(temp_folder, _id)
275
276 indata["_id"] = _id
277 self._send_msg("created", indata)
278
279 # TODO if descriptor has changed because kwargs update content and remove cached zip
280 # TODO if zip is not present creates one
281 return True
282
283 except EngineException:
284 raise
285 except IndexError:
286 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
287 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
288 except IOError as e:
289 raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
290 except tarfile.ReadError as e:
291 raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
292 except (ValueError, yaml.YAMLError) as e:
293 raise EngineException(error_text + str(e))
294 except ValidationError as e:
295 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
296 finally:
297 if file_pkg:
298 file_pkg.close()
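    # Minimal client-side sketch (an assumption, not part of this class) of how the Content-File-MD5
    # header consumed above could be produced:
    #     from hashlib import md5
    #     with open("cirros_vnf.tar.gz", "rb") as f:
    #         headers["Content-File-MD5"] = md5(f.read()).hexdigest()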
299
300 def get_file(self, session, _id, path=None, accept_header=None):
301 """
302 Return the file content of a vnfd or nsd
303 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
304 :param _id: Identity of the vnfd, nsd
305 :param path: artifact path or "$DESCRIPTOR" or None
306         :param accept_header: content of the Accept header. Must contain 'application/zip' and/or 'text/plain'
307 :return: opened file plus Accept format or raises an exception
308 """
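        # Illustrative outcomes of the logic below (paths and names are made up):
        #   path="$DESCRIPTOR",      Accept: text/plain       -> descriptor file as "text/plain"
        #   path="icons/logo.png",   Accept: */*              -> that artifact as "application/octet-stream"
        #   path=None (multi-file),  Accept: application/zip  -> the stored package file as "application/zip"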
309 accept_text = accept_zip = False
310 if accept_header:
311 if 'text/plain' in accept_header or '*/*' in accept_header:
312 accept_text = True
313 if 'application/zip' in accept_header or '*/*' in accept_header:
314 accept_zip = 'application/zip'
315 elif 'application/gzip' in accept_header:
316 accept_zip = 'application/gzip'
317
318 if not accept_text and not accept_zip:
319 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
320 http_code=HTTPStatus.NOT_ACCEPTABLE)
321
322 content = self.show(session, _id)
323 if content["_admin"]["onboardingState"] != "ONBOARDED":
324 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
325 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
326 http_code=HTTPStatus.CONFLICT)
327 storage = content["_admin"]["storage"]
328 if path is not None and path != "$DESCRIPTOR": # artifacts
329 if not storage.get('pkg-dir'):
330                 raise EngineException("Package does not contain artifacts", http_code=HTTPStatus.BAD_REQUEST)
331 if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
332 folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
333 return folder_content, "text/plain"
334 # TODO manage folders in http
335 else:
336 return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
337 "application/octet-stream"
338
339         # pkgtype    accept:  ZIP   TEXT   -> result
340         # manyfiles           yes   X      -> zip
341         #                     no    yes    -> error
342         # onefile             yes   no     -> zip
343         #                     X     yes    -> text
344
345 if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
346 return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
347 elif storage.get('pkg-dir') and not accept_zip:
348             raise EngineException("Packages that contain several files need to be retrieved with the "
349                                   "'application/zip' Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
350 else:
351 if not storage.get('zipfile'):
352 # TODO generate zipfile if not present
353                 raise EngineException("Only 'text/plain' Accept header is allowed for this descriptor. To be "
354                                       "solved in future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
355 return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
356
357 def pyangbind_validation(self, item, data, force=False):
358 try:
359 if item == "vnfds":
360 myvnfd = vnfd_im()
361 pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
362 path_helper=True, skip_unknown=force)
363 out = pybindJSON.dumps(myvnfd, mode="ietf")
364 elif item == "nsds":
365 mynsd = nsd_im()
366 pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
367 path_helper=True, skip_unknown=force)
368 out = pybindJSON.dumps(mynsd, mode="ietf")
369 elif item == "nsts":
370 mynst = nst_im()
371 pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
372 path_helper=True, skip_unknown=force)
373 out = pybindJSON.dumps(mynst, mode="ietf")
374 else:
375 raise EngineException("Not possible to validate '{}' item".format(item),
376 http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
377
378 desc_out = self._remove_envelop(yaml.safe_load(out))
379 return desc_out
380
381 except Exception as e:
382 raise EngineException("Error in pyangbind validation: {}".format(str(e)),
383 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
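    # Minimal usage sketch (assumption: called from a topic subclass that defines _remove_envelop):
    #     validated = self.pyangbind_validation("vnfds", {"id": "cirros_vnfd", ...}, force=False)
    # On success the descriptor is returned as serialized by the pyangbind model; any model violation
    # raises EngineException with HTTP 422 (UNPROCESSABLE_ENTITY).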
384
385
386 class VnfdTopic(DescriptorTopic):
387 topic = "vnfds"
388 topic_msg = "vnfd"
389
390 def __init__(self, db, fs, msg):
391 DescriptorTopic.__init__(self, db, fs, msg)
392
393 @staticmethod
394 def _remove_envelop(indata=None):
395 if not indata:
396 return {}
397 clean_indata = indata
398 if clean_indata.get('vnfd:vnfd-catalog'):
399 clean_indata = clean_indata['vnfd:vnfd-catalog']
400 elif clean_indata.get('vnfd-catalog'):
401 clean_indata = clean_indata['vnfd-catalog']
402 if clean_indata.get('vnfd'):
403 if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
404 raise EngineException("'vnfd' must be a list of only one element")
405 clean_indata = clean_indata['vnfd'][0]
406 elif clean_indata.get('vnfd:vnfd'):
407 if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
408 raise EngineException("'vnfd:vnfd' must be a list of only one element")
409 clean_indata = clean_indata['vnfd:vnfd'][0]
410 return clean_indata
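    # Example (illustrative): both of the following inputs reduce to the same plain vnfd dict:
    #     {"vnfd:vnfd-catalog": {"vnfd": [{"id": "cirros_vnfd", ...}]}}
    #     {"vnfd-catalog": {"vnfd": [{"id": "cirros_vnfd", ...}]}}
    # result -> {"id": "cirros_vnfd", ...}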
411
412 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
413 super().check_conflict_on_edit(session, final_content, edit_content, _id)
414
415 # set type of vnfd
416 contains_pdu = False
417 contains_vdu = False
418 for vdu in get_iterable(final_content.get("vdu")):
419 if vdu.get("pdu-type"):
420 contains_pdu = True
421 else:
422 contains_vdu = True
423 if contains_pdu:
424 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
425 elif contains_vdu:
426 final_content["_admin"]["type"] = "vnfd"
427         # if there is neither vdu nor pdu, do not fill the type
428
429 def check_conflict_on_del(self, session, _id):
430 """
431         Check that no NSD uses this VNFD. Only NSDs belonging to this project are considered. Note that the VNFD
432         can be public and be used by NSDs of other projects. Also check that there is no deployment (VNFR) that
433         uses this VNFD.
434         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
435         :param _id: vnfd internal id
436 :return: None or raises EngineException with the conflict
437 """
438 if session["force"]:
439 return
440 descriptor = self.db.get_one("vnfds", {"_id": _id})
441 descriptor_id = descriptor.get("id")
442 if not descriptor_id: # empty vnfd not uploaded
443 return
444
445 _filter = self._get_project_filter(session)
446 # check vnfrs using this vnfd
447 _filter["vnfd-id"] = _id
448 if self.db.get_list("vnfrs", _filter):
449 raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
450 del _filter["vnfd-id"]
451 # check NSD using this VNFD
452 _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
453 if self.db.get_list("nsds", _filter):
454             raise EngineException("There is at least one NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
455
456 def _validate_input_new(self, indata, storage_params, force=False):
457 indata = self.pyangbind_validation("vnfds", indata, force)
458 # Cross references validation in the descriptor
459 if indata.get("vdu"):
460 if not indata.get("mgmt-interface"):
461 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
462 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
463 if indata["mgmt-interface"].get("cp"):
464 for cp in get_iterable(indata.get("connection-point")):
465 if cp["name"] == indata["mgmt-interface"]["cp"]:
466 break
467 else:
468 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
469 .format(indata["mgmt-interface"]["cp"]),
470 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
471
472 for vdu in get_iterable(indata.get("vdu")):
473 for interface in get_iterable(vdu.get("interface")):
474 if interface.get("external-connection-point-ref"):
475 for cp in get_iterable(indata.get("connection-point")):
476 if cp["name"] == interface["external-connection-point-ref"]:
477 break
478 else:
479 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
480 "must match an existing connection-point"
481 .format(vdu["id"], interface["name"],
482 interface["external-connection-point-ref"]),
483 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
484
485 elif interface.get("internal-connection-point-ref"):
486 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
487 if interface["internal-connection-point-ref"] == internal_cp.get("id"):
488 break
489 else:
490 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
491 "must match an existing vdu:internal-connection-point"
492 .format(vdu["id"], interface["name"],
493 interface["internal-connection-point-ref"]),
494 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
495             # Validate that if the descriptor contains charms, the package provides the artifacts (_admin.storage "pkg-dir" is not None)
496 if vdu.get("vdu-configuration"):
497 if vdu["vdu-configuration"].get("juju"):
498 if not self._validate_package_folders(storage_params, 'charms'):
499 raise EngineException("Charm defined in vnf[id={}]:vdu[id={}] but not present in "
500 "package".format(indata["id"], vdu["id"]))
501             # Validate that if the descriptor contains cloud-init files, the package provides the artifacts (_admin.storage "pkg-dir" is not None)
502 if vdu.get("cloud-init-file"):
503 if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
504 raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
505 "package".format(indata["id"], vdu["id"]))
506         # Validate that if the descriptor contains charms, the package provides the artifacts (_admin.storage "pkg-dir" is not None)
507 if indata.get("vnf-configuration"):
508 if indata["vnf-configuration"].get("juju"):
509 if not self._validate_package_folders(storage_params, 'charms'):
510 raise EngineException("Charm defined in vnf[id={}] but not present in "
511 "package".format(indata["id"]))
512 vld_names = [] # For detection of duplicated VLD names
513 for ivld in get_iterable(indata.get("internal-vld")):
514 # BEGIN Detection of duplicated VLD names
515 ivld_name = ivld["name"]
516 if ivld_name in vld_names:
517 raise EngineException("Duplicated VLD name '{}' in vnfd[id={}]:internal-vld[id={}]"
518 .format(ivld["name"], indata["id"], ivld["id"]),
519 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
520 else:
521 vld_names.append(ivld_name)
522 # END Detection of duplicated VLD names
523 for icp in get_iterable(ivld.get("internal-connection-point")):
524 icp_mark = False
525 for vdu in get_iterable(indata.get("vdu")):
526 for internal_cp in get_iterable(vdu.get("internal-connection-point")):
527 if icp["id-ref"] == internal_cp["id"]:
528 icp_mark = True
529 break
530 if icp_mark:
531 break
532 else:
533 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
534 "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
535 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
536 if ivld.get("ip-profile-ref"):
537 for ip_prof in get_iterable(indata.get("ip-profiles")):
538                     if ip_prof["name"] == ivld["ip-profile-ref"]:
539 break
540 else:
541 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
542 ivld["id"], ivld["ip-profile-ref"]),
543 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
544 for mp in get_iterable(indata.get("monitoring-param")):
545 if mp.get("vdu-monitoring-param"):
546 mp_vmp_mark = False
547 for vdu in get_iterable(indata.get("vdu")):
548 for vmp in get_iterable(vdu.get("monitoring-param")):
549 if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
550 mp["vdu-monitoring-param"]["vdu-ref"]:
551 mp_vmp_mark = True
552 break
553 if mp_vmp_mark:
554 break
555 else:
556 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
557 "defined at vdu[id='{}'] or vdu does not exist"
558 .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
559 mp["vdu-monitoring-param"]["vdu-ref"]),
560 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
561 elif mp.get("vdu-metric"):
562 mp_vm_mark = False
563 for vdu in get_iterable(indata.get("vdu")):
564 if vdu.get("vdu-configuration"):
565 for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
566 if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
567 mp["vdu-metric"]["vdu-ref"]:
568 mp_vm_mark = True
569 break
570 if mp_vm_mark:
571 break
572 else:
573 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
574 "vdu[id='{}'] or vdu does not exist"
575 .format(mp["vdu-metric"]["vdu-metric-name-ref"],
576 mp["vdu-metric"]["vdu-ref"]),
577 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
578
579 for sgd in get_iterable(indata.get("scaling-group-descriptor")):
580 for sp in get_iterable(sgd.get("scaling-policy")):
581 for sc in get_iterable(sp.get("scaling-criteria")):
582 for mp in get_iterable(indata.get("monitoring-param")):
583                         if mp["id"] == sc.get("vnf-monitoring-param-ref"):
584 break
585 else:
586 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
587 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
588 .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
589 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
590 for sgd_vdu in get_iterable(sgd.get("vdu")):
591 sgd_vdu_mark = False
592 for vdu in get_iterable(indata.get("vdu")):
593 if vdu["id"] == sgd_vdu["vdu-id-ref"]:
594 sgd_vdu_mark = True
595 break
596 if sgd_vdu_mark:
597 break
598 else:
599 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
600 .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
601 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
602 for sca in get_iterable(sgd.get("scaling-config-action")):
603 if not indata.get("vnf-configuration"):
604 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
605 "scaling-group-descriptor[name='{}']:scaling-config-action"
606 .format(sgd["name"]),
607 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
608 for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
609 if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
610 break
611 else:
612 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
613 "primitive-name-ref='{}' does not match any "
614 "vnf-configuration:config-primitive:name"
615 .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
616 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
617 return indata
618
619 def _validate_input_edit(self, indata, force=False):
620         # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
621 return indata
622
623 def _validate_package_folders(self, storage_params, folder, file=None):
624 if not storage_params or not storage_params.get("pkg-dir"):
625 return False
626 else:
627 if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
628 f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
629 else:
630 f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
631 if file:
632 return self.fs.file_exists("{}/{}".format(f, file), 'file')
633 else:
634 if self.fs.file_exists(f, 'dir'):
635 if self.fs.dir_ls(f):
636 return True
637 return False
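    # Package layout checked above (illustrative):
    #     <pkg-dir>/charms/<charm-name>/...        for juju charms
    #     <pkg-dir>/cloud_init/<cloud-init-file>   for vdu cloud-init files
    # While an upload transaction is still open, the same tree lives under "<folder>_/" instead of "<folder>/".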
638
639
640 class NsdTopic(DescriptorTopic):
641 topic = "nsds"
642 topic_msg = "nsd"
643
644 def __init__(self, db, fs, msg):
645 DescriptorTopic.__init__(self, db, fs, msg)
646
647 @staticmethod
648 def _remove_envelop(indata=None):
649 if not indata:
650 return {}
651 clean_indata = indata
652
653 if clean_indata.get('nsd:nsd-catalog'):
654 clean_indata = clean_indata['nsd:nsd-catalog']
655 elif clean_indata.get('nsd-catalog'):
656 clean_indata = clean_indata['nsd-catalog']
657 if clean_indata.get('nsd'):
658 if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
659 raise EngineException("'nsd' must be a list of only one element")
660 clean_indata = clean_indata['nsd'][0]
661 elif clean_indata.get('nsd:nsd'):
662 if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
663 raise EngineException("'nsd:nsd' must be a list of only one element")
664 clean_indata = clean_indata['nsd:nsd'][0]
665 return clean_indata
666
667 def _validate_input_new(self, indata, storage_params, force=False):
668 indata = self.pyangbind_validation("nsds", indata, force)
669 # Cross references validation in the descriptor
670         # TODO validate that if it contains cloud-init-file or charms, _admin.storage "pkg-dir" is not None
671 for vld in get_iterable(indata.get("vld")):
672 if vld.get("mgmt-network") and vld.get("ip-profile-ref"):
673 raise EngineException("Error at vld[id='{}']:ip-profile-ref"
674 " You cannot set an ip-profile when mgmt-network is True"
675 .format(vld["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
676 for vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
677 for constituent_vnfd in get_iterable(indata.get("constituent-vnfd")):
678 if vnfd_cp["member-vnf-index-ref"] == constituent_vnfd["member-vnf-index"]:
679 if vnfd_cp.get("vnfd-id-ref") and vnfd_cp["vnfd-id-ref"] != constituent_vnfd["vnfd-id-ref"]:
680 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[vnfd-id-ref='{}'] "
681 "does not match constituent-vnfd[member-vnf-index='{}']:vnfd-id-ref"
682 " '{}'".format(vld["id"], vnfd_cp["vnfd-id-ref"],
683 constituent_vnfd["member-vnf-index"],
684 constituent_vnfd["vnfd-id-ref"]),
685 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
686 break
687 else:
688 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
689 "does not match any constituent-vnfd:member-vnf-index"
690 .format(vld["id"], vnfd_cp["member-vnf-index-ref"]),
691 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
692 return indata
693
694 def _validate_input_edit(self, indata, force=False):
695         # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
696 return indata
697
698 def _check_descriptor_dependencies(self, session, descriptor):
699 """
700         Check that the dependent descriptors exist when a descriptor is created or edited. Also check that the
701         references to vnfd connection points are correct
702         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
703         :param descriptor: descriptor to be inserted or edited
704 :return: None or raises exception
705 """
706 if session["force"]:
707 return
708 member_vnfd_index = {}
709 if descriptor.get("constituent-vnfd") and not session["force"]:
710 for vnf in descriptor["constituent-vnfd"]:
711 vnfd_id = vnf["vnfd-id-ref"]
712 filter_q = self._get_project_filter(session)
713 filter_q["id"] = vnfd_id
714 vnf_list = self.db.get_list("vnfds", filter_q)
715 if not vnf_list:
716                     raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a "
717                                           "non-existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
718 # elif len(vnf_list) > 1:
719 # raise EngineException("More than one vnfd found for id='{}'".format(vnfd_id),
720 # http_code=HTTPStatus.CONFLICT)
721 member_vnfd_index[vnf["member-vnf-index"]] = vnf_list[0]
722
723 # Cross references validation in the descriptor and vnfd connection point validation
724 for vld in get_iterable(descriptor.get("vld")):
725 for referenced_vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
726 # look if this vnfd contains this connection point
727 vnfd = member_vnfd_index.get(referenced_vnfd_cp["member-vnf-index-ref"])
728 if not vnfd:
729 raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
730 "does not match any constituent-vnfd:member-vnf-index"
731 .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"]),
732 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
733 for vnfd_cp in get_iterable(vnfd.get("connection-point")):
734 if referenced_vnfd_cp.get("vnfd-connection-point-ref") == vnfd_cp["name"]:
735 break
736 else:
737 raise EngineException(
738 "Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}']:vnfd-"
739                         "connection-point-ref='{}' references a non-existing connection-point:name inside vnfd '{}'"
740 .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"],
741 referenced_vnfd_cp["vnfd-connection-point-ref"], vnfd["id"]),
742 http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
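    # Illustrative nsd fragment (made-up values) that satisfies the checks above:
    #     constituent-vnfd: [{member-vnf-index: "1", vnfd-id-ref: "cirros_vnfd"}]
    #     vld: [{id: mgmt, vnfd-connection-point-ref: [{member-vnf-index-ref: "1",
    #            vnfd-id-ref: "cirros_vnfd", vnfd-connection-point-ref: "eth0"}]}]
    # "cirros_vnfd" must be visible to this project and expose a connection-point named "eth0".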
743
744 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
745 super().check_conflict_on_edit(session, final_content, edit_content, _id)
746
747 self._check_descriptor_dependencies(session, final_content)
748
749 def check_conflict_on_del(self, session, _id):
750 """
751         Check that no NSR uses this NSD. Only NSRs belonging to this project are considered. Note that the NSD
752         can be public and be used by other projects.
753         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
754         :param _id: nsd internal id
755 :return: None or raises EngineException with the conflict
756 """
757 if session["force"]:
758 return
759 _filter = self._get_project_filter(session)
760 _filter["nsdId"] = _id
761 if self.db.get_list("nsrs", _filter):
762 raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
763
764
765 class NstTopic(DescriptorTopic):
766 topic = "nsts"
767 topic_msg = "nst"
768
769 def __init__(self, db, fs, msg):
770 DescriptorTopic.__init__(self, db, fs, msg)
771
772 @staticmethod
773 def _remove_envelop(indata=None):
774 if not indata:
775 return {}
776 clean_indata = indata
777
778 if clean_indata.get('nst'):
779 if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
780                 raise EngineException("'nst' must be a list of only one element")
781 clean_indata = clean_indata['nst'][0]
782 elif clean_indata.get('nst:nst'):
783 if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
784                 raise EngineException("'nst:nst' must be a list of only one element")
785 clean_indata = clean_indata['nst:nst'][0]
786 return clean_indata
787
788 def _validate_input_edit(self, indata, force=False):
789 # TODO validate with pyangbind, serialize
790 return indata
791
792 def _validate_input_new(self, indata, storage_params, force=False):
793 indata = self.pyangbind_validation("nsts", indata, force)
794 return indata.copy()
795
796 def _check_descriptor_dependencies(self, session, descriptor):
797 """
798 Check that the dependent descriptors exist on a new descriptor or edition
799 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
800 :param descriptor: descriptor to be inserted or edit
801 :return: None or raises exception
802 """
803 if not descriptor.get("netslice-subnet"):
804 return
805 for nsd in descriptor["netslice-subnet"]:
806 nsd_id = nsd["nsd-ref"]
807 filter_q = self._get_project_filter(session)
808 filter_q["id"] = nsd_id
809 if not self.db.get_list("nsds", filter_q):
810                 raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a "
811                                       "non-existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
812
813 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
814 super().check_conflict_on_edit(session, final_content, edit_content, _id)
815
816 self._check_descriptor_dependencies(session, final_content)
817
818 def check_conflict_on_del(self, session, _id):
819 """
820         Check that no NSIR uses this NST. Only NSIRs belonging to this project are considered. Note that the NST
821         can be public and be used by other projects.
822 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
823 :param _id: nst internal id
824 :return: None or raises EngineException with the conflict
825 """
826 # TODO: Check this method
827 if session["force"]:
828 return
829 # Get Network Slice Template from Database
830 _filter = self._get_project_filter(session)
831 _filter["_id"] = _id
832 nst = self.db.get_one("nsts", _filter)
833
834 # Search NSIs using NST via nst-ref
835 _filter = self._get_project_filter(session)
836 _filter["nst-ref"] = nst["id"]
837 nsis_list = self.db.get_list("nsis", _filter)
838 for nsi_item in nsis_list:
839 if nsi_item["_admin"].get("nsiState") != "TERMINATED":
840                 raise EngineException("There is some NSI that depends on this NST", http_code=HTTPStatus.CONFLICT)
841
842
843 class PduTopic(BaseTopic):
844 topic = "pdus"
845 topic_msg = "pdu"
846 schema_new = pdu_new_schema
847 schema_edit = pdu_edit_schema
848
849 def __init__(self, db, fs, msg):
850 BaseTopic.__init__(self, db, fs, msg)
851
852 @staticmethod
853 def format_on_new(content, project_id=None, make_public=False):
854 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
855 content["_admin"]["onboardingState"] = "CREATED"
856 content["_admin"]["operationalState"] = "ENABLED"
857 content["_admin"]["usageState"] = "NOT_IN_USE"
858
859 def check_conflict_on_del(self, session, _id):
860 if session["force"]:
861 return
862 # TODO Is it needed to check descriptors _admin.project_read/project_write??
863 _filter = {"vdur.pdu-id": _id}
864 if self.db.get_list("vnfrs", _filter):
865 raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)
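        # Note (illustrative): the filter above matches any VNFR whose vdur list contains an entry with
        # "pdu-id" equal to this PDU _id, e.g. {"vdur": [{"pdu-id": "<this _id>", ...}]}.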