Feature5950: Management of quotas in VIM Account
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import importlib
20 import copy
21
22 # import logging
23 from hashlib import md5
24 from osm_common.dbbase import DbException, deep_update_rfc7396
25 from http import HTTPStatus
26 from time import time
27 from uuid import uuid4
28 from re import fullmatch
29 from osm_nbi.validation import (
30 ValidationError,
31 pdu_new_schema,
32 pdu_edit_schema,
33 validate_input,
34 vnfpkgop_new_schema,
35 )
36 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
37
38 etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd")
39 etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd")
40 from osm_im.nst import nst as nst_im
41 from pyangbind.lib.serialise import pybindJSONDecoder
42 import pyangbind.lib.pybindJSON as pybindJSON
43 from osm_nbi import utils
44
45 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
46
47
48 class DescriptorTopic(BaseTopic):
49 def __init__(self, db, fs, msg, auth):
50 BaseTopic.__init__(self, db, fs, msg, auth)
51
52 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
53 final_content = super().check_conflict_on_edit(
54 session, final_content, edit_content, _id
55 )
56
57 def _check_unique_id_name(descriptor, position=""):
58 for desc_key, desc_item in descriptor.items():
59 if isinstance(desc_item, list) and desc_item:
60 used_ids = []
61 desc_item_id = None
62 for index, list_item in enumerate(desc_item):
63 if isinstance(list_item, dict):
64 _check_unique_id_name(
65 list_item, "{}.{}[{}]".format(position, desc_key, index)
66 )
67 # Base case
68 if index == 0 and (
69 list_item.get("id") or list_item.get("name")
70 ):
71 desc_item_id = "id" if list_item.get("id") else "name"
72 if desc_item_id and list_item.get(desc_item_id):
73 if list_item[desc_item_id] in used_ids:
74 position = "{}.{}[{}]".format(
75 position, desc_key, index
76 )
77 raise EngineException(
78 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
79 desc_item_id,
80 list_item[desc_item_id],
81 position,
82 ),
83 HTTPStatus.UNPROCESSABLE_ENTITY,
84 )
85 used_ids.append(list_item[desc_item_id])
86
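        # Illustrative example (hypothetical data) of what _check_unique_id_name rejects:
        # a descriptor containing {"vdu": [{"id": "mgmtVM"}, {"id": "mgmtVM"}]} raises
        # 422 UNPROCESSABLE_ENTITY because the same "id" value repeats inside one list.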
87 _check_unique_id_name(final_content)
88 # 1. validate again with pyangbind
89 # 1.1. remove internal keys
90 internal_keys = {}
91 for k in ("_id", "_admin"):
92 if k in final_content:
93 internal_keys[k] = final_content.pop(k)
94 storage_params = internal_keys["_admin"].get("storage")
95 serialized = self._validate_input_new(
96 final_content, storage_params, session["force"]
97 )
98
99 # 1.2. modify final_content with a serialized version
100 final_content = copy.deepcopy(serialized)
101 # 1.3. restore internal keys
102 for k, v in internal_keys.items():
103 final_content[k] = v
104 if session["force"]:
105 return final_content
106
107 # 2. check that this id is not present
108 if "id" in edit_content:
109 _filter = self._get_project_filter(session)
110
111 _filter["id"] = final_content["id"]
112 _filter["_id.neq"] = _id
113
114 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
115 raise EngineException(
116 "{} with id '{}' already exists for this project".format(
117 self.topic[:-1], final_content["id"]
118 ),
119 HTTPStatus.CONFLICT,
120 )
121
122 return final_content
123
124 @staticmethod
125 def format_on_new(content, project_id=None, make_public=False):
126 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
127 content["_admin"]["onboardingState"] = "CREATED"
128 content["_admin"]["operationalState"] = "DISABLED"
129 content["_admin"]["usageState"] = "NOT_IN_USE"
130
131 def delete_extra(self, session, _id, db_content, not_send_msg=None):
132 """
133 Deletes file system storage associated with the descriptor
134 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
135 :param _id: server internal id
136 :param db_content: The database content of the descriptor
137 :param not_send_msg: To not send message (False) or store content (list) instead
138 :return: None if ok or raises EngineException with the problem
139 """
140 self.fs.file_delete(_id, ignore_non_exist=True)
141 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
142
143 @staticmethod
144 def get_one_by_id(db, session, topic, id):
145 # find owned by this project
146 _filter = BaseTopic._get_project_filter(session)
147 _filter["id"] = id
148 desc_list = db.get_list(topic, _filter)
149 if len(desc_list) == 1:
150 return desc_list[0]
151 elif len(desc_list) > 1:
152 raise DbException(
153 "Found more than one {} with id='{}' belonging to this project".format(
154 topic[:-1], id
155 ),
156 HTTPStatus.CONFLICT,
157 )
158
159 # not found any: try to find public
160 _filter = BaseTopic._get_project_filter(session)
161 _filter["id"] = id
162 desc_list = db.get_list(topic, _filter)
163 if not desc_list:
164 raise DbException(
165                 "No {} found with id='{}'".format(topic[:-1], id),
166 HTTPStatus.NOT_FOUND,
167 )
168 elif len(desc_list) == 1:
169 return desc_list[0]
170 else:
171 raise DbException(
172                 "Found more than one public {} with id='{}'; and none belonging to this project".format(
173 topic[:-1], id
174 ),
175 HTTPStatus.CONFLICT,
176 )
177
178 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
179 """
180         Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal procedure.
181         Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
182         (self.upload_content)
183         :param rollback: list where created database items are appended in case a rollback needs to be done
184 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
185 :param indata: data to be inserted
186 :param kwargs: used to override the indata descriptor
187 :param headers: http request headers
188         :return: _id, None: identity of the inserted data; and None as there is no operation
189 """
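        # Sketch of the SOL005 two-step flow handled by this class (paths follow the
        # _links built in sol005_projection(); the actual URL map is defined elsewhere):
        #   1) POST .../vnf_packages (or .../ns_descriptors) with optional userDefinedData -> this method
        #   2) PUT  .../vnf_packages/<_id>/package_content with the package file -> upload_content()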
190
191         # No need to capture exceptions
192 # Check Quota
193 self.check_quota(session)
194
195 # _remove_envelop
196 if indata:
197 if "userDefinedData" in indata:
198 indata = indata["userDefinedData"]
199
200 # Override descriptor with query string kwargs
201 self._update_input_with_kwargs(indata, kwargs)
202 # uncomment when this method is implemented.
203 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
204 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
205
206 content = {"_admin": {"userDefinedData": indata}}
207 self.format_on_new(
208 content, session["project_id"], make_public=session["public"]
209 )
210 _id = self.db.create(self.topic, content)
211 rollback.append({"topic": self.topic, "_id": _id})
212 self._send_msg("created", {"_id": _id})
213 return _id, None
214
215 def upload_content(self, session, _id, indata, kwargs, headers):
216 """
217         Used for receiving content by chunks (with a transaction_id header and/or a gzip file); it stores and extracts it
218 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
219         :param _id: the nsd/vnfd is already created; this is its id
220 :param indata: http body request
221 :param kwargs: user query string to override parameters. NOT USED
222 :param headers: http request headers
223         :return: True if the package is completely uploaded or False if partial content has been uploaded
224 Raise exception on error
225 """
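        # Illustrative chunked upload (header values are examples only):
        #   Content-Filename: pkg.tar.gz
        #   Content-Range: bytes 0-1023/2048    -> first chunk stored, returns False (partial)
        #   Content-Range: bytes 1024-2047/2048 -> last chunk, package is processed, returns True
        #   Content-File-MD5: <md5 of the whole file>, verified only once the upload is complete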
226 # Check that _id exists and it is valid
227 current_desc = self.show(session, _id)
228
229 content_range_text = headers.get("Content-Range")
230 expected_md5 = headers.get("Content-File-MD5")
231 compressed = None
232 content_type = headers.get("Content-Type")
233         # check content_type first to avoid a TypeError when the header is missing
234         if content_type and (
235             "application/gzip" in content_type
236             or "application/x-gzip" in content_type
237             or "application/zip" in content_type
238         ):
239 compressed = "gzip"
240 filename = headers.get("Content-Filename")
241 if not filename:
242 filename = "package.tar.gz" if compressed else "package"
243 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
244 file_pkg = None
245 error_text = ""
246 try:
247 if content_range_text:
248 content_range = (
249 content_range_text.replace("-", " ").replace("/", " ").split()
250 )
251 if (
252 content_range[0] != "bytes"
253 ): # TODO check x<y not negative < total....
254 raise IndexError()
255 start = int(content_range[1])
256 end = int(content_range[2]) + 1
257 total = int(content_range[3])
258 else:
259 start = 0
260 temp_folder = (
261 _id + "_"
262             )  # all the content is uploaded here and, if ok, it is renamed from <id>_ to the <id> folder
263
264 if start:
265 if not self.fs.file_exists(temp_folder, "dir"):
266 raise EngineException(
267 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
268 )
269 else:
270 self.fs.file_delete(temp_folder, ignore_non_exist=True)
271 self.fs.mkdir(temp_folder)
272
273 storage = self.fs.get_params()
274 storage["folder"] = _id
275
276 file_path = (temp_folder, filename)
277 if self.fs.file_exists(file_path, "file"):
278 file_size = self.fs.file_size(file_path)
279 else:
280 file_size = 0
281 if file_size != start:
282 raise EngineException(
283 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
284 file_size, start
285 ),
286 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
287 )
288 file_pkg = self.fs.file_open(file_path, "a+b")
289 if isinstance(indata, dict):
290 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
291 file_pkg.write(indata_text.encode(encoding="utf-8"))
292 else:
293 indata_len = 0
294 while True:
295 indata_text = indata.read(4096)
296 indata_len += len(indata_text)
297 if not indata_text:
298 break
299 file_pkg.write(indata_text)
300 if content_range_text:
301 if indata_len != end - start:
302 raise EngineException(
303 "Mismatch between Content-Range header {}-{} and body length of {}".format(
304 start, end - 1, indata_len
305 ),
306 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
307 )
308 if end != total:
309 # TODO update to UPLOADING
310 return False
311
312 # PACKAGE UPLOADED
313 if expected_md5:
314 file_pkg.seek(0, 0)
315 file_md5 = md5()
316 chunk_data = file_pkg.read(1024)
317 while chunk_data:
318 file_md5.update(chunk_data)
319 chunk_data = file_pkg.read(1024)
320 if expected_md5 != file_md5.hexdigest():
321 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
322 file_pkg.seek(0, 0)
323 if compressed == "gzip":
324 tar = tarfile.open(mode="r", fileobj=file_pkg)
325 descriptor_file_name = None
326 for tarinfo in tar:
327 tarname = tarinfo.name
328 tarname_path = tarname.split("/")
329 if (
330 not tarname_path[0] or ".." in tarname_path
331                     ):  # a name starting with "/" means an absolute path
332 raise EngineException(
333                             "Absolute paths or '..' are not allowed in the package descriptor tar.gz"
334 )
335 if len(tarname_path) == 1 and not tarinfo.isdir():
336 raise EngineException(
337 "All files must be inside a dir for package descriptor tar.gz"
338 )
339 if (
340 tarname.endswith(".yaml")
341 or tarname.endswith(".json")
342 or tarname.endswith(".yml")
343 ):
344 storage["pkg-dir"] = tarname_path[0]
345 if len(tarname_path) == 2:
346 if descriptor_file_name:
347 raise EngineException(
348 "Found more than one descriptor file at package descriptor tar.gz"
349 )
350 descriptor_file_name = tarname
351 if not descriptor_file_name:
352 raise EngineException(
353                         "No descriptor file found in the package descriptor tar.gz"
354 )
355 storage["descriptor"] = descriptor_file_name
356 storage["zipfile"] = filename
357 self.fs.file_extract(tar, temp_folder)
358 with self.fs.file_open(
359 (temp_folder, descriptor_file_name), "r"
360 ) as descriptor_file:
361 content = descriptor_file.read()
362 else:
363 content = file_pkg.read()
364 storage["descriptor"] = descriptor_file_name = filename
365
366 if descriptor_file_name.endswith(".json"):
367 error_text = "Invalid json format "
368                 indata = json.loads(content)  # content is a str/bytes, not a file object
369 else:
370 error_text = "Invalid yaml format "
371 indata = yaml.load(content, Loader=yaml.SafeLoader)
372
373 current_desc["_admin"]["storage"] = storage
374 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
375 current_desc["_admin"]["operationalState"] = "ENABLED"
376
377 indata = self._remove_envelop(indata)
378
379 # Override descriptor with query string kwargs
380 if kwargs:
381 self._update_input_with_kwargs(indata, kwargs)
382
383 deep_update_rfc7396(current_desc, indata)
384 current_desc = self.check_conflict_on_edit(
385 session, current_desc, indata, _id=_id
386 )
387 current_desc["_admin"]["modified"] = time()
388 self.db.replace(self.topic, _id, current_desc)
389 self.fs.dir_rename(temp_folder, _id)
390
391 indata["_id"] = _id
392 self._send_msg("edited", indata)
393
394 # TODO if descriptor has changed because kwargs update content and remove cached zip
395 # TODO if zip is not present creates one
396 return True
397
398 except EngineException:
399 raise
400 except IndexError:
401 raise EngineException(
402 "invalid Content-Range header format. Expected 'bytes start-end/total'",
403 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
404 )
405 except IOError as e:
406 raise EngineException(
407 "invalid upload transaction sequence: '{}'".format(e),
408 HTTPStatus.BAD_REQUEST,
409 )
410 except tarfile.ReadError as e:
411 raise EngineException(
412 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
413 )
414 except (ValueError, yaml.YAMLError) as e:
415 raise EngineException(error_text + str(e))
416 except ValidationError as e:
417 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
418 finally:
419 if file_pkg:
420 file_pkg.close()
421
422 def get_file(self, session, _id, path=None, accept_header=None):
423 """
424 Return the file content of a vnfd or nsd
425 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
426 :param _id: Identity of the vnfd, nsd
427 :param path: artifact path or "$DESCRIPTOR" or None
428         :param accept_header: Content of the Accept header. Must contain application/zip and/or text/plain
429 :return: opened file plus Accept format or raises an exception
430 """
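        # Illustrative Accept values: "text/plain" returns the descriptor text,
        # "application/zip"/"application/gzip" return the stored package file,
        # and "*/*" enables both; anything else is rejected with 406 NOT_ACCEPTABLE below.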
431 accept_text = accept_zip = False
432 if accept_header:
433 if "text/plain" in accept_header or "*/*" in accept_header:
434 accept_text = True
435 if "application/zip" in accept_header or "*/*" in accept_header:
436 accept_zip = "application/zip"
437 elif "application/gzip" in accept_header:
438 accept_zip = "application/gzip"
439
440 if not accept_text and not accept_zip:
441 raise EngineException(
442 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
443 http_code=HTTPStatus.NOT_ACCEPTABLE,
444 )
445
446 content = self.show(session, _id)
447 if content["_admin"]["onboardingState"] != "ONBOARDED":
448 raise EngineException(
449 "Cannot get content because this resource is not at 'ONBOARDED' state. "
450 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
451 http_code=HTTPStatus.CONFLICT,
452 )
453 storage = content["_admin"]["storage"]
454 if path is not None and path != "$DESCRIPTOR": # artifacts
455 if not storage.get("pkg-dir"):
456 raise EngineException(
457                     "Package does not contain artifacts",
458 http_code=HTTPStatus.BAD_REQUEST,
459 )
460 if self.fs.file_exists(
461 (storage["folder"], storage["pkg-dir"], *path), "dir"
462 ):
463 folder_content = self.fs.dir_ls(
464 (storage["folder"], storage["pkg-dir"], *path)
465 )
466 return folder_content, "text/plain"
467 # TODO manage folders in http
468 else:
469 return (
470 self.fs.file_open(
471 (storage["folder"], storage["pkg-dir"], *path), "rb"
472 ),
473 "application/octet-stream",
474 )
475
476         # pkgtype     accept:  ZIP    TEXT   -> result
477         # manyfiles            yes    X      -> zip
478         #                      no     yes    -> error
479         # onefile              yes    no     -> zip
480         #                      X      yes    -> text
481 contain_many_files = False
482 if storage.get("pkg-dir"):
483             # check if there is more than one file in the package, ignoring checksums.txt.
484 pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
485 if len(pkg_files) >= 3 or (
486 len(pkg_files) == 2 and "checksums.txt" not in pkg_files
487 ):
488 contain_many_files = True
489 if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
490 return (
491 self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
492 "text/plain",
493 )
494 elif contain_many_files and not accept_zip:
495 raise EngineException(
496                 "Packages that contain several files must be retrieved with the 'application/zip' "
497                 "Accept header",
498 http_code=HTTPStatus.NOT_ACCEPTABLE,
499 )
500 else:
501 if not storage.get("zipfile"):
502 # TODO generate zipfile if not present
503 raise EngineException(
504                     "Only the 'text/plain' Accept header is allowed for this descriptor. To be solved in "
505                     "future versions",
506 http_code=HTTPStatus.NOT_ACCEPTABLE,
507 )
508 return (
509 self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
510 accept_zip,
511 )
512
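    # Example (hypothetical input) of what the next helper does with YANG-prefixed keys:
    #   {"etsi-nfv-vnfd:mgmt-cp": "vnf-cp0"} -> {"mgmt-cp": "vnf-cp0"}
    # only the text after the last ":" of each key is kept, recursively.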
513 def _remove_yang_prefixes_from_descriptor(self, descriptor):
514 new_descriptor = {}
515 for k, v in descriptor.items():
516 new_v = v
517 if isinstance(v, dict):
518 new_v = self._remove_yang_prefixes_from_descriptor(v)
519 elif isinstance(v, list):
520 new_v = list()
521 for x in v:
522 if isinstance(x, dict):
523 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
524 else:
525 new_v.append(x)
526 new_descriptor[k.split(":")[-1]] = new_v
527 return new_descriptor
528
529 def pyangbind_validation(self, item, data, force=False):
530 raise EngineException(
531 "Not possible to validate '{}' item".format(item),
532 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
533 )
534
535 def _validate_input_edit(self, indata, content, force=False):
536         # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
537 if "_id" in indata:
538 indata.pop("_id")
539 if "_admin" not in indata:
540 indata["_admin"] = {}
541
542 if "operationalState" in indata:
543 if indata["operationalState"] in ("ENABLED", "DISABLED"):
544 indata["_admin"]["operationalState"] = indata.pop("operationalState")
545 else:
546 raise EngineException(
547 "State '{}' is not a valid operational state".format(
548 indata["operationalState"]
549 ),
550 http_code=HTTPStatus.BAD_REQUEST,
551 )
552
553 # In the case of user defined data, we need to put the data in the root of the object
554 # to preserve current expected behaviour
555 if "userDefinedData" in indata:
556 data = indata.pop("userDefinedData")
557             if isinstance(data, dict):
558 indata["_admin"]["userDefinedData"] = data
559 else:
560 raise EngineException(
561 "userDefinedData should be an object, but is '{}' instead".format(
562 type(data)
563 ),
564 http_code=HTTPStatus.BAD_REQUEST,
565 )
566
567 if (
568 "operationalState" in indata["_admin"]
569 and content["_admin"]["operationalState"]
570 == indata["_admin"]["operationalState"]
571 ):
572 raise EngineException(
573 "operationalState already {}".format(
574 content["_admin"]["operationalState"]
575 ),
576 http_code=HTTPStatus.CONFLICT,
577 )
578
579 return indata
580
581
582 class VnfdTopic(DescriptorTopic):
583 topic = "vnfds"
584 topic_msg = "vnfd"
585
586 def __init__(self, db, fs, msg, auth):
587 DescriptorTopic.__init__(self, db, fs, msg, auth)
588
589 def pyangbind_validation(self, item, data, force=False):
590 if self._descriptor_data_is_in_old_format(data):
591 raise EngineException(
592 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
593 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
594 )
595 try:
596 myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
597 pybindJSONDecoder.load_ietf_json(
598 {"etsi-nfv-vnfd:vnfd": data},
599 None,
600 None,
601 obj=myvnfd,
602 path_helper=True,
603 skip_unknown=force,
604 )
605 out = pybindJSON.dumps(myvnfd, mode="ietf")
606 desc_out = self._remove_envelop(yaml.safe_load(out))
607 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
608 return utils.deep_update_dict(data, desc_out)
609 except Exception as e:
610 raise EngineException(
611 "Error in pyangbind validation: {}".format(str(e)),
612 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
613 )
614
615 @staticmethod
616 def _descriptor_data_is_in_old_format(data):
617 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
618
619 @staticmethod
620 def _remove_envelop(indata=None):
621 if not indata:
622 return {}
623 clean_indata = indata
624
625 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
626 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
627 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
628 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
629 elif clean_indata.get("vnfd"):
630 if not isinstance(clean_indata["vnfd"], dict):
631 raise EngineException("'vnfd' must be dict")
632 clean_indata = clean_indata["vnfd"]
633
634 return clean_indata
635
636 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
637 final_content = super().check_conflict_on_edit(
638 session, final_content, edit_content, _id
639 )
640
641 # set type of vnfd
642 contains_pdu = False
643 contains_vdu = False
644 for vdu in get_iterable(final_content.get("vdu")):
645 if vdu.get("pdu-type"):
646 contains_pdu = True
647 else:
648 contains_vdu = True
649 if contains_pdu:
650 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
651 elif contains_vdu:
652 final_content["_admin"]["type"] = "vnfd"
653         # if there is neither vdu nor pdu, do not fill type
654 return final_content
655
656 def check_conflict_on_del(self, session, _id, db_content):
657 """
658 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
659         that the VNFD can be public and be used by NSDs of other projects. Also check that there are no deployments
660         (vnfr) that use this vnfd
661 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
662 :param _id: vnfd internal id
663 :param db_content: The database content of the _id.
664 :return: None or raises EngineException with the conflict
665 """
666 if session["force"]:
667 return
668 descriptor = db_content
669 descriptor_id = descriptor.get("id")
670 if not descriptor_id: # empty vnfd not uploaded
671 return
672
673 _filter = self._get_project_filter(session)
674
675 # check vnfrs using this vnfd
676 _filter["vnfd-id"] = _id
677 if self.db.get_list("vnfrs", _filter):
678 raise EngineException(
679 "There is at least one VNF instance using this descriptor",
680 http_code=HTTPStatus.CONFLICT,
681 )
682
683 # check NSD referencing this VNFD
684 del _filter["vnfd-id"]
685 _filter["vnfd-id"] = descriptor_id
686 if self.db.get_list("nsds", _filter):
687 raise EngineException(
688 "There is at least one NS package referencing this descriptor",
689 http_code=HTTPStatus.CONFLICT,
690 )
691
692 def _validate_input_new(self, indata, storage_params, force=False):
693 indata.pop("onboardingState", None)
694 indata.pop("operationalState", None)
695 indata.pop("usageState", None)
696 indata.pop("links", None)
697
698 indata = self.pyangbind_validation("vnfds", indata, force)
699 # Cross references validation in the descriptor
700
701 self.validate_mgmt_interface_connection_point(indata)
702
703 for vdu in get_iterable(indata.get("vdu")):
704 self.validate_vdu_internal_connection_points(vdu)
705 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
706 self._validate_vdu_charms_in_package(storage_params, indata)
707
708 self._validate_vnf_charms_in_package(storage_params, indata)
709
710 self.validate_external_connection_points(indata)
711 self.validate_internal_virtual_links(indata)
712 self.validate_monitoring_params(indata)
713 self.validate_scaling_group_descriptor(indata)
714
715 return indata
716
717 @staticmethod
718 def validate_mgmt_interface_connection_point(indata):
719 if not indata.get("vdu"):
720 return
721 if not indata.get("mgmt-cp"):
722 raise EngineException(
723 "'mgmt-cp' is a mandatory field and it is not defined",
724 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
725 )
726
727 for cp in get_iterable(indata.get("ext-cpd")):
728 if cp["id"] == indata["mgmt-cp"]:
729 break
730 else:
731 raise EngineException(
732 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
733 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
734 )
735
736 @staticmethod
737 def validate_vdu_internal_connection_points(vdu):
738 int_cpds = set()
739 for cpd in get_iterable(vdu.get("int-cpd")):
740 cpd_id = cpd.get("id")
741 if cpd_id and cpd_id in int_cpds:
742 raise EngineException(
743 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
744 vdu["id"], cpd_id
745 ),
746 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
747 )
748 int_cpds.add(cpd_id)
749
750 @staticmethod
751 def validate_external_connection_points(indata):
752 all_vdus_int_cpds = set()
753 for vdu in get_iterable(indata.get("vdu")):
754 for int_cpd in get_iterable(vdu.get("int-cpd")):
755 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
756
757 ext_cpds = set()
758 for cpd in get_iterable(indata.get("ext-cpd")):
759 cpd_id = cpd.get("id")
760 if cpd_id and cpd_id in ext_cpds:
761 raise EngineException(
762 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
763 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
764 )
765 ext_cpds.add(cpd_id)
766
767 int_cpd = cpd.get("int-cpd")
768 if int_cpd:
769 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
770 raise EngineException(
771 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
772 cpd_id
773 ),
774 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
775 )
776 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
777
778 def _validate_vdu_charms_in_package(self, storage_params, indata):
779 for df in indata["df"]:
780 if (
781 "lcm-operations-configuration" in df
782 and "operate-vnf-op-config" in df["lcm-operations-configuration"]
783 ):
784 configs = df["lcm-operations-configuration"][
785 "operate-vnf-op-config"
786 ].get("day1-2", [])
787 vdus = df.get("vdu-profile", [])
788 for vdu in vdus:
789 for config in configs:
790 if config["id"] == vdu["id"] and utils.find_in_list(
791 config.get("execution-environment-list", []),
792 lambda ee: "juju" in ee,
793 ):
794 if not self._validate_package_folders(
795 storage_params, "charms"
796 ):
797 raise EngineException(
798 "Charm defined in vnf[id={}] but not present in "
799 "package".format(indata["id"])
800 )
801
802 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
803 if not vdu.get("cloud-init-file"):
804 return
805 if not self._validate_package_folders(
806 storage_params, "cloud_init", vdu["cloud-init-file"]
807 ):
808 raise EngineException(
809 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
810 "package".format(indata["id"], vdu["id"])
811 )
812
813 def _validate_vnf_charms_in_package(self, storage_params, indata):
814 # Get VNF configuration through new container
815 for deployment_flavor in indata.get("df", []):
816 if "lcm-operations-configuration" not in deployment_flavor:
817 return
818 if (
819 "operate-vnf-op-config"
820 not in deployment_flavor["lcm-operations-configuration"]
821 ):
822 return
823 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
824 "operate-vnf-op-config"
825 ]["day1-2"]:
826 if day_1_2_config["id"] == indata["id"]:
827 if utils.find_in_list(
828 day_1_2_config.get("execution-environment-list", []),
829 lambda ee: "juju" in ee,
830 ):
831 if not self._validate_package_folders(storage_params, "charms"):
832 raise EngineException(
833 "Charm defined in vnf[id={}] but not present in "
834 "package".format(indata["id"])
835 )
836
837 def _validate_package_folders(self, storage_params, folder, file=None):
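        # Expected layout (sketch, names are examples): <storage-folder>[_]/<pkg-dir>/<folder>[/<file>],
        # e.g. "<_id>_/my_vnf/charms" while the upload still sits in the temporary "<_id>_" dir,
        # or "<_id>/my_vnf/cloud_init/cloud-config.txt" once the package has been renamed.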
838 if not storage_params or not storage_params.get("pkg-dir"):
839 return False
840 else:
841 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
842 f = "{}_/{}/{}".format(
843 storage_params["folder"], storage_params["pkg-dir"], folder
844 )
845 else:
846 f = "{}/{}/{}".format(
847 storage_params["folder"], storage_params["pkg-dir"], folder
848 )
849 if file:
850 return self.fs.file_exists("{}/{}".format(f, file), "file")
851 else:
852 if self.fs.file_exists(f, "dir"):
853 if self.fs.dir_ls(f):
854 return True
855 return False
856
857 @staticmethod
858 def validate_internal_virtual_links(indata):
859 all_ivld_ids = set()
860 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
861 ivld_id = ivld.get("id")
862 if ivld_id and ivld_id in all_ivld_ids:
863 raise EngineException(
864 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
865 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
866 )
867 else:
868 all_ivld_ids.add(ivld_id)
869
870 for vdu in get_iterable(indata.get("vdu")):
871 for int_cpd in get_iterable(vdu.get("int-cpd")):
872 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
873 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
874 raise EngineException(
875 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
876 "int-virtual-link-desc".format(
877 vdu["id"], int_cpd["id"], int_cpd_ivld_id
878 ),
879 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
880 )
881
882 for df in get_iterable(indata.get("df")):
883 for vlp in get_iterable(df.get("virtual-link-profile")):
884 vlp_ivld_id = vlp.get("id")
885 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
886 raise EngineException(
887 "df[id='{}']:virtual-link-profile='{}' must match an existing "
888 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
889 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
890 )
891
892 @staticmethod
893 def validate_monitoring_params(indata):
894 all_monitoring_params = set()
895 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
896 for mp in get_iterable(ivld.get("monitoring-parameters")):
897 mp_id = mp.get("id")
898 if mp_id and mp_id in all_monitoring_params:
899 raise EngineException(
900 "Duplicated monitoring-parameter id in "
901 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
902 ivld["id"], mp_id
903 ),
904 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
905 )
906 else:
907 all_monitoring_params.add(mp_id)
908
909 for vdu in get_iterable(indata.get("vdu")):
910 for mp in get_iterable(vdu.get("monitoring-parameter")):
911 mp_id = mp.get("id")
912 if mp_id and mp_id in all_monitoring_params:
913 raise EngineException(
914 "Duplicated monitoring-parameter id in "
915 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
916 vdu["id"], mp_id
917 ),
918 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
919 )
920 else:
921 all_monitoring_params.add(mp_id)
922
923 for df in get_iterable(indata.get("df")):
924 for mp in get_iterable(df.get("monitoring-parameter")):
925 mp_id = mp.get("id")
926 if mp_id and mp_id in all_monitoring_params:
927 raise EngineException(
928 "Duplicated monitoring-parameter id in "
929 "df[id='{}']:monitoring-parameter[id='{}']".format(
930 df["id"], mp_id
931 ),
932 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
933 )
934 else:
935 all_monitoring_params.add(mp_id)
936
937 @staticmethod
938 def validate_scaling_group_descriptor(indata):
939 all_monitoring_params = set()
940 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
941 for mp in get_iterable(ivld.get("monitoring-parameters")):
942 all_monitoring_params.add(mp.get("id"))
943
944 for vdu in get_iterable(indata.get("vdu")):
945 for mp in get_iterable(vdu.get("monitoring-parameter")):
946 all_monitoring_params.add(mp.get("id"))
947
948 for df in get_iterable(indata.get("df")):
949 for mp in get_iterable(df.get("monitoring-parameter")):
950 all_monitoring_params.add(mp.get("id"))
951
952 for df in get_iterable(indata.get("df")):
953 for sa in get_iterable(df.get("scaling-aspect")):
954 for sp in get_iterable(sa.get("scaling-policy")):
955 for sc in get_iterable(sp.get("scaling-criteria")):
956 sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
957 if (
958 sc_monitoring_param
959 and sc_monitoring_param not in all_monitoring_params
960 ):
961 raise EngineException(
962 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
963 "[name='{}']:scaling-criteria[name='{}']: "
964 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
965 df["id"],
966 sa["id"],
967 sp["name"],
968 sc["name"],
969 sc_monitoring_param,
970 ),
971 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
972 )
973
974 for sca in get_iterable(sa.get("scaling-config-action")):
975 if (
976 "lcm-operations-configuration" not in df
977 or "operate-vnf-op-config"
978 not in df["lcm-operations-configuration"]
979 or not utils.find_in_list(
980 df["lcm-operations-configuration"][
981 "operate-vnf-op-config"
982 ].get("day1-2", []),
983 lambda config: config["id"] == indata["id"],
984 )
985 ):
986 raise EngineException(
987 "'day1-2 configuration' not defined in the descriptor but it is "
988 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
989 df["id"], sa["id"]
990 ),
991 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
992 )
993 for configuration in get_iterable(
994 df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
995 "day1-2", []
996 )
997 ):
998 for primitive in get_iterable(
999 configuration.get("config-primitive")
1000 ):
1001 if (
1002 primitive["name"]
1003 == sca["vnf-config-primitive-name-ref"]
1004 ):
1005 break
1006 else:
1007 raise EngineException(
1008 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1009 "config-primitive-name-ref='{}' does not match any "
1010 "day1-2 configuration:config-primitive:name".format(
1011 df["id"],
1012 sa["id"],
1013 sca["vnf-config-primitive-name-ref"],
1014 ),
1015 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1016 )
1017
1018 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1019 """
1020         Deletes associated file system storage (via super)
1021 Deletes associated vnfpkgops from database.
1022 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1023 :param _id: server internal id
1024 :param db_content: The database content of the descriptor
1025 :return: None
1026 :raises: FsException in case of error while deleting associated storage
1027 """
1028 super().delete_extra(session, _id, db_content, not_send_msg)
1029 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1030
1031 def sol005_projection(self, data):
1032 data["onboardingState"] = data["_admin"]["onboardingState"]
1033 data["operationalState"] = data["_admin"]["operationalState"]
1034 data["usageState"] = data["_admin"]["usageState"]
1035
1036 links = {}
1037 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1038 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1039 links["packageContent"] = {
1040 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1041 }
1042 data["_links"] = links
1043
1044 return super().sol005_projection(data)
1045
1046
1047 class NsdTopic(DescriptorTopic):
1048 topic = "nsds"
1049 topic_msg = "nsd"
1050
1051 def __init__(self, db, fs, msg, auth):
1052 DescriptorTopic.__init__(self, db, fs, msg, auth)
1053
1054 def pyangbind_validation(self, item, data, force=False):
1055 if self._descriptor_data_is_in_old_format(data):
1056 raise EngineException(
1057 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
1058 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1059 )
1060 try:
1061 nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
1062 mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
1063 pybindJSONDecoder.load_ietf_json(
1064 {"nsd": {"nsd": [data]}},
1065 None,
1066 None,
1067 obj=mynsd,
1068 path_helper=True,
1069 skip_unknown=force,
1070 )
1071 out = pybindJSON.dumps(mynsd, mode="ietf")
1072 desc_out = self._remove_envelop(yaml.safe_load(out))
1073 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
1074 if nsd_vnf_profiles:
1075 desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
1076 return desc_out
1077 except Exception as e:
1078 raise EngineException(
1079 "Error in pyangbind validation: {}".format(str(e)),
1080 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1081 )
1082
1083 @staticmethod
1084 def _descriptor_data_is_in_old_format(data):
1085 return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
1086
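    # Accepted envelopes (illustrative): {"nsd": {"nsd": [<nsd>]}}, {"etsi-nfv-nsd:nsd": {"nsd": [<nsd>]}},
    # or an already unwrapped <nsd>; the inner "nsd" list must contain exactly one element.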
1087 @staticmethod
1088 def _remove_envelop(indata=None):
1089 if not indata:
1090 return {}
1091 clean_indata = indata
1092
1093 if clean_indata.get("nsd"):
1094 clean_indata = clean_indata["nsd"]
1095 elif clean_indata.get("etsi-nfv-nsd:nsd"):
1096 clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
1097 if clean_indata.get("nsd"):
1098 if (
1099 not isinstance(clean_indata["nsd"], list)
1100 or len(clean_indata["nsd"]) != 1
1101 ):
1102 raise EngineException("'nsd' must be a list of only one element")
1103 clean_indata = clean_indata["nsd"][0]
1104 return clean_indata
1105
1106 def _validate_input_new(self, indata, storage_params, force=False):
1107 indata.pop("nsdOnboardingState", None)
1108 indata.pop("nsdOperationalState", None)
1109 indata.pop("nsdUsageState", None)
1110
1111 indata.pop("links", None)
1112
1113 indata = self.pyangbind_validation("nsds", indata, force)
1114 # Cross references validation in the descriptor
1115         # TODO validate that, if it contains cloud-init-file or charms, the package has artifacts and _admin.storage."pkg-dir" is not none
1116 for vld in get_iterable(indata.get("virtual-link-desc")):
1117 self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
1118
1119 self.validate_vnf_profiles_vnfd_id(indata)
1120
1121 return indata
1122
1123 @staticmethod
1124 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
1125 if not vld.get("mgmt-network"):
1126 return
1127 vld_id = vld.get("id")
1128 for df in get_iterable(indata.get("df")):
1129 for vlp in get_iterable(df.get("virtual-link-profile")):
1130 if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
1131 if vlp.get("virtual-link-protocol-data"):
1132 raise EngineException(
1133 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1134                         "protocol-data: you cannot set a virtual-link-protocol-data "
1135 "when mgmt-network is True".format(df["id"], vlp["id"]),
1136 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1137 )
1138
1139 @staticmethod
1140 def validate_vnf_profiles_vnfd_id(indata):
1141 all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
1142 for df in get_iterable(indata.get("df")):
1143 for vnf_profile in get_iterable(df.get("vnf-profile")):
1144 vnfd_id = vnf_profile.get("vnfd-id")
1145 if vnfd_id and vnfd_id not in all_vnfd_ids:
1146 raise EngineException(
1147 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1148 "does not match any vnfd-id".format(
1149 df["id"], vnf_profile["id"], vnfd_id
1150 ),
1151 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1152 )
1153
1154 def _validate_input_edit(self, indata, content, force=False):
1155         # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
1156 """
1157 indata looks as follows:
1158 - In the new case (conformant)
1159 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1160 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1161 - In the old case (backwards-compatible)
1162 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1163 """
1164 if "_admin" not in indata:
1165 indata["_admin"] = {}
1166
1167 if "nsdOperationalState" in indata:
1168 if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1169 indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
1170 else:
1171 raise EngineException(
1172 "State '{}' is not a valid operational state".format(
1173 indata["nsdOperationalState"]
1174 ),
1175 http_code=HTTPStatus.BAD_REQUEST,
1176 )
1177
1178 # In the case of user defined data, we need to put the data in the root of the object
1179 # to preserve current expected behaviour
1180 if "userDefinedData" in indata:
1181 data = indata.pop("userDefinedData")
1182             if isinstance(data, dict):
1183 indata["_admin"]["userDefinedData"] = data
1184 else:
1185 raise EngineException(
1186 "userDefinedData should be an object, but is '{}' instead".format(
1187 type(data)
1188 ),
1189 http_code=HTTPStatus.BAD_REQUEST,
1190 )
1191 if (
1192 "operationalState" in indata["_admin"]
1193 and content["_admin"]["operationalState"]
1194 == indata["_admin"]["operationalState"]
1195 ):
1196 raise EngineException(
1197 "nsdOperationalState already {}".format(
1198 content["_admin"]["operationalState"]
1199 ),
1200 http_code=HTTPStatus.CONFLICT,
1201 )
1202 return indata
1203
1204 def _check_descriptor_dependencies(self, session, descriptor):
1205 """
1206 Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
1207 connection points are ok
1208 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1209 :param descriptor: descriptor to be inserted or edit
1210 :return: None or raises exception
1211 """
1212 if session["force"]:
1213 return
1214 vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
1215
1216 # Cross references validation in the descriptor and vnfd connection point validation
1217 for df in get_iterable(descriptor.get("df")):
1218 self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
1219
1220 def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
1221 vnfds_index = {}
1222 if descriptor.get("vnfd-id") and not session["force"]:
1223 for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
1224 query_filter = self._get_project_filter(session)
1225 query_filter["id"] = vnfd_id
1226 vnf_list = self.db.get_list("vnfds", query_filter)
1227 if not vnf_list:
1228 raise EngineException(
1229 "Descriptor error at 'vnfd-id'='{}' references a non "
1230 "existing vnfd".format(vnfd_id),
1231 http_code=HTTPStatus.CONFLICT,
1232 )
1233 vnfds_index[vnfd_id] = vnf_list[0]
1234 return vnfds_index
1235
1236 @staticmethod
1237 def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
1238 for vnf_profile in get_iterable(df.get("vnf-profile")):
1239 vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
1240 all_vnfd_ext_cpds = set()
1241 for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
1242 if ext_cpd.get("id"):
1243 all_vnfd_ext_cpds.add(ext_cpd.get("id"))
1244
1245 for virtual_link in get_iterable(
1246 vnf_profile.get("virtual-link-connectivity")
1247 ):
1248 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
1249 vl_cpd_id = vl_cpd.get("constituent-cpd-id")
1250 if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
1251 raise EngineException(
1252 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1253 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1254 "non existing ext-cpd:id inside vnfd '{}'".format(
1255 df["id"],
1256 vnf_profile["id"],
1257 virtual_link["virtual-link-profile-id"],
1258 vl_cpd_id,
1259 vnfd["id"],
1260 ),
1261 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1262 )
1263
1264 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1265 final_content = super().check_conflict_on_edit(
1266 session, final_content, edit_content, _id
1267 )
1268
1269 self._check_descriptor_dependencies(session, final_content)
1270
1271 return final_content
1272
1273 def check_conflict_on_del(self, session, _id, db_content):
1274 """
1275 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1276 that NSD can be public and be used by other projects.
1277 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1278 :param _id: nsd internal id
1279 :param db_content: The database content of the _id
1280 :return: None or raises EngineException with the conflict
1281 """
1282 if session["force"]:
1283 return
1284 descriptor = db_content
1285 descriptor_id = descriptor.get("id")
1286 if not descriptor_id: # empty nsd not uploaded
1287 return
1288
1289 # check NSD used by NS
1290 _filter = self._get_project_filter(session)
1291 _filter["nsd-id"] = _id
1292 if self.db.get_list("nsrs", _filter):
1293 raise EngineException(
1294 "There is at least one NS instance using this descriptor",
1295 http_code=HTTPStatus.CONFLICT,
1296 )
1297
1298 # check NSD referenced by NST
1299 del _filter["nsd-id"]
1300 _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1301 if self.db.get_list("nsts", _filter):
1302 raise EngineException(
1303 "There is at least one NetSlice Template referencing this descriptor",
1304 http_code=HTTPStatus.CONFLICT,
1305 )
1306
1307 def sol005_projection(self, data):
1308 data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
1309 data["nsdOperationalState"] = data["_admin"]["operationalState"]
1310 data["nsdUsageState"] = data["_admin"]["usageState"]
1311
1312 links = {}
1313 links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
1314 links["nsd_content"] = {
1315 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
1316 }
1317 data["_links"] = links
1318
1319 return super().sol005_projection(data)
1320
1321
1322 class NstTopic(DescriptorTopic):
1323 topic = "nsts"
1324 topic_msg = "nst"
1325 quota_name = "slice_templates"
1326
1327 def __init__(self, db, fs, msg, auth):
1328 DescriptorTopic.__init__(self, db, fs, msg, auth)
1329
1330 def pyangbind_validation(self, item, data, force=False):
1331 try:
1332 mynst = nst_im()
1333 pybindJSONDecoder.load_ietf_json(
1334 {"nst": [data]},
1335 None,
1336 None,
1337 obj=mynst,
1338 path_helper=True,
1339 skip_unknown=force,
1340 )
1341 out = pybindJSON.dumps(mynst, mode="ietf")
1342 desc_out = self._remove_envelop(yaml.safe_load(out))
1343 return desc_out
1344 except Exception as e:
1345 raise EngineException(
1346 "Error in pyangbind validation: {}".format(str(e)),
1347 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1348 )
1349
1350 @staticmethod
1351 def _remove_envelop(indata=None):
1352 if not indata:
1353 return {}
1354 clean_indata = indata
1355
1356 if clean_indata.get("nst"):
1357 if (
1358 not isinstance(clean_indata["nst"], list)
1359 or len(clean_indata["nst"]) != 1
1360 ):
1361                 raise EngineException("'nst' must be a list of only one element")
1362 clean_indata = clean_indata["nst"][0]
1363 elif clean_indata.get("nst:nst"):
1364 if (
1365 not isinstance(clean_indata["nst:nst"], list)
1366 or len(clean_indata["nst:nst"]) != 1
1367 ):
1368                 raise EngineException("'nst:nst' must be a list of only one element")
1369 clean_indata = clean_indata["nst:nst"][0]
1370 return clean_indata
1371
1372 def _validate_input_new(self, indata, storage_params, force=False):
1373 indata.pop("onboardingState", None)
1374 indata.pop("operationalState", None)
1375 indata.pop("usageState", None)
1376 indata = self.pyangbind_validation("nsts", indata, force)
1377 return indata.copy()
1378
1379 def _check_descriptor_dependencies(self, session, descriptor):
1380 """
1381 Check that the dependent descriptors exist on a new descriptor or edition
1382 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1383 :param descriptor: descriptor to be inserted or edit
1384 :return: None or raises exception
1385 """
1386 if not descriptor.get("netslice-subnet"):
1387 return
1388 for nsd in descriptor["netslice-subnet"]:
1389 nsd_id = nsd["nsd-ref"]
1390 filter_q = self._get_project_filter(session)
1391 filter_q["id"] = nsd_id
1392 if not self.db.get_list("nsds", filter_q):
1393 raise EngineException(
1394 "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
1395 "existing nsd".format(nsd_id),
1396 http_code=HTTPStatus.CONFLICT,
1397 )
1398
1399 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1400 final_content = super().check_conflict_on_edit(
1401 session, final_content, edit_content, _id
1402 )
1403
1404 self._check_descriptor_dependencies(session, final_content)
1405 return final_content
1406
1407 def check_conflict_on_del(self, session, _id, db_content):
1408 """
1409 Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
1410 that NST can be public and be used by other projects.
1411 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1412 :param _id: nst internal id
1413 :param db_content: The database content of the _id.
1414 :return: None or raises EngineException with the conflict
1415 """
1416 # TODO: Check this method
1417 if session["force"]:
1418 return
1419 # Get Network Slice Template from Database
1420 _filter = self._get_project_filter(session)
1421 _filter["_admin.nst-id"] = _id
1422 if self.db.get_list("nsis", _filter):
1423 raise EngineException(
1424                 "There is at least one NetSlice Instance using this descriptor",
1425 http_code=HTTPStatus.CONFLICT,
1426 )
1427
1428 def sol005_projection(self, data):
1429 data["onboardingState"] = data["_admin"]["onboardingState"]
1430 data["operationalState"] = data["_admin"]["operationalState"]
1431 data["usageState"] = data["_admin"]["usageState"]
1432
1433 links = {}
1434 links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
1435 links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
1436 data["_links"] = links
1437
1438 return super().sol005_projection(data)
1439
1440
1441 class PduTopic(BaseTopic):
1442 topic = "pdus"
1443 topic_msg = "pdu"
1444 quota_name = "pduds"
1445 schema_new = pdu_new_schema
1446 schema_edit = pdu_edit_schema
1447
1448 def __init__(self, db, fs, msg, auth):
1449 BaseTopic.__init__(self, db, fs, msg, auth)
1450
1451 @staticmethod
1452 def format_on_new(content, project_id=None, make_public=False):
1453 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
1454 content["_admin"]["onboardingState"] = "CREATED"
1455 content["_admin"]["operationalState"] = "ENABLED"
1456 content["_admin"]["usageState"] = "NOT_IN_USE"
1457
1458 def check_conflict_on_del(self, session, _id, db_content):
1459 """
1460 Check that there is not any vnfr that uses this PDU
1461 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1462 :param _id: pdu internal id
1463 :param db_content: The database content of the _id.
1464 :return: None or raises EngineException with the conflict
1465 """
1466 if session["force"]:
1467 return
1468
1469 _filter = self._get_project_filter(session)
1470 _filter["vdur.pdu-id"] = _id
1471 if self.db.get_list("vnfrs", _filter):
1472 raise EngineException(
1473 "There is at least one VNF instance using this PDU",
1474 http_code=HTTPStatus.CONFLICT,
1475 )
1476
1477
1478 class VnfPkgOpTopic(BaseTopic):
1479 topic = "vnfpkgops"
1480 topic_msg = "vnfd"
1481 schema_new = vnfpkgop_new_schema
1482 schema_edit = None
1483
1484 def __init__(self, db, fs, msg, auth):
1485 BaseTopic.__init__(self, db, fs, msg, auth)
1486
1487 def edit(self, session, _id, indata=None, kwargs=None, content=None):
1488 raise EngineException(
1489 "Method 'edit' not allowed for topic '{}'".format(self.topic),
1490 HTTPStatus.METHOD_NOT_ALLOWED,
1491 )
1492
1493 def delete(self, session, _id, dry_run=False):
1494 raise EngineException(
1495 "Method 'delete' not allowed for topic '{}'".format(self.topic),
1496 HTTPStatus.METHOD_NOT_ALLOWED,
1497 )
1498
1499 def delete_list(self, session, filter_q=None):
1500 raise EngineException(
1501 "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
1502 HTTPStatus.METHOD_NOT_ALLOWED,
1503 )
1504
1505 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
1506 """
1507 Creates a new entry into database.
1508         :param rollback: list where created database items are appended in case a rollback needs to be done
1509 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1510 :param indata: data to be inserted
1511 :param kwargs: used to override the indata descriptor
1512 :param headers: http request headers
1513 :return: _id, op_id:
1514 _id: identity of the inserted data.
1515 op_id: None
1516 """
1517 self._update_input_with_kwargs(indata, kwargs)
1518 validate_input(indata, self.schema_new)
1519 vnfpkg_id = indata["vnfPkgId"]
1520 filter_q = BaseTopic._get_project_filter(session)
1521 filter_q["_id"] = vnfpkg_id
1522 vnfd = self.db.get_one("vnfds", filter_q)
1523 operation = indata["lcmOperationType"]
1524 kdu_name = indata["kdu_name"]
1525 for kdu in vnfd.get("kdu", []):
1526 if kdu["name"] == kdu_name:
1527 helm_chart = kdu.get("helm-chart")
1528 juju_bundle = kdu.get("juju-bundle")
1529 break
1530 else:
1531 raise EngineException(
1532 "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
1533 )
1534 if helm_chart:
1535 indata["helm-chart"] = helm_chart
1536 match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
1537 repo_name = match.group(1) if match else None
1538 elif juju_bundle:
1539 indata["juju-bundle"] = juju_bundle
1540 match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
1541 repo_name = match.group(1) if match else None
1542 else:
1543 raise EngineException(
1544 "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
1545 vnfpkg_id, kdu_name
1546 )
1547 )
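        # e.g. (illustrative) helm-chart "stable/openldap" -> repo_name "stable";
        # a chart or bundle reference without "/" leaves repo_name as None, so no k8s repo is looked up.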
1548 if repo_name:
1549 del filter_q["_id"]
1550 filter_q["name"] = repo_name
1551 repo = self.db.get_one("k8srepos", filter_q)
1552 k8srepo_id = repo.get("_id")
1553 k8srepo_url = repo.get("url")
1554 else:
1555 k8srepo_id = None
1556 k8srepo_url = None
1557 indata["k8srepoId"] = k8srepo_id
1558 indata["k8srepo_url"] = k8srepo_url
1559 vnfpkgop_id = str(uuid4())
1560 vnfpkgop_desc = {
1561 "_id": vnfpkgop_id,
1562 "operationState": "PROCESSING",
1563 "vnfPkgId": vnfpkg_id,
1564 "lcmOperationType": operation,
1565 "isAutomaticInvocation": False,
1566 "isCancelPending": False,
1567 "operationParams": indata,
1568 "links": {
1569 "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
1570 "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
1571 },
1572 }
1573 self.format_on_new(
1574 vnfpkgop_desc, session["project_id"], make_public=session["public"]
1575 )
1576 ctime = vnfpkgop_desc["_admin"]["created"]
1577 vnfpkgop_desc["statusEnteredTime"] = ctime
1578 vnfpkgop_desc["startTime"] = ctime
1579 self.db.create(self.topic, vnfpkgop_desc)
1580 rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
1581 self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
1582 return vnfpkgop_id, None