Bug 2032 NBI leaves files orphaned in fsmongo
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20
21 # import logging
22 from hashlib import md5
23 from osm_common.dbbase import DbException, deep_update_rfc7396
24 from http import HTTPStatus
25 from time import time
26 from uuid import uuid4
27 from re import fullmatch
28 from zipfile import ZipFile
29 from osm_nbi.validation import (
30 ValidationError,
31 pdu_new_schema,
32 pdu_edit_schema,
33 validate_input,
34 vnfpkgop_new_schema,
35 )
36 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
37 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
38 from osm_im.nst import nst as nst_im
39 from pyangbind.lib.serialise import pybindJSONDecoder
40 import pyangbind.lib.pybindJSON as pybindJSON
41 from osm_nbi import utils
42
43 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
44
45
46 class DescriptorTopic(BaseTopic):
47 def __init__(self, db, fs, msg, auth):
48 BaseTopic.__init__(self, db, fs, msg, auth)
49
50 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
51 final_content = super().check_conflict_on_edit(
52 session, final_content, edit_content, _id
53 )
54
55 def _check_unique_id_name(descriptor, position=""):
56 for desc_key, desc_item in descriptor.items():
57 if isinstance(desc_item, list) and desc_item:
58 used_ids = []
59 desc_item_id = None
60 for index, list_item in enumerate(desc_item):
61 if isinstance(list_item, dict):
62 _check_unique_id_name(
63 list_item, "{}.{}[{}]".format(position, desc_key, index)
64 )
65 # Base case
66 if index == 0 and (
67 list_item.get("id") or list_item.get("name")
68 ):
69 desc_item_id = "id" if list_item.get("id") else "name"
70 if desc_item_id and list_item.get(desc_item_id):
71 if list_item[desc_item_id] in used_ids:
72 position = "{}.{}[{}]".format(
73 position, desc_key, index
74 )
75 raise EngineException(
76 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
77 desc_item_id,
78 list_item[desc_item_id],
79 position,
80 ),
81 HTTPStatus.UNPROCESSABLE_ENTITY,
82 )
83 used_ids.append(list_item[desc_item_id])
84
85 _check_unique_id_name(final_content)
86 # 1. validate again with pyangbind
87 # 1.1. remove internal keys
88 internal_keys = {}
89 for k in ("_id", "_admin"):
90 if k in final_content:
91 internal_keys[k] = final_content.pop(k)
92 storage_params = internal_keys["_admin"].get("storage")
93 serialized = self._validate_input_new(
94 final_content, storage_params, session["force"]
95 )
96
97 # 1.2. modify final_content with a serialized version
98 final_content = copy.deepcopy(serialized)
99 # 1.3. restore internal keys
100 for k, v in internal_keys.items():
101 final_content[k] = v
102 if session["force"]:
103 return final_content
104
105 # 2. check that this id is not present
106 if "id" in edit_content:
107 _filter = self._get_project_filter(session)
108
109 _filter["id"] = final_content["id"]
110 _filter["_id.neq"] = _id
111
112 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
113 raise EngineException(
114 "{} with id '{}' already exists for this project".format(
115 self.topic[:-1], final_content["id"]
116 ),
117 HTTPStatus.CONFLICT,
118 )
119
120 return final_content
121
122 @staticmethod
123 def format_on_new(content, project_id=None, make_public=False):
124 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
125 content["_admin"]["onboardingState"] = "CREATED"
126 content["_admin"]["operationalState"] = "DISABLED"
127 content["_admin"]["usageState"] = "NOT_IN_USE"
128
129 def delete_extra(self, session, _id, db_content, not_send_msg=None):
130 """
131 Deletes file system storage associated with the descriptor
132 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
133 :param _id: server internal id
134 :param db_content: The database content of the descriptor
135 :param not_send_msg: To not send message (False) or store content (list) instead
136 :return: None if ok or raises EngineException with the problem
137 """
138 self.fs.file_delete(_id, ignore_non_exist=True)
139 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
140
141 @staticmethod
142 def get_one_by_id(db, session, topic, id):
143 # find owned by this project
144 _filter = BaseTopic._get_project_filter(session)
145 _filter["id"] = id
146 desc_list = db.get_list(topic, _filter)
147 if len(desc_list) == 1:
148 return desc_list[0]
149 elif len(desc_list) > 1:
150 raise DbException(
151 "Found more than one {} with id='{}' belonging to this project".format(
152 topic[:-1], id
153 ),
154 HTTPStatus.CONFLICT,
155 )
156
157 # none found: try to find a public one
158 _filter = BaseTopic._get_project_filter(session)
159 _filter["id"] = id
160 desc_list = db.get_list(topic, _filter)
161 if not desc_list:
162 raise DbException(
163 "Not found any {} with id='{}'".format(topic[:-1], id),
164 HTTPStatus.NOT_FOUND,
165 )
166 elif len(desc_list) == 1:
167 return desc_list[0]
168 else:
169 raise DbException(
170 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
171 topic[:-1], id
172 ),
173 HTTPStatus.CONFLICT,
174 )
175
176 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
177 """
178 Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal procedure.
179 Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
180 (self.upload_content)
181 :param rollback: list where items created in the database are appended, in case a rollback needs to be done
182 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
183 :param indata: data to be inserted
184 :param kwargs: used to override the indata descriptor
185 :param headers: http request headers
186 :return: _id, None: identity of the inserted data; and None, as there is no associated operation
187 """
188
189 # No need to capture exceptions
190 # Check Quota
191 self.check_quota(session)
192
193 # _remove_envelop
194 if indata:
195 if "userDefinedData" in indata:
196 indata = indata["userDefinedData"]
197
198 # Override descriptor with query string kwargs
199 self._update_input_with_kwargs(indata, kwargs)
200 # uncomment when this method is implemented.
201 # Avoid overriding in this case, as the target is userDefinedData and not the vnfd/nsd descriptors
202 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
203
204 content = {"_admin": {"userDefinedData": indata}}
205 self.format_on_new(
206 content, session["project_id"], make_public=session["public"]
207 )
208 _id = self.db.create(self.topic, content)
209 rollback.append({"topic": self.topic, "_id": _id})
210 self._send_msg("created", {"_id": _id})
211 return _id, None
212
213 def upload_content(self, session, _id, indata, kwargs, headers):
214 """
215 Used for receiving content in chunks (with a transaction_id header and/or a gzip/zip file); it stores and extracts it
216 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
217 :param _id: the id of the already created nsd/vnfd
218 :param indata: http body request
219 :param kwargs: user query string to override parameters. NOT USED
220 :param headers: http request headers
221 :return: True if the package is completely uploaded or False if partial content has been uploaded
222 Raise exception on error
223 """
224 # Check that _id exists and it is valid
225 current_desc = self.show(session, _id)
226
227 content_range_text = headers.get("Content-Range")
228 expected_md5 = headers.get("Content-File-MD5")
229 compressed = None
230 content_type = headers.get("Content-Type")
231 if (
232 content_type
233 and ("application/gzip" in content_type
234 or "application/x-gzip" in content_type)
235 ):
236 compressed = "gzip"
237 if (
238 content_type
239 and "application/zip" in content_type
240 ):
241 compressed = "zip"
242 filename = headers.get("Content-Filename")
243 if not filename and compressed:
244 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
245 elif not filename:
246 filename = "package"
247
248 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
249 file_pkg = None
250 error_text = ""
251 fs_rollback = []
252 try:
253 if content_range_text:
254 content_range = (
255 content_range_text.replace("-", " ").replace("/", " ").split()
256 )
257 if (
258 content_range[0] != "bytes"
259 ): # TODO check x<y not negative < total....
260 raise IndexError()
261 start = int(content_range[1])
262 end = int(content_range[2]) + 1
263 total = int(content_range[3])
264 else:
265 start = 0
266 temp_folder = (
267 _id + "_"
268 ) # all the content is uploaded here and, if ok, the folder is renamed from "<id>_" to "<id>"
269
270 if start:
271 if not self.fs.file_exists(temp_folder, "dir"):
272 raise EngineException(
273 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
274 )
275 else:
276 self.fs.file_delete(temp_folder, ignore_non_exist=True)
277 self.fs.mkdir(temp_folder)
278 fs_rollback.append(temp_folder)
279
280 storage = self.fs.get_params()
281 storage["folder"] = _id
282
283 file_path = (temp_folder, filename)
284 if self.fs.file_exists(file_path, "file"):
285 file_size = self.fs.file_size(file_path)
286 else:
287 file_size = 0
288 if file_size != start:
289 raise EngineException(
290 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
291 file_size, start
292 ),
293 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
294 )
295 file_pkg = self.fs.file_open(file_path, "a+b")
296 if isinstance(indata, dict):
297 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
298 file_pkg.write(indata_text.encode(encoding="utf-8"))
299 else:
300 indata_len = 0
301 while True:
302 indata_text = indata.read(4096)
303 indata_len += len(indata_text)
304 if not indata_text:
305 break
306 file_pkg.write(indata_text)
307 if content_range_text:
308 if indata_len != end - start:
309 raise EngineException(
310 "Mismatch between Content-Range header {}-{} and body length of {}".format(
311 start, end - 1, indata_len
312 ),
313 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
314 )
315 if end != total:
316 # TODO update to UPLOADING
317 return False
318
319 # PACKAGE UPLOADED
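# If the client provided Content-File-MD5, verify the checksum of the fully assembled
# file by re-reading it in 1 KiB chunks before parsing/extracting it.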
320 if expected_md5:
321 file_pkg.seek(0, 0)
322 file_md5 = md5()
323 chunk_data = file_pkg.read(1024)
324 while chunk_data:
325 file_md5.update(chunk_data)
326 chunk_data = file_pkg.read(1024)
327 if expected_md5 != file_md5.hexdigest():
328 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
329 file_pkg.seek(0, 0)
330 if compressed == "gzip":
331 tar = tarfile.open(mode="r", fileobj=file_pkg)
332 descriptor_file_name = None
333 for tarinfo in tar:
334 tarname = tarinfo.name
335 tarname_path = tarname.split("/")
336 if (
337 not tarname_path[0] or ".." in tarname_path
338 ): # an empty first path component means the name starts with "/" (absolute path)
339 raise EngineException(
340 "Absolute paths or '..' are not allowed in the package descriptor tar.gz"
341 )
342 if len(tarname_path) == 1 and not tarinfo.isdir():
343 raise EngineException(
344 "All files must be inside a dir for package descriptor tar.gz"
345 )
346 if (
347 tarname.endswith(".yaml")
348 or tarname.endswith(".json")
349 or tarname.endswith(".yml")
350 ):
351 storage["pkg-dir"] = tarname_path[0]
352 if len(tarname_path) == 2:
353 if descriptor_file_name:
354 raise EngineException(
355 "Found more than one descriptor file at package descriptor tar.gz"
356 )
357 descriptor_file_name = tarname
358 if not descriptor_file_name:
359 raise EngineException(
360 "Not found any descriptor file at package descriptor tar.gz"
361 )
362 storage["descriptor"] = descriptor_file_name
363 storage["zipfile"] = filename
364 self.fs.file_extract(tar, temp_folder)
365 with self.fs.file_open(
366 (temp_folder, descriptor_file_name), "r"
367 ) as descriptor_file:
368 content = descriptor_file.read()
369 elif compressed == "zip":
370 zipfile = ZipFile(file_pkg)
371 descriptor_file_name = None
372 for package_file in zipfile.infolist():
373 zipfilename = package_file.filename
374 file_path = zipfilename.split("/")
375 if (
376 not file_path[0] or ".." in zipfilename
377 ): # an empty first path component means the name starts with "/" (absolute path)
378 raise EngineException(
379 "Absolute paths or '..' are not allowed in the package descriptor zip"
380 )
381
382 if (
383 (
384 zipfilename.endswith(".yaml")
385 or zipfilename.endswith(".json")
386 or zipfilename.endswith(".yml")
387 ) and (
388 zipfilename.find("/") < 0
389 or zipfilename.find("Definitions") >= 0
390 )
391 ):
392 storage["pkg-dir"] = ""
393 if descriptor_file_name:
394 raise EngineException(
395 "Found more than one descriptor file at package descriptor zip"
396 )
397 descriptor_file_name = zipfilename
398 if not descriptor_file_name:
399 raise EngineException(
400 "Not found any descriptor file at package descriptor zip"
401 )
402 storage["descriptor"] = descriptor_file_name
403 storage["zipfile"] = filename
404 self.fs.file_extract(zipfile, temp_folder)
405
406 with self.fs.file_open(
407 (temp_folder, descriptor_file_name), "r"
408 ) as descriptor_file:
409 content = descriptor_file.read()
410 else:
411 content = file_pkg.read()
412 storage["descriptor"] = descriptor_file_name = filename
413
414 if descriptor_file_name.endswith(".json"):
415 error_text = "Invalid json format "
416 indata = json.loads(content)  # content is a str/bytes, not a file object
417 else:
418 error_text = "Invalid yaml format "
419 indata = yaml.load(content, Loader=yaml.SafeLoader)
420
421 current_desc["_admin"]["storage"] = storage
422 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
423 current_desc["_admin"]["operationalState"] = "ENABLED"
424
425 indata = self._remove_envelop(indata)
426
427 # Override descriptor with query string kwargs
428 if kwargs:
429 self._update_input_with_kwargs(indata, kwargs)
430
431 deep_update_rfc7396(current_desc, indata)
432 current_desc = self.check_conflict_on_edit(
433 session, current_desc, indata, _id=_id
434 )
435 current_desc["_admin"]["modified"] = time()
436 self.db.replace(self.topic, _id, current_desc)
437 self.fs.dir_rename(temp_folder, _id)
438 fs_rollback = []
439
440 indata["_id"] = _id
441 self._send_msg("edited", indata)
442
443 # TODO if descriptor has changed because kwargs update content and remove cached zip
444 # TODO if zip is not present creates one
445 return True
446
447 except EngineException:
448 raise
449 except IndexError:
450 raise EngineException(
451 "invalid Content-Range header format. Expected 'bytes start-end/total'",
452 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
453 )
454 except IOError as e:
455 raise EngineException(
456 "invalid upload transaction sequence: '{}'".format(e),
457 HTTPStatus.BAD_REQUEST,
458 )
459 except tarfile.ReadError as e:
460 raise EngineException(
461 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
462 )
463 except (ValueError, yaml.YAMLError) as e:
464 raise EngineException(error_text + str(e))
465 except ValidationError as e:
466 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
467 finally:
468 if file_pkg:
469 file_pkg.close()
470 for file in fs_rollback:
471 self.fs.file_delete(file, ignore_non_exist=True)
472
473 def get_file(self, session, _id, path=None, accept_header=None):
474 """
475 Return the file content of a vnfd or nsd
476 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
477 :param _id: Identity of the vnfd, nsd
478 :param path: artifact path or "$DESCRIPTOR" or None
479 :param accept_header: Content of the Accept header. Must contain application/zip and/or text/plain
480 :return: opened file plus Accept format or raises an exception
481 """
482 accept_text = accept_zip = False
483 if accept_header:
484 if "text/plain" in accept_header or "*/*" in accept_header:
485 accept_text = True
486 if "application/zip" in accept_header or "*/*" in accept_header:
487 accept_zip = "application/zip"
488 elif "application/gzip" in accept_header:
489 accept_zip = "application/gzip"
490
491 if not accept_text and not accept_zip:
492 raise EngineException(
493 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
494 http_code=HTTPStatus.NOT_ACCEPTABLE,
495 )
496
497 content = self.show(session, _id)
498 if content["_admin"]["onboardingState"] != "ONBOARDED":
499 raise EngineException(
500 "Cannot get content because this resource is not at 'ONBOARDED' state. "
501 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
502 http_code=HTTPStatus.CONFLICT,
503 )
504 storage = content["_admin"]["storage"]
505 if path is not None and path != "$DESCRIPTOR": # artifacts
506 if not storage.get("pkg-dir"):
507 raise EngineException(
508 "Packages does not contains artifacts",
509 http_code=HTTPStatus.BAD_REQUEST,
510 )
511 if self.fs.file_exists(
512 (storage["folder"], storage["pkg-dir"], *path), "dir"
513 ):
514 folder_content = self.fs.dir_ls(
515 (storage["folder"], storage["pkg-dir"], *path)
516 )
517 return folder_content, "text/plain"
518 # TODO manage folders in http
519 else:
520 return (
521 self.fs.file_open(
522 (storage["folder"], storage["pkg-dir"], *path), "rb"
523 ),
524 "application/octet-stream",
525 )
526
527 # pkgtype     accept ZIP   accept TEXT   -> result
528 # manyfiles      yes           X         -> zip
529 #                no            yes       -> error
530 # onefile        yes           no        -> zip
531 #                X             yes       -> text
532 contain_many_files = False
533 if storage.get("pkg-dir"):
534 # check if there is more than one file in the package, ignoring checksums.txt.
535 pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
536 if len(pkg_files) >= 3 or (
537 len(pkg_files) == 2 and "checksums.txt" not in pkg_files
538 ):
539 contain_many_files = True
540 if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
541 return (
542 self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
543 "text/plain",
544 )
545 elif contain_many_files and not accept_zip:
546 raise EngineException(
547 "Packages that contains several files need to be retrieved with 'application/zip'"
548 "Accept header",
549 http_code=HTTPStatus.NOT_ACCEPTABLE,
550 )
551 else:
552 if not storage.get("zipfile"):
553 # TODO generate zipfile if not present
554 raise EngineException(
555 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
556 "future versions",
557 http_code=HTTPStatus.NOT_ACCEPTABLE,
558 )
559 return (
560 self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
561 accept_zip,
562 )
563
564 def _remove_yang_prefixes_from_descriptor(self, descriptor):
565 new_descriptor = {}
566 for k, v in descriptor.items():
567 new_v = v
568 if isinstance(v, dict):
569 new_v = self._remove_yang_prefixes_from_descriptor(v)
570 elif isinstance(v, list):
571 new_v = list()
572 for x in v:
573 if isinstance(x, dict):
574 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
575 else:
576 new_v.append(x)
577 new_descriptor[k.split(":")[-1]] = new_v
578 return new_descriptor
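# Illustrative transformation performed above (sketch):
#   {"etsi-nfv-vnfd:ext-cpd": [...]}  ->  {"ext-cpd": [...]}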
579
580 def pyangbind_validation(self, item, data, force=False):
581 raise EngineException(
582 "Not possible to validate '{}' item".format(item),
583 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
584 )
585
586 def _validate_input_edit(self, indata, content, force=False):
587 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
588 if "_id" in indata:
589 indata.pop("_id")
590 if "_admin" not in indata:
591 indata["_admin"] = {}
592
593 if "operationalState" in indata:
594 if indata["operationalState"] in ("ENABLED", "DISABLED"):
595 indata["_admin"]["operationalState"] = indata.pop("operationalState")
596 else:
597 raise EngineException(
598 "State '{}' is not a valid operational state".format(
599 indata["operationalState"]
600 ),
601 http_code=HTTPStatus.BAD_REQUEST,
602 )
603
604 # In the case of user defined data, we need to put the data in the root of the object
605 # to preserve current expected behaviour
606 if "userDefinedData" in indata:
607 data = indata.pop("userDefinedData")
608 if isinstance(data, dict):
609 indata["_admin"]["userDefinedData"] = data
610 else:
611 raise EngineException(
612 "userDefinedData should be an object, but is '{}' instead".format(
613 type(data)
614 ),
615 http_code=HTTPStatus.BAD_REQUEST,
616 )
617
618 if (
619 "operationalState" in indata["_admin"]
620 and content["_admin"]["operationalState"]
621 == indata["_admin"]["operationalState"]
622 ):
623 raise EngineException(
624 "operationalState already {}".format(
625 content["_admin"]["operationalState"]
626 ),
627 http_code=HTTPStatus.CONFLICT,
628 )
629
630 return indata
631
632
633 class VnfdTopic(DescriptorTopic):
634 topic = "vnfds"
635 topic_msg = "vnfd"
636
637 def __init__(self, db, fs, msg, auth):
638 DescriptorTopic.__init__(self, db, fs, msg, auth)
639
640 def pyangbind_validation(self, item, data, force=False):
641 if self._descriptor_data_is_in_old_format(data):
642 raise EngineException(
643 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
644 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
645 )
646 try:
647 myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
648 pybindJSONDecoder.load_ietf_json(
649 {"etsi-nfv-vnfd:vnfd": data},
650 None,
651 None,
652 obj=myvnfd,
653 path_helper=True,
654 skip_unknown=force,
655 )
656 out = pybindJSON.dumps(myvnfd, mode="ietf")
657 desc_out = self._remove_envelop(yaml.safe_load(out))
658 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
659 return utils.deep_update_dict(data, desc_out)
660 except Exception as e:
661 raise EngineException(
662 "Error in pyangbind validation: {}".format(str(e)),
663 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
664 )
665
666 @staticmethod
667 def _descriptor_data_is_in_old_format(data):
668 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
669
670 @staticmethod
671 def _remove_envelop(indata=None):
672 if not indata:
673 return {}
674 clean_indata = indata
675
676 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
677 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
678 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
679 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
680 elif clean_indata.get("vnfd"):
681 if not isinstance(clean_indata["vnfd"], dict):
682 raise EngineException("'vnfd' must be dict")
683 clean_indata = clean_indata["vnfd"]
684
685 return clean_indata
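# Example of the envelope removal above (sketch):
#   {"etsi-nfv-vnfd:vnfd": {"id": "my-vnf", ...}}  ->  {"id": "my-vnf", ...}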
686
687 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
688 final_content = super().check_conflict_on_edit(
689 session, final_content, edit_content, _id
690 )
691
692 # set type of vnfd
693 contains_pdu = False
694 contains_vdu = False
695 for vdu in get_iterable(final_content.get("vdu")):
696 if vdu.get("pdu-type"):
697 contains_pdu = True
698 else:
699 contains_vdu = True
700 if contains_pdu:
701 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
702 elif contains_vdu:
703 final_content["_admin"]["type"] = "vnfd"
704 # if there is neither vdu nor pdu, do not fill type
705 return final_content
706
707 def check_conflict_on_del(self, session, _id, db_content):
708 """
709 Check that there is no NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
710 that the VNFD can be public and be used by NSDs of other projects. Also check that there are no deployments or
711 vnfrs that use this vnfd
712 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
713 :param _id: vnfd internal id
714 :param db_content: The database content of the _id.
715 :return: None or raises EngineException with the conflict
716 """
717 if session["force"]:
718 return
719 descriptor = db_content
720 descriptor_id = descriptor.get("id")
721 if not descriptor_id: # empty vnfd not uploaded
722 return
723
724 _filter = self._get_project_filter(session)
725
726 # check vnfrs using this vnfd
727 _filter["vnfd-id"] = _id
728 if self.db.get_list("vnfrs", _filter):
729 raise EngineException(
730 "There is at least one VNF instance using this descriptor",
731 http_code=HTTPStatus.CONFLICT,
732 )
733
734 # check NSD referencing this VNFD
735 del _filter["vnfd-id"]
736 _filter["vnfd-id"] = descriptor_id
737 if self.db.get_list("nsds", _filter):
738 raise EngineException(
739 "There is at least one NS package referencing this descriptor",
740 http_code=HTTPStatus.CONFLICT,
741 )
742
743 def _validate_input_new(self, indata, storage_params, force=False):
744 indata.pop("onboardingState", None)
745 indata.pop("operationalState", None)
746 indata.pop("usageState", None)
747 indata.pop("links", None)
748
749 indata = self.pyangbind_validation("vnfds", indata, force)
750 # Cross references validation in the descriptor
751
752 self.validate_mgmt_interface_connection_point(indata)
753
754 for vdu in get_iterable(indata.get("vdu")):
755 self.validate_vdu_internal_connection_points(vdu)
756 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
757 self._validate_vdu_charms_in_package(storage_params, indata)
758
759 self._validate_vnf_charms_in_package(storage_params, indata)
760
761 self.validate_external_connection_points(indata)
762 self.validate_internal_virtual_links(indata)
763 self.validate_monitoring_params(indata)
764 self.validate_scaling_group_descriptor(indata)
765
766 return indata
767
768 @staticmethod
769 def validate_mgmt_interface_connection_point(indata):
770 if not indata.get("vdu"):
771 return
772 if not indata.get("mgmt-cp"):
773 raise EngineException(
774 "'mgmt-cp' is a mandatory field and it is not defined",
775 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
776 )
777
778 for cp in get_iterable(indata.get("ext-cpd")):
779 if cp["id"] == indata["mgmt-cp"]:
780 break
781 else:
782 raise EngineException(
783 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
784 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
785 )
786
787 @staticmethod
788 def validate_vdu_internal_connection_points(vdu):
789 int_cpds = set()
790 for cpd in get_iterable(vdu.get("int-cpd")):
791 cpd_id = cpd.get("id")
792 if cpd_id and cpd_id in int_cpds:
793 raise EngineException(
794 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
795 vdu["id"], cpd_id
796 ),
797 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
798 )
799 int_cpds.add(cpd_id)
800
801 @staticmethod
802 def validate_external_connection_points(indata):
803 all_vdus_int_cpds = set()
804 for vdu in get_iterable(indata.get("vdu")):
805 for int_cpd in get_iterable(vdu.get("int-cpd")):
806 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
807
808 ext_cpds = set()
809 for cpd in get_iterable(indata.get("ext-cpd")):
810 cpd_id = cpd.get("id")
811 if cpd_id and cpd_id in ext_cpds:
812 raise EngineException(
813 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
814 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
815 )
816 ext_cpds.add(cpd_id)
817
818 int_cpd = cpd.get("int-cpd")
819 if int_cpd:
820 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
821 raise EngineException(
822 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
823 cpd_id
824 ),
825 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
826 )
827 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
828
829 def _validate_vdu_charms_in_package(self, storage_params, indata):
830 for df in indata["df"]:
831 if (
832 "lcm-operations-configuration" in df
833 and "operate-vnf-op-config" in df["lcm-operations-configuration"]
834 ):
835 configs = df["lcm-operations-configuration"][
836 "operate-vnf-op-config"
837 ].get("day1-2", [])
838 vdus = df.get("vdu-profile", [])
839 for vdu in vdus:
840 for config in configs:
841 if config["id"] == vdu["id"] and utils.find_in_list(
842 config.get("execution-environment-list", []),
843 lambda ee: "juju" in ee,
844 ):
845 if not self._validate_package_folders(
846 storage_params, "charms"
847 ) and not self._validate_package_folders(
848 storage_params, "Scripts/charms"
849 ):
850 raise EngineException(
851 "Charm defined in vnf[id={}] but not present in "
852 "package".format(indata["id"])
853 )
854
855 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
856 if not vdu.get("cloud-init-file"):
857 return
858 if not self._validate_package_folders(
859 storage_params, "cloud_init", vdu["cloud-init-file"]
860 ) and not self._validate_package_folders(
861 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
862 ):
863 raise EngineException(
864 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
865 "package".format(indata["id"], vdu["id"])
866 )
867
868 def _validate_vnf_charms_in_package(self, storage_params, indata):
869 # Get VNF configuration through new container
870 for deployment_flavor in indata.get("df", []):
871 if "lcm-operations-configuration" not in deployment_flavor:
872 return
873 if (
874 "operate-vnf-op-config"
875 not in deployment_flavor["lcm-operations-configuration"]
876 ):
877 return
878 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
879 "operate-vnf-op-config"
880 ]["day1-2"]:
881 if day_1_2_config["id"] == indata["id"]:
882 if utils.find_in_list(
883 day_1_2_config.get("execution-environment-list", []),
884 lambda ee: "juju" in ee,
885 ):
886 if not self._validate_package_folders(
887 storage_params, "charms"
888 ) and not self._validate_package_folders(
889 storage_params, "Scripts/charms"
890 ):
891 raise EngineException(
892 "Charm defined in vnf[id={}] but not present in "
893 "package".format(indata["id"])
894 )
895
896 def _validate_package_folders(self, storage_params, folder, file=None):
897 if not storage_params:
898 return False
899 elif not storage_params.get("pkg-dir"):
900 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
901 f = "{}_/{}".format(
902 storage_params["folder"], folder
903 )
904 else:
905 f = "{}/{}".format(
906 storage_params["folder"], folder
907 )
908 if file:
909 return self.fs.file_exists("{}/{}".format(f, file), "file")
910 else:
911 f = f+"/"
912 if self.fs.file_exists(f, "dir"):
913 if self.fs.dir_ls(f):
914 return True
915 return False
916 else:
917 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
918 f = "{}_/{}/{}".format(
919 storage_params["folder"], storage_params["pkg-dir"], folder
920 )
921 else:
922 f = "{}/{}/{}".format(
923 storage_params["folder"], storage_params["pkg-dir"], folder
924 )
925 if file:
926 return self.fs.file_exists("{}/{}".format(f, file), "file")
927 else:
928 if self.fs.file_exists(f, "dir"):
929 if self.fs.dir_ls(f):
930 return True
931 return False
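# Layouts checked above (sketch; "<folder>" is _admin.storage["folder"], or "<folder>_" while an
# upload transaction is still open):
#   without pkg-dir: <folder>/charms, <folder>/Scripts/charms, <folder>/cloud_init/<file>, ...
#   with pkg-dir:    <folder>/<pkg-dir>/charms, <folder>/<pkg-dir>/Scripts/charms, ...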
932
933 @staticmethod
934 def validate_internal_virtual_links(indata):
935 all_ivld_ids = set()
936 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
937 ivld_id = ivld.get("id")
938 if ivld_id and ivld_id in all_ivld_ids:
939 raise EngineException(
940 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
941 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
942 )
943 else:
944 all_ivld_ids.add(ivld_id)
945
946 for vdu in get_iterable(indata.get("vdu")):
947 for int_cpd in get_iterable(vdu.get("int-cpd")):
948 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
949 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
950 raise EngineException(
951 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
952 "int-virtual-link-desc".format(
953 vdu["id"], int_cpd["id"], int_cpd_ivld_id
954 ),
955 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
956 )
957
958 for df in get_iterable(indata.get("df")):
959 for vlp in get_iterable(df.get("virtual-link-profile")):
960 vlp_ivld_id = vlp.get("id")
961 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
962 raise EngineException(
963 "df[id='{}']:virtual-link-profile='{}' must match an existing "
964 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
965 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
966 )
967
968 @staticmethod
969 def validate_monitoring_params(indata):
970 all_monitoring_params = set()
971 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
972 for mp in get_iterable(ivld.get("monitoring-parameters")):
973 mp_id = mp.get("id")
974 if mp_id and mp_id in all_monitoring_params:
975 raise EngineException(
976 "Duplicated monitoring-parameter id in "
977 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
978 ivld["id"], mp_id
979 ),
980 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
981 )
982 else:
983 all_monitoring_params.add(mp_id)
984
985 for vdu in get_iterable(indata.get("vdu")):
986 for mp in get_iterable(vdu.get("monitoring-parameter")):
987 mp_id = mp.get("id")
988 if mp_id and mp_id in all_monitoring_params:
989 raise EngineException(
990 "Duplicated monitoring-parameter id in "
991 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
992 vdu["id"], mp_id
993 ),
994 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
995 )
996 else:
997 all_monitoring_params.add(mp_id)
998
999 for df in get_iterable(indata.get("df")):
1000 for mp in get_iterable(df.get("monitoring-parameter")):
1001 mp_id = mp.get("id")
1002 if mp_id and mp_id in all_monitoring_params:
1003 raise EngineException(
1004 "Duplicated monitoring-parameter id in "
1005 "df[id='{}']:monitoring-parameter[id='{}']".format(
1006 df["id"], mp_id
1007 ),
1008 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1009 )
1010 else:
1011 all_monitoring_params.add(mp_id)
1012
1013 @staticmethod
1014 def validate_scaling_group_descriptor(indata):
1015 all_monitoring_params = set()
1016 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1017 for mp in get_iterable(ivld.get("monitoring-parameters")):
1018 all_monitoring_params.add(mp.get("id"))
1019
1020 for vdu in get_iterable(indata.get("vdu")):
1021 for mp in get_iterable(vdu.get("monitoring-parameter")):
1022 all_monitoring_params.add(mp.get("id"))
1023
1024 for df in get_iterable(indata.get("df")):
1025 for mp in get_iterable(df.get("monitoring-parameter")):
1026 all_monitoring_params.add(mp.get("id"))
1027
1028 for df in get_iterable(indata.get("df")):
1029 for sa in get_iterable(df.get("scaling-aspect")):
1030 for sp in get_iterable(sa.get("scaling-policy")):
1031 for sc in get_iterable(sp.get("scaling-criteria")):
1032 sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
1033 if (
1034 sc_monitoring_param
1035 and sc_monitoring_param not in all_monitoring_params
1036 ):
1037 raise EngineException(
1038 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
1039 "[name='{}']:scaling-criteria[name='{}']: "
1040 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
1041 df["id"],
1042 sa["id"],
1043 sp["name"],
1044 sc["name"],
1045 sc_monitoring_param,
1046 ),
1047 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1048 )
1049
1050 for sca in get_iterable(sa.get("scaling-config-action")):
1051 if (
1052 "lcm-operations-configuration" not in df
1053 or "operate-vnf-op-config"
1054 not in df["lcm-operations-configuration"]
1055 or not utils.find_in_list(
1056 df["lcm-operations-configuration"][
1057 "operate-vnf-op-config"
1058 ].get("day1-2", []),
1059 lambda config: config["id"] == indata["id"],
1060 )
1061 ):
1062 raise EngineException(
1063 "'day1-2 configuration' not defined in the descriptor but it is "
1064 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
1065 df["id"], sa["id"]
1066 ),
1067 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1068 )
1069 for configuration in get_iterable(
1070 df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1071 "day1-2", []
1072 )
1073 ):
1074 for primitive in get_iterable(
1075 configuration.get("config-primitive")
1076 ):
1077 if (
1078 primitive["name"]
1079 == sca["vnf-config-primitive-name-ref"]
1080 ):
1081 break
1082 else:
1083 raise EngineException(
1084 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1085 "config-primitive-name-ref='{}' does not match any "
1086 "day1-2 configuration:config-primitive:name".format(
1087 df["id"],
1088 sa["id"],
1089 sca["vnf-config-primitive-name-ref"],
1090 ),
1091 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1092 )
1093
1094 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1095 """
1096 Deletes associated file system storage (via super)
1097 Deletes associated vnfpkgops from database.
1098 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1099 :param _id: server internal id
1100 :param db_content: The database content of the descriptor
1101 :return: None
1102 :raises: FsException in case of error while deleting associated storage
1103 """
1104 super().delete_extra(session, _id, db_content, not_send_msg)
1105 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1106
1107 def sol005_projection(self, data):
1108 data["onboardingState"] = data["_admin"]["onboardingState"]
1109 data["operationalState"] = data["_admin"]["operationalState"]
1110 data["usageState"] = data["_admin"]["usageState"]
1111
1112 links = {}
1113 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1114 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1115 links["packageContent"] = {
1116 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1117 }
1118 data["_links"] = links
1119
1120 return super().sol005_projection(data)
1121
1122
1123 class NsdTopic(DescriptorTopic):
1124 topic = "nsds"
1125 topic_msg = "nsd"
1126
1127 def __init__(self, db, fs, msg, auth):
1128 DescriptorTopic.__init__(self, db, fs, msg, auth)
1129
1130 def pyangbind_validation(self, item, data, force=False):
1131 if self._descriptor_data_is_in_old_format(data):
1132 raise EngineException(
1133 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
1134 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1135 )
1136 try:
1137 nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
1138 mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
1139 pybindJSONDecoder.load_ietf_json(
1140 {"nsd": {"nsd": [data]}},
1141 None,
1142 None,
1143 obj=mynsd,
1144 path_helper=True,
1145 skip_unknown=force,
1146 )
1147 out = pybindJSON.dumps(mynsd, mode="ietf")
1148 desc_out = self._remove_envelop(yaml.safe_load(out))
1149 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
1150 if nsd_vnf_profiles:
1151 desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
1152 return desc_out
1153 except Exception as e:
1154 raise EngineException(
1155 "Error in pyangbind validation: {}".format(str(e)),
1156 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1157 )
1158
1159 @staticmethod
1160 def _descriptor_data_is_in_old_format(data):
1161 return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
1162
1163 @staticmethod
1164 def _remove_envelop(indata=None):
1165 if not indata:
1166 return {}
1167 clean_indata = indata
1168
1169 if clean_indata.get("nsd"):
1170 clean_indata = clean_indata["nsd"]
1171 elif clean_indata.get("etsi-nfv-nsd:nsd"):
1172 clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
1173 if clean_indata.get("nsd"):
1174 if (
1175 not isinstance(clean_indata["nsd"], list)
1176 or len(clean_indata["nsd"]) != 1
1177 ):
1178 raise EngineException("'nsd' must be a list of only one element")
1179 clean_indata = clean_indata["nsd"][0]
1180 return clean_indata
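# Example of the envelope removal above (sketch):
#   {"nsd": {"nsd": [{"id": "my-ns", ...}]}}  ->  {"id": "my-ns", ...}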
1181
1182 def _validate_input_new(self, indata, storage_params, force=False):
1183 indata.pop("nsdOnboardingState", None)
1184 indata.pop("nsdOperationalState", None)
1185 indata.pop("nsdUsageState", None)
1186
1187 indata.pop("links", None)
1188
1189 indata = self.pyangbind_validation("nsds", indata, force)
1190 # Cross references validation in the descriptor
1191 # TODO validate that, if it contains cloud-init-file or charms, it has artifacts and _admin.storage["pkg-dir"] is not None
1192 for vld in get_iterable(indata.get("virtual-link-desc")):
1193 self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
1194
1195 self.validate_vnf_profiles_vnfd_id(indata)
1196
1197 return indata
1198
1199 @staticmethod
1200 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
1201 if not vld.get("mgmt-network"):
1202 return
1203 vld_id = vld.get("id")
1204 for df in get_iterable(indata.get("df")):
1205 for vlp in get_iterable(df.get("virtual-link-profile")):
1206 if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
1207 if vlp.get("virtual-link-protocol-data"):
1208 raise EngineException(
1209 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1210 "protocol-data You cannot set a virtual-link-protocol-data "
1211 "when mgmt-network is True".format(df["id"], vlp["id"]),
1212 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1213 )
1214
1215 @staticmethod
1216 def validate_vnf_profiles_vnfd_id(indata):
1217 all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
1218 for df in get_iterable(indata.get("df")):
1219 for vnf_profile in get_iterable(df.get("vnf-profile")):
1220 vnfd_id = vnf_profile.get("vnfd-id")
1221 if vnfd_id and vnfd_id not in all_vnfd_ids:
1222 raise EngineException(
1223 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1224 "does not match any vnfd-id".format(
1225 df["id"], vnf_profile["id"], vnfd_id
1226 ),
1227 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1228 )
1229
1230 def _validate_input_edit(self, indata, content, force=False):
1231 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
1232 """
1233 indata looks as follows:
1234 - In the new case (conformant)
1235 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1236 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1237 - In the old case (backwards-compatible)
1238 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1239 """
1240 if "_admin" not in indata:
1241 indata["_admin"] = {}
1242
1243 if "nsdOperationalState" in indata:
1244 if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1245 indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
1246 else:
1247 raise EngineException(
1248 "State '{}' is not a valid operational state".format(
1249 indata["nsdOperationalState"]
1250 ),
1251 http_code=HTTPStatus.BAD_REQUEST,
1252 )
1253
1254 # In the case of user defined data, we need to put the data in the root of the object
1255 # to preserve current expected behaviour
1256 if "userDefinedData" in indata:
1257 data = indata.pop("userDefinedData")
1258 if isinstance(data, dict):
1259 indata["_admin"]["userDefinedData"] = data
1260 else:
1261 raise EngineException(
1262 "userDefinedData should be an object, but is '{}' instead".format(
1263 type(data)
1264 ),
1265 http_code=HTTPStatus.BAD_REQUEST,
1266 )
1267 if (
1268 "operationalState" in indata["_admin"]
1269 and content["_admin"]["operationalState"]
1270 == indata["_admin"]["operationalState"]
1271 ):
1272 raise EngineException(
1273 "nsdOperationalState already {}".format(
1274 content["_admin"]["operationalState"]
1275 ),
1276 http_code=HTTPStatus.CONFLICT,
1277 )
1278 return indata
1279
1280 def _check_descriptor_dependencies(self, session, descriptor):
1281 """
1282 Check that the dependent descriptors exist when a descriptor is created or edited. Also checks that references
1283 to vnfd connection points are ok
1284 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1285 :param descriptor: descriptor to be inserted or edited
1286 :return: None or raises exception
1287 """
1288 if session["force"]:
1289 return
1290 vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
1291
1292 # Cross references validation in the descriptor and vnfd connection point validation
1293 for df in get_iterable(descriptor.get("df")):
1294 self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
1295
1296 def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
1297 vnfds_index = {}
1298 if descriptor.get("vnfd-id") and not session["force"]:
1299 for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
1300 query_filter = self._get_project_filter(session)
1301 query_filter["id"] = vnfd_id
1302 vnf_list = self.db.get_list("vnfds", query_filter)
1303 if not vnf_list:
1304 raise EngineException(
1305 "Descriptor error at 'vnfd-id'='{}' references a non "
1306 "existing vnfd".format(vnfd_id),
1307 http_code=HTTPStatus.CONFLICT,
1308 )
1309 vnfds_index[vnfd_id] = vnf_list[0]
1310 return vnfds_index
1311
1312 @staticmethod
1313 def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
1314 for vnf_profile in get_iterable(df.get("vnf-profile")):
1315 vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
1316 all_vnfd_ext_cpds = set()
1317 for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
1318 if ext_cpd.get("id"):
1319 all_vnfd_ext_cpds.add(ext_cpd.get("id"))
1320
1321 for virtual_link in get_iterable(
1322 vnf_profile.get("virtual-link-connectivity")
1323 ):
1324 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
1325 vl_cpd_id = vl_cpd.get("constituent-cpd-id")
1326 if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
1327 raise EngineException(
1328 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1329 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1330 "non existing ext-cpd:id inside vnfd '{}'".format(
1331 df["id"],
1332 vnf_profile["id"],
1333 virtual_link["virtual-link-profile-id"],
1334 vl_cpd_id,
1335 vnfd["id"],
1336 ),
1337 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1338 )
1339
1340 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1341 final_content = super().check_conflict_on_edit(
1342 session, final_content, edit_content, _id
1343 )
1344
1345 self._check_descriptor_dependencies(session, final_content)
1346
1347 return final_content
1348
1349 def check_conflict_on_del(self, session, _id, db_content):
1350 """
1351 Check that there is no NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1352 that NSD can be public and be used by other projects.
1353 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1354 :param _id: nsd internal id
1355 :param db_content: The database content of the _id
1356 :return: None or raises EngineException with the conflict
1357 """
1358 if session["force"]:
1359 return
1360 descriptor = db_content
1361 descriptor_id = descriptor.get("id")
1362 if not descriptor_id: # empty nsd not uploaded
1363 return
1364
1365 # check NSD used by NS
1366 _filter = self._get_project_filter(session)
1367 _filter["nsd-id"] = _id
1368 if self.db.get_list("nsrs", _filter):
1369 raise EngineException(
1370 "There is at least one NS instance using this descriptor",
1371 http_code=HTTPStatus.CONFLICT,
1372 )
1373
1374 # check NSD referenced by NST
1375 del _filter["nsd-id"]
1376 _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1377 if self.db.get_list("nsts", _filter):
1378 raise EngineException(
1379 "There is at least one NetSlice Template referencing this descriptor",
1380 http_code=HTTPStatus.CONFLICT,
1381 )
1382
1383 def sol005_projection(self, data):
1384 data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
1385 data["nsdOperationalState"] = data["_admin"]["operationalState"]
1386 data["nsdUsageState"] = data["_admin"]["usageState"]
1387
1388 links = {}
1389 links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
1390 links["nsd_content"] = {
1391 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
1392 }
1393 data["_links"] = links
1394
1395 return super().sol005_projection(data)
1396
1397
1398 class NstTopic(DescriptorTopic):
1399 topic = "nsts"
1400 topic_msg = "nst"
1401 quota_name = "slice_templates"
1402
1403 def __init__(self, db, fs, msg, auth):
1404 DescriptorTopic.__init__(self, db, fs, msg, auth)
1405
1406 def pyangbind_validation(self, item, data, force=False):
1407 try:
1408 mynst = nst_im()
1409 pybindJSONDecoder.load_ietf_json(
1410 {"nst": [data]},
1411 None,
1412 None,
1413 obj=mynst,
1414 path_helper=True,
1415 skip_unknown=force,
1416 )
1417 out = pybindJSON.dumps(mynst, mode="ietf")
1418 desc_out = self._remove_envelop(yaml.safe_load(out))
1419 return desc_out
1420 except Exception as e:
1421 raise EngineException(
1422 "Error in pyangbind validation: {}".format(str(e)),
1423 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1424 )
1425
1426 @staticmethod
1427 def _remove_envelop(indata=None):
1428 if not indata:
1429 return {}
1430 clean_indata = indata
1431
1432 if clean_indata.get("nst"):
1433 if (
1434 not isinstance(clean_indata["nst"], list)
1435 or len(clean_indata["nst"]) != 1
1436 ):
1437 raise EngineException("'nst' must be a list only one element")
1438 clean_indata = clean_indata["nst"][0]
1439 elif clean_indata.get("nst:nst"):
1440 if (
1441 not isinstance(clean_indata["nst:nst"], list)
1442 or len(clean_indata["nst:nst"]) != 1
1443 ):
1444 raise EngineException("'nst:nst' must be a list only one element")
1445 clean_indata = clean_indata["nst:nst"][0]
1446 return clean_indata
1447
1448 def _validate_input_new(self, indata, storage_params, force=False):
1449 indata.pop("onboardingState", None)
1450 indata.pop("operationalState", None)
1451 indata.pop("usageState", None)
1452 indata = self.pyangbind_validation("nsts", indata, force)
1453 return indata.copy()
1454
1455 def _check_descriptor_dependencies(self, session, descriptor):
1456 """
1457 Check that the dependent descriptors exist when a descriptor is created or edited
1458 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1459 :param descriptor: descriptor to be inserted or edited
1460 :return: None or raises exception
1461 """
1462 if not descriptor.get("netslice-subnet"):
1463 return
1464 for nsd in descriptor["netslice-subnet"]:
1465 nsd_id = nsd["nsd-ref"]
1466 filter_q = self._get_project_filter(session)
1467 filter_q["id"] = nsd_id
1468 if not self.db.get_list("nsds", filter_q):
1469 raise EngineException(
1470 "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
1471 "existing nsd".format(nsd_id),
1472 http_code=HTTPStatus.CONFLICT,
1473 )
1474
1475 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1476 final_content = super().check_conflict_on_edit(
1477 session, final_content, edit_content, _id
1478 )
1479
1480 self._check_descriptor_dependencies(session, final_content)
1481 return final_content
1482
1483 def check_conflict_on_del(self, session, _id, db_content):
1484 """
1485 Check that there is no NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
1486 that NST can be public and be used by other projects.
1487 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1488 :param _id: nst internal id
1489 :param db_content: The database content of the _id.
1490 :return: None or raises EngineException with the conflict
1491 """
1492 # TODO: Check this method
1493 if session["force"]:
1494 return
1495 # Get Network Slice Template from Database
1496 _filter = self._get_project_filter(session)
1497 _filter["_admin.nst-id"] = _id
1498 if self.db.get_list("nsis", _filter):
1499 raise EngineException(
1500 "there is at least one Netslice Instance using this descriptor",
1501 http_code=HTTPStatus.CONFLICT,
1502 )
1503
1504 def sol005_projection(self, data):
1505 data["onboardingState"] = data["_admin"]["onboardingState"]
1506 data["operationalState"] = data["_admin"]["operationalState"]
1507 data["usageState"] = data["_admin"]["usageState"]
1508
1509 links = {}
1510 links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
1511 links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
1512 data["_links"] = links
1513
1514 return super().sol005_projection(data)
1515
1516
1517 class PduTopic(BaseTopic):
1518 topic = "pdus"
1519 topic_msg = "pdu"
1520 quota_name = "pduds"
1521 schema_new = pdu_new_schema
1522 schema_edit = pdu_edit_schema
1523
1524 def __init__(self, db, fs, msg, auth):
1525 BaseTopic.__init__(self, db, fs, msg, auth)
1526
1527 @staticmethod
1528 def format_on_new(content, project_id=None, make_public=False):
1529 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
1530 content["_admin"]["onboardingState"] = "CREATED"
1531 content["_admin"]["operationalState"] = "ENABLED"
1532 content["_admin"]["usageState"] = "NOT_IN_USE"
1533
1534 def check_conflict_on_del(self, session, _id, db_content):
1535 """
1536 Check that there is no vnfr that uses this PDU
1537 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1538 :param _id: pdu internal id
1539 :param db_content: The database content of the _id.
1540 :return: None or raises EngineException with the conflict
1541 """
1542 if session["force"]:
1543 return
1544
1545 _filter = self._get_project_filter(session)
1546 _filter["vdur.pdu-id"] = _id
1547 if self.db.get_list("vnfrs", _filter):
1548 raise EngineException(
1549 "There is at least one VNF instance using this PDU",
1550 http_code=HTTPStatus.CONFLICT,
1551 )
1552
1553
1554 class VnfPkgOpTopic(BaseTopic):
1555 topic = "vnfpkgops"
1556 topic_msg = "vnfd"
1557 schema_new = vnfpkgop_new_schema
1558 schema_edit = None
1559
1560 def __init__(self, db, fs, msg, auth):
1561 BaseTopic.__init__(self, db, fs, msg, auth)
1562
1563 def edit(self, session, _id, indata=None, kwargs=None, content=None):
1564 raise EngineException(
1565 "Method 'edit' not allowed for topic '{}'".format(self.topic),
1566 HTTPStatus.METHOD_NOT_ALLOWED,
1567 )
1568
1569 def delete(self, session, _id, dry_run=False):
1570 raise EngineException(
1571 "Method 'delete' not allowed for topic '{}'".format(self.topic),
1572 HTTPStatus.METHOD_NOT_ALLOWED,
1573 )
1574
1575 def delete_list(self, session, filter_q=None):
1576 raise EngineException(
1577 "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
1578 HTTPStatus.METHOD_NOT_ALLOWED,
1579 )
1580
1581 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
1582 """
1583 Creates a new entry into database.
1584 :param rollback: list where items created in the database are appended, in case a rollback needs to be done
1585 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1586 :param indata: data to be inserted
1587 :param kwargs: used to override the indata descriptor
1588 :param headers: http request headers
1589 :return: _id, op_id:
1590 _id: identity of the inserted data.
1591 op_id: None
1592 """
1593 self._update_input_with_kwargs(indata, kwargs)
1594 validate_input(indata, self.schema_new)
1595 vnfpkg_id = indata["vnfPkgId"]
1596 filter_q = BaseTopic._get_project_filter(session)
1597 filter_q["_id"] = vnfpkg_id
1598 vnfd = self.db.get_one("vnfds", filter_q)
1599 operation = indata["lcmOperationType"]
1600 kdu_name = indata["kdu_name"]
1601 for kdu in vnfd.get("kdu", []):
1602 if kdu["name"] == kdu_name:
1603 helm_chart = kdu.get("helm-chart")
1604 juju_bundle = kdu.get("juju-bundle")
1605 break
1606 else:
1607 raise EngineException(
1608 "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
1609 )
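# A "helm-chart" or "juju-bundle" of the form "<repo>/<name>" is assumed to reference an external
# repository: the "<repo>" part is looked up in the "k8srepos" collection below to resolve its URL.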
1610 if helm_chart:
1611 indata["helm-chart"] = helm_chart
1612 match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
1613 repo_name = match.group(1) if match else None
1614 elif juju_bundle:
1615 indata["juju-bundle"] = juju_bundle
1616 match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
1617 repo_name = match.group(1) if match else None
1618 else:
1619 raise EngineException(
1620 "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
1621 vnfpkg_id, kdu_name
1622 )
1623 )
1624 if repo_name:
1625 del filter_q["_id"]
1626 filter_q["name"] = repo_name
1627 repo = self.db.get_one("k8srepos", filter_q)
1628 k8srepo_id = repo.get("_id")
1629 k8srepo_url = repo.get("url")
1630 else:
1631 k8srepo_id = None
1632 k8srepo_url = None
1633 indata["k8srepoId"] = k8srepo_id
1634 indata["k8srepo_url"] = k8srepo_url
1635 vnfpkgop_id = str(uuid4())
1636 vnfpkgop_desc = {
1637 "_id": vnfpkgop_id,
1638 "operationState": "PROCESSING",
1639 "vnfPkgId": vnfpkg_id,
1640 "lcmOperationType": operation,
1641 "isAutomaticInvocation": False,
1642 "isCancelPending": False,
1643 "operationParams": indata,
1644 "links": {
1645 "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
1646 "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
1647 },
1648 }
1649 self.format_on_new(
1650 vnfpkgop_desc, session["project_id"], make_public=session["public"]
1651 )
1652 ctime = vnfpkgop_desc["_admin"]["created"]
1653 vnfpkgop_desc["statusEnteredTime"] = ctime
1654 vnfpkgop_desc["startTime"] = ctime
1655 self.db.create(self.topic, vnfpkgop_desc)
1656 rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
1657 self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
1658 return vnfpkgop_id, None