Bug 2033 NBI leaves files orphaned in fsmongo
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import importlib
20 import copy
21
22 # import logging
23 from hashlib import md5
24 from osm_common.dbbase import DbException, deep_update_rfc7396
25 from http import HTTPStatus
26 from time import time
27 from uuid import uuid4
28 from re import fullmatch
29 from osm_nbi.validation import (
30 ValidationError,
31 pdu_new_schema,
32 pdu_edit_schema,
33 validate_input,
34 vnfpkgop_new_schema,
35 )
36 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
37 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
38 from osm_im.nst import nst as nst_im
39 from pyangbind.lib.serialise import pybindJSONDecoder
40 import pyangbind.lib.pybindJSON as pybindJSON
41 from osm_nbi import utils
42
43 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
44
45
46 class DescriptorTopic(BaseTopic):
47 def __init__(self, db, fs, msg, auth):
48 BaseTopic.__init__(self, db, fs, msg, auth)
49
50 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
51 final_content = super().check_conflict_on_edit(
52 session, final_content, edit_content, _id
53 )
54
55 def _check_unique_id_name(descriptor, position=""):
56 for desc_key, desc_item in descriptor.items():
57 if isinstance(desc_item, list) and desc_item:
58 used_ids = []
59 desc_item_id = None
60 for index, list_item in enumerate(desc_item):
61 if isinstance(list_item, dict):
62 _check_unique_id_name(
63 list_item, "{}.{}[{}]".format(position, desc_key, index)
64 )
65 # Base case
66 if index == 0 and (
67 list_item.get("id") or list_item.get("name")
68 ):
69 desc_item_id = "id" if list_item.get("id") else "name"
70 if desc_item_id and list_item.get(desc_item_id):
71 if list_item[desc_item_id] in used_ids:
72 position = "{}.{}[{}]".format(
73 position, desc_key, index
74 )
75 raise EngineException(
76 "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
77 desc_item_id,
78 list_item[desc_item_id],
79 position,
80 ),
81 HTTPStatus.UNPROCESSABLE_ENTITY,
82 )
83 used_ids.append(list_item[desc_item_id])
84
85 _check_unique_id_name(final_content)
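# Illustrative input (not from a real package) for the uniqueness check above:
# a descriptor such as {"vdu": [{"id": "mgmt-vdu"}, {"id": "mgmt-vdu"}]} would be
# rejected with HTTP 422 because the second list item repeats the identifier
# "mgmt-vdu" inside the same "vdu" list.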
86 # 1. validate again with pyangbind
87 # 1.1. remove internal keys
88 internal_keys = {}
89 for k in ("_id", "_admin"):
90 if k in final_content:
91 internal_keys[k] = final_content.pop(k)
92 storage_params = internal_keys["_admin"].get("storage")
93 serialized = self._validate_input_new(
94 final_content, storage_params, session["force"]
95 )
96
97 # 1.2. modify final_content with a serialized version
98 final_content = copy.deepcopy(serialized)
99 # 1.3. restore internal keys
100 for k, v in internal_keys.items():
101 final_content[k] = v
102 if session["force"]:
103 return final_content
104
105 # 2. check that this id is not present
106 if "id" in edit_content:
107 _filter = self._get_project_filter(session)
108
109 _filter["id"] = final_content["id"]
110 _filter["_id.neq"] = _id
111
112 if self.db.get_one(self.topic, _filter, fail_on_empty=False):
113 raise EngineException(
114 "{} with id '{}' already exists for this project".format(
115 self.topic[:-1], final_content["id"]
116 ),
117 HTTPStatus.CONFLICT,
118 )
119
120 return final_content
121
122 @staticmethod
123 def format_on_new(content, project_id=None, make_public=False):
124 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
125 content["_admin"]["onboardingState"] = "CREATED"
126 content["_admin"]["operationalState"] = "DISABLED"
127 content["_admin"]["usageState"] = "NOT_IN_USE"
128
129 def delete_extra(self, session, _id, db_content, not_send_msg=None):
130 """
131 Deletes file system storage associated with the descriptor
132 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
133 :param _id: server internal id
134 :param db_content: The database content of the descriptor
135 :param not_send_msg: To not send message (False) or store content (list) instead
136 :return: None if ok or raises EngineException with the problem
137 """
138 self.fs.file_delete(_id, ignore_non_exist=True)
139 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
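# Sketch of what this removes in the shared storage (paths are illustrative and
# depend on the fs backend configuration; "<_id>" is the descriptor internal id):
#   <storage-root>/<_id>/    extracted package content
#   <storage-root>/<_id>_/   temporary folder left by a partial upload
# Removing both keeps interrupted uploads from leaving orphaned files behind.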
140
141 @staticmethod
142 def get_one_by_id(db, session, topic, id):
143 # find owned by this project
144 _filter = BaseTopic._get_project_filter(session)
145 _filter["id"] = id
146 desc_list = db.get_list(topic, _filter)
147 if len(desc_list) == 1:
148 return desc_list[0]
149 elif len(desc_list) > 1:
150 raise DbException(
151 "Found more than one {} with id='{}' belonging to this project".format(
152 topic[:-1], id
153 ),
154 HTTPStatus.CONFLICT,
155 )
156
157 # not found any: try to find public
158 _filter = BaseTopic._get_project_filter(session)
159 _filter["id"] = id
160 desc_list = db.get_list(topic, _filter)
161 if not desc_list:
162 raise DbException(
163 "Not found any {} with id='{}'".format(topic[:-1], id),
164 HTTPStatus.NOT_FOUND,
165 )
166 elif len(desc_list) == 1:
167 return desc_list[0]
168 else:
169 raise DbException(
170 "Found more than one public {} with id='{}'; and no one belonging to this project".format(
171 topic[:-1], id
172 ),
173 HTTPStatus.CONFLICT,
174 )
175
176 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
177 """
178 Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the normal procedure.
179 Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor (this step) and 2) upload the content
180 (self.upload_content)
181 :param rollback: list where created database items are appended, in case a rollback needs to be done
182 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
183 :param indata: data to be inserted
184 :param kwargs: used to override the indata descriptor
185 :param headers: http request headers
186 :return: _id, None: identity of the inserted data; and None as there is not any operation
187 """
188
189 # No need to capture exceptions
190 # Check Quota
191 self.check_quota(session)
192
193 # _remove_envelop
194 if indata:
195 if "userDefinedData" in indata:
196 indata = indata["userDefinedData"]
197
198 # Override descriptor with query string kwargs
199 self._update_input_with_kwargs(indata, kwargs)
200 # uncomment when this method is implemented.
201 # Avoid overriding in this case, as the target is userDefinedData and not the vnfd/nsd descriptor
202 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
203
204 content = {"_admin": {"userDefinedData": indata}}
205 self.format_on_new(
206 content, session["project_id"], make_public=session["public"]
207 )
208 _id = self.db.create(self.topic, content)
209 rollback.append({"topic": self.topic, "_id": _id})
210 self._send_msg("created", {"_id": _id})
211 return _id, None
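# Hedged sketch of the SOL005 two-step onboarding started by new(); the package
# name is invented and only the path pattern is taken from sol005_projection:
#   POST /vnfpkgm/v1/vnf_packages                         -> returns the new "_id"
#   PUT  /vnfpkgm/v1/vnf_packages/<_id>/package_content   (body: mypkg.tar.gz)
# The second step is handled by upload_content() below.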
212
213 def upload_content(self, session, _id, indata, kwargs, headers):
214 """
215 Used for receiving package content by chunks (with a transaction_id header and/or a gzip file); it stores and extracts it
216 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
217 :param _id: the nsd/vnfd is already created; this is its id
218 :param indata: http body request
219 :param kwargs: user query string to override parameters. NOT USED
220 :param headers: http request headers
221 :return: True if the package is completely uploaded or False if partial content has been uploaded
222 Raise exception on error
223 """
224 # Check that _id exists and it is valid
225 current_desc = self.show(session, _id)
226
227 content_range_text = headers.get("Content-Range")
228 expected_md5 = headers.get("Content-File-MD5")
229 compressed = None
230 content_type = headers.get("Content-Type")
231 if content_type and (
232 "application/gzip" in content_type
233 or "application/x-gzip" in content_type
234 or "application/zip" in content_type
235 ):
236 # parentheses ensure a missing Content-Type header cannot reach the "in" tests
237 compressed = "gzip"
238 filename = headers.get("Content-Filename")
239 if not filename:
240 filename = "package.tar.gz" if compressed else "package"
241 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
242 file_pkg = None
243 error_text = ""
244 fs_rollback = []
245 try:
246 if content_range_text:
247 content_range = (
248 content_range_text.replace("-", " ").replace("/", " ").split()
249 )
250 if (
251 content_range[0] != "bytes"
252 ): # TODO check x<y not negative < total....
253 raise IndexError()
254 start = int(content_range[1])
255 end = int(content_range[2]) + 1
256 total = int(content_range[3])
257 else:
258 start = 0
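# Example of the header parsed above (values are illustrative):
#   Content-Range: bytes 0-1023/4096  ->  start=0, end=1024, total=4096
# A later chunk must start exactly where the stored file ends, otherwise the
# REQUESTED_RANGE_NOT_SATISFIABLE check below rejects it.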
259 temp_folder = (
260 _id + "_"
261 ) # all the content is uploaded here and, if ok, the folder is renamed from "<id>_" to "<id>"
262
263 if start:
264 if not self.fs.file_exists(temp_folder, "dir"):
265 raise EngineException(
266 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
267 )
268 else:
269 self.fs.file_delete(temp_folder, ignore_non_exist=True)
270 self.fs.mkdir(temp_folder)
271 fs_rollback.append(temp_folder)
272
273 storage = self.fs.get_params()
274 storage["folder"] = _id
275
276 file_path = (temp_folder, filename)
277 if self.fs.file_exists(file_path, "file"):
278 file_size = self.fs.file_size(file_path)
279 else:
280 file_size = 0
281 if file_size != start:
282 raise EngineException(
283 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
284 file_size, start
285 ),
286 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
287 )
288 file_pkg = self.fs.file_open(file_path, "a+b")
289 if isinstance(indata, dict):
290 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
291 file_pkg.write(indata_text.encode(encoding="utf-8"))
292 else:
293 indata_len = 0
294 while True:
295 indata_text = indata.read(4096)
296 indata_len += len(indata_text)
297 if not indata_text:
298 break
299 file_pkg.write(indata_text)
300 if content_range_text:
301 if indata_len != end - start:
302 raise EngineException(
303 "Mismatch between Content-Range header {}-{} and body length of {}".format(
304 start, end - 1, indata_len
305 ),
306 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
307 )
308 if end != total:
309 # TODO update to UPLOADING
310 return False
311
312 # PACKAGE UPLOADED
313 if expected_md5:
314 file_pkg.seek(0, 0)
315 file_md5 = md5()
316 chunk_data = file_pkg.read(1024)
317 while chunk_data:
318 file_md5.update(chunk_data)
319 chunk_data = file_pkg.read(1024)
320 if expected_md5 != file_md5.hexdigest():
321 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
322 file_pkg.seek(0, 0)
323 if compressed == "gzip":
324 tar = tarfile.open(mode="r", fileobj=file_pkg)
325 descriptor_file_name = None
326 for tarinfo in tar:
327 tarname = tarinfo.name
328 tarname_path = tarname.split("/")
329 if (
330 not tarname_path[0] or ".." in tarname_path
331 ): # an empty first element means the name started with "/", i.e. an absolute path
332 raise EngineException(
333 "Absolute path or '..' are not allowed for package descriptor tar.gz"
334 )
335 if len(tarname_path) == 1 and not tarinfo.isdir():
336 raise EngineException(
337 "All files must be inside a dir for package descriptor tar.gz"
338 )
339 if (
340 tarname.endswith(".yaml")
341 or tarname.endswith(".json")
342 or tarname.endswith(".yml")
343 ):
344 storage["pkg-dir"] = tarname_path[0]
345 if len(tarname_path) == 2:
346 if descriptor_file_name:
347 raise EngineException(
348 "Found more than one descriptor file at package descriptor tar.gz"
349 )
350 descriptor_file_name = tarname
351 if not descriptor_file_name:
352 raise EngineException(
353 "Not found any descriptor file at package descriptor tar.gz"
354 )
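# Illustrative tar.gz layout (file names invented) accepted by the checks above:
#   mypkg/my_vnfd.yaml        <- the single descriptor file at depth 2
#   mypkg/charms/mycharm/...
#   mypkg/cloud_init/mycloudinit.cfg
# Everything must live under one top-level directory and '..' is not allowed.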
355 storage["descriptor"] = descriptor_file_name
356 storage["zipfile"] = filename
357 self.fs.file_extract(tar, temp_folder)
358 with self.fs.file_open(
359 (temp_folder, descriptor_file_name), "r"
360 ) as descriptor_file:
361 content = descriptor_file.read()
362 else:
363 content = file_pkg.read()
364 storage["descriptor"] = descriptor_file_name = filename
365
366 if descriptor_file_name.endswith(".json"):
367 error_text = "Invalid json format "
368 indata = json.loads(content)
369 else:
370 error_text = "Invalid yaml format "
371 indata = yaml.load(content, Loader=yaml.SafeLoader)
372
373 current_desc["_admin"]["storage"] = storage
374 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
375 current_desc["_admin"]["operationalState"] = "ENABLED"
376
377 indata = self._remove_envelop(indata)
378
379 # Override descriptor with query string kwargs
380 if kwargs:
381 self._update_input_with_kwargs(indata, kwargs)
382
383 deep_update_rfc7396(current_desc, indata)
384 current_desc = self.check_conflict_on_edit(
385 session, current_desc, indata, _id=_id
386 )
387 current_desc["_admin"]["modified"] = time()
388 self.db.replace(self.topic, _id, current_desc)
389 self.fs.dir_rename(temp_folder, _id)
390 fs_rollback = []
391
392 indata["_id"] = _id
393 self._send_msg("edited", indata)
394
395 # TODO if descriptor has changed because kwargs update content and remove cached zip
396 # TODO if zip is not present creates one
397 return True
398
399 except EngineException:
400 raise
401 except IndexError:
402 raise EngineException(
403 "invalid Content-Range header format. Expected 'bytes start-end/total'",
404 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
405 )
406 except IOError as e:
407 raise EngineException(
408 "invalid upload transaction sequence: '{}'".format(e),
409 HTTPStatus.BAD_REQUEST,
410 )
411 except tarfile.ReadError as e:
412 raise EngineException(
413 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
414 )
415 except (ValueError, yaml.YAMLError) as e:
416 raise EngineException(error_text + str(e))
417 except ValidationError as e:
418 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
419 finally:
420 if file_pkg:
421 file_pkg.close()
422 for file in fs_rollback:
423 self.fs.file_delete(file, ignore_non_exist=True)
424
425 def get_file(self, session, _id, path=None, accept_header=None):
426 """
427 Return the file content of a vnfd or nsd
428 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
429 :param _id: Identity of the vnfd, nsd
430 :param path: artifact path or "$DESCRIPTOR" or None
431 :param accept_header: content of the Accept header. Must contain application/zip and/or text/plain
432 :return: opened file plus Accept format or raises an exception
433 """
434 accept_text = accept_zip = False
435 if accept_header:
436 if "text/plain" in accept_header or "*/*" in accept_header:
437 accept_text = True
438 if "application/zip" in accept_header or "*/*" in accept_header:
439 accept_zip = "application/zip"
440 elif "application/gzip" in accept_header:
441 accept_zip = "application/gzip"
442
443 if not accept_text and not accept_zip:
444 raise EngineException(
445 "provide request header 'Accept' with 'application/zip' or 'text/plain'",
446 http_code=HTTPStatus.NOT_ACCEPTABLE,
447 )
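# Illustrative Accept values and how the flags above resolve (not an exhaustive
# list of what the HTTP spec allows):
#   Accept: text/plain       -> accept_text only
#   Accept: application/zip  -> accept_zip = "application/zip"
#   Accept: */*              -> both; single-file packages are then served as text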
448
449 content = self.show(session, _id)
450 if content["_admin"]["onboardingState"] != "ONBOARDED":
451 raise EngineException(
452 "Cannot get content because this resource is not at 'ONBOARDED' state. "
453 "onboardingState is {}".format(content["_admin"]["onboardingState"]),
454 http_code=HTTPStatus.CONFLICT,
455 )
456 storage = content["_admin"]["storage"]
457 if path is not None and path != "$DESCRIPTOR": # artifacts
458 if not storage.get("pkg-dir"):
459 raise EngineException(
460 "Packages does not contains artifacts",
461 http_code=HTTPStatus.BAD_REQUEST,
462 )
463 if self.fs.file_exists(
464 (storage["folder"], storage["pkg-dir"], *path), "dir"
465 ):
466 folder_content = self.fs.dir_ls(
467 (storage["folder"], storage["pkg-dir"], *path)
468 )
469 return folder_content, "text/plain"
470 # TODO manage folders in http
471 else:
472 return (
473 self.fs.file_open(
474 (storage["folder"], storage["pkg-dir"], *path), "rb"
475 ),
476 "application/octet-stream",
477 )
478
479 # pkgtype    accept-ZIP  accept-TEXT  -> result
480 # manyfiles  yes         X            -> zip
481 #            no          yes          -> error
482 # onefile    yes         no           -> zip
483 #            X           yes          -> text
484 contain_many_files = False
485 if storage.get("pkg-dir"):
486 # check whether there is more than one file in the package, ignoring checksums.txt
487 pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
488 if len(pkg_files) >= 3 or (
489 len(pkg_files) == 2 and "checksums.txt" not in pkg_files
490 ):
491 contain_many_files = True
492 if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
493 return (
494 self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
495 "text/plain",
496 )
497 elif contain_many_files and not accept_zip:
498 raise EngineException(
499 "Packages that contains several files need to be retrieved with 'application/zip'"
500 "Accept header",
501 http_code=HTTPStatus.NOT_ACCEPTABLE,
502 )
503 else:
504 if not storage.get("zipfile"):
505 # TODO generate zipfile if not present
506 raise EngineException(
507 "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
508 "future versions",
509 http_code=HTTPStatus.NOT_ACCEPTABLE,
510 )
511 return (
512 self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
513 accept_zip,
514 )
515
516 def _remove_yang_prefixes_from_descriptor(self, descriptor):
517 new_descriptor = {}
518 for k, v in descriptor.items():
519 new_v = v
520 if isinstance(v, dict):
521 new_v = self._remove_yang_prefixes_from_descriptor(v)
522 elif isinstance(v, list):
523 new_v = list()
524 for x in v:
525 if isinstance(x, dict):
526 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
527 else:
528 new_v.append(x)
529 new_descriptor[k.split(":")[-1]] = new_v
530 return new_descriptor
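# Illustrative before/after for the prefix stripping above (keys invented):
#   {"etsi-nfv-vnfd:ext-cpd": [{"etsi-nfv-vnfd:id": "mgmt-ext"}]}
#   becomes
#   {"ext-cpd": [{"id": "mgmt-ext"}]}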
531
532 def pyangbind_validation(self, item, data, force=False):
533 raise EngineException(
534 "Not possible to validate '{}' item".format(item),
535 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
536 )
537
538 def _validate_input_edit(self, indata, content, force=False):
539 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
540 if "_id" in indata:
541 indata.pop("_id")
542 if "_admin" not in indata:
543 indata["_admin"] = {}
544
545 if "operationalState" in indata:
546 if indata["operationalState"] in ("ENABLED", "DISABLED"):
547 indata["_admin"]["operationalState"] = indata.pop("operationalState")
548 else:
549 raise EngineException(
550 "State '{}' is not a valid operational state".format(
551 indata["operationalState"]
552 ),
553 http_code=HTTPStatus.BAD_REQUEST,
554 )
555
556 # In the case of user defined data, we need to put the data in the root of the object
557 # to preserve current expected behaviour
558 if "userDefinedData" in indata:
559 data = indata.pop("userDefinedData")
560 if type(data) == dict:
561 indata["_admin"]["userDefinedData"] = data
562 else:
563 raise EngineException(
564 "userDefinedData should be an object, but is '{}' instead".format(
565 type(data)
566 ),
567 http_code=HTTPStatus.BAD_REQUEST,
568 )
569
570 if (
571 "operationalState" in indata["_admin"]
572 and content["_admin"]["operationalState"]
573 == indata["_admin"]["operationalState"]
574 ):
575 raise EngineException(
576 "operationalState already {}".format(
577 content["_admin"]["operationalState"]
578 ),
579 http_code=HTTPStatus.CONFLICT,
580 )
581
582 return indata
583
584
585 class VnfdTopic(DescriptorTopic):
586 topic = "vnfds"
587 topic_msg = "vnfd"
588
589 def __init__(self, db, fs, msg, auth):
590 DescriptorTopic.__init__(self, db, fs, msg, auth)
591
592 def pyangbind_validation(self, item, data, force=False):
593 if self._descriptor_data_is_in_old_format(data):
594 raise EngineException(
595 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
596 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
597 )
598 try:
599 myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
600 pybindJSONDecoder.load_ietf_json(
601 {"etsi-nfv-vnfd:vnfd": data},
602 None,
603 None,
604 obj=myvnfd,
605 path_helper=True,
606 skip_unknown=force,
607 )
608 out = pybindJSON.dumps(myvnfd, mode="ietf")
609 desc_out = self._remove_envelop(yaml.safe_load(out))
610 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
611 return utils.deep_update_dict(data, desc_out)
612 except Exception as e:
613 raise EngineException(
614 "Error in pyangbind validation: {}".format(str(e)),
615 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
616 )
617
618 @staticmethod
619 def _descriptor_data_is_in_old_format(data):
620 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
621
622 @staticmethod
623 def _remove_envelop(indata=None):
624 if not indata:
625 return {}
626 clean_indata = indata
627
628 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
629 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
630 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
631 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
632 elif clean_indata.get("vnfd"):
633 if not isinstance(clean_indata["vnfd"], dict):
634 raise EngineException("'vnfd' must be dict")
635 clean_indata = clean_indata["vnfd"]
636
637 return clean_indata
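# Illustrative envelope removal (descriptor body invented):
#   {"etsi-nfv-vnfd:vnfd": {"id": "my-vnf", "version": "1.0"}}
#   is reduced to {"id": "my-vnf", "version": "1.0"}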
638
639 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
640 final_content = super().check_conflict_on_edit(
641 session, final_content, edit_content, _id
642 )
643
644 # set type of vnfd
645 contains_pdu = False
646 contains_vdu = False
647 for vdu in get_iterable(final_content.get("vdu")):
648 if vdu.get("pdu-type"):
649 contains_pdu = True
650 else:
651 contains_vdu = True
652 if contains_pdu:
653 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
654 elif contains_vdu:
655 final_content["_admin"]["type"] = "vnfd"
656 # if neither vdu nor pdu is present, do not fill the type
657 return final_content
658
659 def check_conflict_on_del(self, session, _id, db_content):
660 """
661 Check that there is no NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
662 that the VNFD can be public and be used by NSDs of other projects. Also check that there are no deployments
663 (vnfrs) that use this vnfd
664 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
665 :param _id: vnfd internal id
666 :param db_content: The database content of the _id.
667 :return: None or raises EngineException with the conflict
668 """
669 if session["force"]:
670 return
671 descriptor = db_content
672 descriptor_id = descriptor.get("id")
673 if not descriptor_id: # empty vnfd not uploaded
674 return
675
676 _filter = self._get_project_filter(session)
677
678 # check vnfrs using this vnfd
679 _filter["vnfd-id"] = _id
680 if self.db.get_list("vnfrs", _filter):
681 raise EngineException(
682 "There is at least one VNF instance using this descriptor",
683 http_code=HTTPStatus.CONFLICT,
684 )
685
686 # check NSD referencing this VNFD
687 del _filter["vnfd-id"]
688 _filter["vnfd-id"] = descriptor_id
689 if self.db.get_list("nsds", _filter):
690 raise EngineException(
691 "There is at least one NS package referencing this descriptor",
692 http_code=HTTPStatus.CONFLICT,
693 )
694
695 def _validate_input_new(self, indata, storage_params, force=False):
696 indata.pop("onboardingState", None)
697 indata.pop("operationalState", None)
698 indata.pop("usageState", None)
699 indata.pop("links", None)
700
701 indata = self.pyangbind_validation("vnfds", indata, force)
702 # Cross references validation in the descriptor
703
704 self.validate_mgmt_interface_connection_point(indata)
705
706 for vdu in get_iterable(indata.get("vdu")):
707 self.validate_vdu_internal_connection_points(vdu)
708 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
709 self._validate_vdu_charms_in_package(storage_params, indata)
710
711 self._validate_vnf_charms_in_package(storage_params, indata)
712
713 self.validate_external_connection_points(indata)
714 self.validate_internal_virtual_links(indata)
715 self.validate_monitoring_params(indata)
716 self.validate_scaling_group_descriptor(indata)
717
718 return indata
719
720 @staticmethod
721 def validate_mgmt_interface_connection_point(indata):
722 if not indata.get("vdu"):
723 return
724 if not indata.get("mgmt-cp"):
725 raise EngineException(
726 "'mgmt-cp' is a mandatory field and it is not defined",
727 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
728 )
729
730 for cp in get_iterable(indata.get("ext-cpd")):
731 if cp["id"] == indata["mgmt-cp"]:
732 break
733 else:
734 raise EngineException(
735 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
736 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
737 )
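# Minimal VNFD fragment (ids invented) that satisfies this check: "mgmt-cp" must
# name one of the declared external connection points.
#   mgmt-cp: vnf-mgmt-ext
#   ext-cpd:
#     - id: vnf-mgmt-ext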
738
739 @staticmethod
740 def validate_vdu_internal_connection_points(vdu):
741 int_cpds = set()
742 for cpd in get_iterable(vdu.get("int-cpd")):
743 cpd_id = cpd.get("id")
744 if cpd_id and cpd_id in int_cpds:
745 raise EngineException(
746 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
747 vdu["id"], cpd_id
748 ),
749 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
750 )
751 int_cpds.add(cpd_id)
752
753 @staticmethod
754 def validate_external_connection_points(indata):
755 all_vdus_int_cpds = set()
756 for vdu in get_iterable(indata.get("vdu")):
757 for int_cpd in get_iterable(vdu.get("int-cpd")):
758 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
759
760 ext_cpds = set()
761 for cpd in get_iterable(indata.get("ext-cpd")):
762 cpd_id = cpd.get("id")
763 if cpd_id and cpd_id in ext_cpds:
764 raise EngineException(
765 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
766 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
767 )
768 ext_cpds.add(cpd_id)
769
770 int_cpd = cpd.get("int-cpd")
771 if int_cpd:
772 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
773 raise EngineException(
774 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
775 cpd_id
776 ),
777 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
778 )
779 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
780
781 def _validate_vdu_charms_in_package(self, storage_params, indata):
782 for df in indata["df"]:
783 if (
784 "lcm-operations-configuration" in df
785 and "operate-vnf-op-config" in df["lcm-operations-configuration"]
786 ):
787 configs = df["lcm-operations-configuration"][
788 "operate-vnf-op-config"
789 ].get("day1-2", [])
790 vdus = df.get("vdu-profile", [])
791 for vdu in vdus:
792 for config in configs:
793 if config["id"] == vdu["id"] and utils.find_in_list(
794 config.get("execution-environment-list", []),
795 lambda ee: "juju" in ee,
796 ):
797 if not self._validate_package_folders(
798 storage_params, "charms"
799 ):
800 raise EngineException(
801 "Charm defined in vnf[id={}] but not present in "
802 "package".format(indata["id"])
803 )
804
805 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
806 if not vdu.get("cloud-init-file"):
807 return
808 if not self._validate_package_folders(
809 storage_params, "cloud_init", vdu["cloud-init-file"]
810 ):
811 raise EngineException(
812 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
813 "package".format(indata["id"], vdu["id"])
814 )
815
816 def _validate_vnf_charms_in_package(self, storage_params, indata):
817 # Get VNF configuration through new container
818 for deployment_flavor in indata.get("df", []):
819 if "lcm-operations-configuration" not in deployment_flavor:
820 return
821 if (
822 "operate-vnf-op-config"
823 not in deployment_flavor["lcm-operations-configuration"]
824 ):
825 return
826 for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
827 "operate-vnf-op-config"
828 ]["day1-2"]:
829 if day_1_2_config["id"] == indata["id"]:
830 if utils.find_in_list(
831 day_1_2_config.get("execution-environment-list", []),
832 lambda ee: "juju" in ee,
833 ):
834 if not self._validate_package_folders(storage_params, "charms"):
835 raise EngineException(
836 "Charm defined in vnf[id={}] but not present in "
837 "package".format(indata["id"])
838 )
839
840 def _validate_package_folders(self, storage_params, folder, file=None):
841 if not storage_params or not storage_params.get("pkg-dir"):
842 return False
843 else:
844 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
845 f = "{}_/{}/{}".format(
846 storage_params["folder"], storage_params["pkg-dir"], folder
847 )
848 else:
849 f = "{}/{}/{}".format(
850 storage_params["folder"], storage_params["pkg-dir"], folder
851 )
852 if file:
853 return self.fs.file_exists("{}/{}".format(f, file), "file")
854 else:
855 if self.fs.file_exists(f, "dir"):
856 if self.fs.dir_ls(f):
857 return True
858 return False
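# Hedged example of the layouts this helper looks for (paths illustrative):
#   <folder>/<pkg-dir>/charms/               non-empty dir  -> True for "charms"
#   <folder>_/<pkg-dir>/cloud_init/my.cfg    file check     -> True for a given file
# The "<folder>_" variant covers packages still sitting in the temporary upload
# folder while upload_content is validating them.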
859
860 @staticmethod
861 def validate_internal_virtual_links(indata):
862 all_ivld_ids = set()
863 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
864 ivld_id = ivld.get("id")
865 if ivld_id and ivld_id in all_ivld_ids:
866 raise EngineException(
867 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
868 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
869 )
870 else:
871 all_ivld_ids.add(ivld_id)
872
873 for vdu in get_iterable(indata.get("vdu")):
874 for int_cpd in get_iterable(vdu.get("int-cpd")):
875 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
876 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
877 raise EngineException(
878 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
879 "int-virtual-link-desc".format(
880 vdu["id"], int_cpd["id"], int_cpd_ivld_id
881 ),
882 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
883 )
884
885 for df in get_iterable(indata.get("df")):
886 for vlp in get_iterable(df.get("virtual-link-profile")):
887 vlp_ivld_id = vlp.get("id")
888 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
889 raise EngineException(
890 "df[id='{}']:virtual-link-profile='{}' must match an existing "
891 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
892 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
893 )
894
895 @staticmethod
896 def validate_monitoring_params(indata):
897 all_monitoring_params = set()
898 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
899 for mp in get_iterable(ivld.get("monitoring-parameters")):
900 mp_id = mp.get("id")
901 if mp_id and mp_id in all_monitoring_params:
902 raise EngineException(
903 "Duplicated monitoring-parameter id in "
904 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
905 ivld["id"], mp_id
906 ),
907 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
908 )
909 else:
910 all_monitoring_params.add(mp_id)
911
912 for vdu in get_iterable(indata.get("vdu")):
913 for mp in get_iterable(vdu.get("monitoring-parameter")):
914 mp_id = mp.get("id")
915 if mp_id and mp_id in all_monitoring_params:
916 raise EngineException(
917 "Duplicated monitoring-parameter id in "
918 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
919 vdu["id"], mp_id
920 ),
921 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
922 )
923 else:
924 all_monitoring_params.add(mp_id)
925
926 for df in get_iterable(indata.get("df")):
927 for mp in get_iterable(df.get("monitoring-parameter")):
928 mp_id = mp.get("id")
929 if mp_id and mp_id in all_monitoring_params:
930 raise EngineException(
931 "Duplicated monitoring-parameter id in "
932 "df[id='{}']:monitoring-parameter[id='{}']".format(
933 df["id"], mp_id
934 ),
935 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
936 )
937 else:
938 all_monitoring_params.add(mp_id)
939
940 @staticmethod
941 def validate_scaling_group_descriptor(indata):
942 all_monitoring_params = set()
943 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
944 for mp in get_iterable(ivld.get("monitoring-parameters")):
945 all_monitoring_params.add(mp.get("id"))
946
947 for vdu in get_iterable(indata.get("vdu")):
948 for mp in get_iterable(vdu.get("monitoring-parameter")):
949 all_monitoring_params.add(mp.get("id"))
950
951 for df in get_iterable(indata.get("df")):
952 for mp in get_iterable(df.get("monitoring-parameter")):
953 all_monitoring_params.add(mp.get("id"))
954
955 for df in get_iterable(indata.get("df")):
956 for sa in get_iterable(df.get("scaling-aspect")):
957 for sp in get_iterable(sa.get("scaling-policy")):
958 for sc in get_iterable(sp.get("scaling-criteria")):
959 sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
960 if (
961 sc_monitoring_param
962 and sc_monitoring_param not in all_monitoring_params
963 ):
964 raise EngineException(
965 "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
966 "[name='{}']:scaling-criteria[name='{}']: "
967 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
968 df["id"],
969 sa["id"],
970 sp["name"],
971 sc["name"],
972 sc_monitoring_param,
973 ),
974 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
975 )
976
977 for sca in get_iterable(sa.get("scaling-config-action")):
978 if (
979 "lcm-operations-configuration" not in df
980 or "operate-vnf-op-config"
981 not in df["lcm-operations-configuration"]
982 or not utils.find_in_list(
983 df["lcm-operations-configuration"][
984 "operate-vnf-op-config"
985 ].get("day1-2", []),
986 lambda config: config["id"] == indata["id"],
987 )
988 ):
989 raise EngineException(
990 "'day1-2 configuration' not defined in the descriptor but it is "
991 "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
992 df["id"], sa["id"]
993 ),
994 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
995 )
996 for configuration in get_iterable(
997 df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
998 "day1-2", []
999 )
1000 ):
1001 for primitive in get_iterable(
1002 configuration.get("config-primitive")
1003 ):
1004 if (
1005 primitive["name"]
1006 == sca["vnf-config-primitive-name-ref"]
1007 ):
1008 break
1009 else:
1010 raise EngineException(
1011 "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
1012 "config-primitive-name-ref='{}' does not match any "
1013 "day1-2 configuration:config-primitive:name".format(
1014 df["id"],
1015 sa["id"],
1016 sca["vnf-config-primitive-name-ref"],
1017 ),
1018 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1019 )
1020
1021 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1022 """
1023 Deletes associated file system storage (via super)
1024 Deletes associated vnfpkgops from database.
1025 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1026 :param _id: server internal id
1027 :param db_content: The database content of the descriptor
1028 :return: None
1029 :raises: FsException in case of error while deleting associated storage
1030 """
1031 super().delete_extra(session, _id, db_content, not_send_msg)
1032 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1033
1034 def sol005_projection(self, data):
1035 data["onboardingState"] = data["_admin"]["onboardingState"]
1036 data["operationalState"] = data["_admin"]["operationalState"]
1037 data["usageState"] = data["_admin"]["usageState"]
1038
1039 links = {}
1040 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1041 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1042 links["packageContent"] = {
1043 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1044 }
1045 data["_links"] = links
1046
1047 return super().sol005_projection(data)
1048
1049
1050 class NsdTopic(DescriptorTopic):
1051 topic = "nsds"
1052 topic_msg = "nsd"
1053
1054 def __init__(self, db, fs, msg, auth):
1055 DescriptorTopic.__init__(self, db, fs, msg, auth)
1056
1057 def pyangbind_validation(self, item, data, force=False):
1058 if self._descriptor_data_is_in_old_format(data):
1059 raise EngineException(
1060 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
1061 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1062 )
1063 try:
1064 nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
1065 mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
1066 pybindJSONDecoder.load_ietf_json(
1067 {"nsd": {"nsd": [data]}},
1068 None,
1069 None,
1070 obj=mynsd,
1071 path_helper=True,
1072 skip_unknown=force,
1073 )
1074 out = pybindJSON.dumps(mynsd, mode="ietf")
1075 desc_out = self._remove_envelop(yaml.safe_load(out))
1076 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
1077 if nsd_vnf_profiles:
1078 desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
1079 return desc_out
1080 except Exception as e:
1081 raise EngineException(
1082 "Error in pyangbind validation: {}".format(str(e)),
1083 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1084 )
1085
1086 @staticmethod
1087 def _descriptor_data_is_in_old_format(data):
1088 return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
1089
1090 @staticmethod
1091 def _remove_envelop(indata=None):
1092 if not indata:
1093 return {}
1094 clean_indata = indata
1095
1096 if clean_indata.get("nsd"):
1097 clean_indata = clean_indata["nsd"]
1098 elif clean_indata.get("etsi-nfv-nsd:nsd"):
1099 clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
1100 if clean_indata.get("nsd"):
1101 if (
1102 not isinstance(clean_indata["nsd"], list)
1103 or len(clean_indata["nsd"]) != 1
1104 ):
1105 raise EngineException("'nsd' must be a list of only one element")
1106 clean_indata = clean_indata["nsd"][0]
1107 return clean_indata
1108
1109 def _validate_input_new(self, indata, storage_params, force=False):
1110 indata.pop("nsdOnboardingState", None)
1111 indata.pop("nsdOperationalState", None)
1112 indata.pop("nsdUsageState", None)
1113
1114 indata.pop("links", None)
1115
1116 indata = self.pyangbind_validation("nsds", indata, force)
1117 # Cross references validation in the descriptor
1118 # TODO validate that if it contains cloud-init-file or charms, it has artifacts and _admin.storage."pkg-dir" is not None
1119 for vld in get_iterable(indata.get("virtual-link-desc")):
1120 self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
1121
1122 self.validate_vnf_profiles_vnfd_id(indata)
1123
1124 return indata
1125
1126 @staticmethod
1127 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
1128 if not vld.get("mgmt-network"):
1129 return
1130 vld_id = vld.get("id")
1131 for df in get_iterable(indata.get("df")):
1132 for vlp in get_iterable(df.get("virtual-link-profile")):
1133 if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
1134 if vlp.get("virtual-link-protocol-data"):
1135 raise EngineException(
1136 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
1137 "protocol-data You cannot set a virtual-link-protocol-data "
1138 "when mgmt-network is True".format(df["id"], vlp["id"]),
1139 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1140 )
1141
1142 @staticmethod
1143 def validate_vnf_profiles_vnfd_id(indata):
1144 all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
1145 for df in get_iterable(indata.get("df")):
1146 for vnf_profile in get_iterable(df.get("vnf-profile")):
1147 vnfd_id = vnf_profile.get("vnfd-id")
1148 if vnfd_id and vnfd_id not in all_vnfd_ids:
1149 raise EngineException(
1150 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
1151 "does not match any vnfd-id".format(
1152 df["id"], vnf_profile["id"], vnfd_id
1153 ),
1154 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1155 )
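# Minimal NSD fragment (ids invented) that passes this cross check: every
# vnf-profile:vnfd-id must appear in the top-level vnfd-id list.
#   vnfd-id: [my-vnf]
#   df:
#     - id: default-df
#       vnf-profile:
#         - id: "1"
#           vnfd-id: my-vnf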
1156
1157 def _validate_input_edit(self, indata, content, force=False):
1158 # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
1159 """
1160 indata looks as follows:
1161 - In the new case (conformant)
1162 {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
1163 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
1164 - In the old case (backwards-compatible)
1165 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
1166 """
1167 if "_admin" not in indata:
1168 indata["_admin"] = {}
1169
1170 if "nsdOperationalState" in indata:
1171 if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
1172 indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
1173 else:
1174 raise EngineException(
1175 "State '{}' is not a valid operational state".format(
1176 indata["nsdOperationalState"]
1177 ),
1178 http_code=HTTPStatus.BAD_REQUEST,
1179 )
1180
1181 # In the case of user defined data, we need to put the data in the root of the object
1182 # to preserve current expected behaviour
1183 if "userDefinedData" in indata:
1184 data = indata.pop("userDefinedData")
1185 if type(data) == dict:
1186 indata["_admin"]["userDefinedData"] = data
1187 else:
1188 raise EngineException(
1189 "userDefinedData should be an object, but is '{}' instead".format(
1190 type(data)
1191 ),
1192 http_code=HTTPStatus.BAD_REQUEST,
1193 )
1194 if (
1195 "operationalState" in indata["_admin"]
1196 and content["_admin"]["operationalState"]
1197 == indata["_admin"]["operationalState"]
1198 ):
1199 raise EngineException(
1200 "nsdOperationalState already {}".format(
1201 content["_admin"]["operationalState"]
1202 ),
1203 http_code=HTTPStatus.CONFLICT,
1204 )
1205 return indata
1206
1207 def _check_descriptor_dependencies(self, session, descriptor):
1208 """
1209 Check that the dependent descriptors exist when creating or editing a descriptor. Also checks that references
1210 to vnfd connection points are correct
1211 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1212 :param descriptor: descriptor to be inserted or edit
1213 :return: None or raises exception
1214 """
1215 if session["force"]:
1216 return
1217 vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
1218
1219 # Cross references validation in the descriptor and vnfd connection point validation
1220 for df in get_iterable(descriptor.get("df")):
1221 self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
1222
1223 def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
1224 vnfds_index = {}
1225 if descriptor.get("vnfd-id") and not session["force"]:
1226 for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
1227 query_filter = self._get_project_filter(session)
1228 query_filter["id"] = vnfd_id
1229 vnf_list = self.db.get_list("vnfds", query_filter)
1230 if not vnf_list:
1231 raise EngineException(
1232 "Descriptor error at 'vnfd-id'='{}' references a non "
1233 "existing vnfd".format(vnfd_id),
1234 http_code=HTTPStatus.CONFLICT,
1235 )
1236 vnfds_index[vnfd_id] = vnf_list[0]
1237 return vnfds_index
1238
1239 @staticmethod
1240 def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
1241 for vnf_profile in get_iterable(df.get("vnf-profile")):
1242 vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
1243 all_vnfd_ext_cpds = set()
1244 for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
1245 if ext_cpd.get("id"):
1246 all_vnfd_ext_cpds.add(ext_cpd.get("id"))
1247
1248 for virtual_link in get_iterable(
1249 vnf_profile.get("virtual-link-connectivity")
1250 ):
1251 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
1252 vl_cpd_id = vl_cpd.get("constituent-cpd-id")
1253 if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
1254 raise EngineException(
1255 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
1256 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
1257 "non existing ext-cpd:id inside vnfd '{}'".format(
1258 df["id"],
1259 vnf_profile["id"],
1260 virtual_link["virtual-link-profile-id"],
1261 vl_cpd_id,
1262 vnfd["id"],
1263 ),
1264 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1265 )
1266
1267 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1268 final_content = super().check_conflict_on_edit(
1269 session, final_content, edit_content, _id
1270 )
1271
1272 self._check_descriptor_dependencies(session, final_content)
1273
1274 return final_content
1275
1276 def check_conflict_on_del(self, session, _id, db_content):
1277 """
1278 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1279 that NSD can be public and be used by other projects.
1280 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1281 :param _id: nsd internal id
1282 :param db_content: The database content of the _id
1283 :return: None or raises EngineException with the conflict
1284 """
1285 if session["force"]:
1286 return
1287 descriptor = db_content
1288 descriptor_id = descriptor.get("id")
1289 if not descriptor_id: # empty nsd not uploaded
1290 return
1291
1292 # check NSD used by NS
1293 _filter = self._get_project_filter(session)
1294 _filter["nsd-id"] = _id
1295 if self.db.get_list("nsrs", _filter):
1296 raise EngineException(
1297 "There is at least one NS instance using this descriptor",
1298 http_code=HTTPStatus.CONFLICT,
1299 )
1300
1301 # check NSD referenced by NST
1302 del _filter["nsd-id"]
1303 _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
1304 if self.db.get_list("nsts", _filter):
1305 raise EngineException(
1306 "There is at least one NetSlice Template referencing this descriptor",
1307 http_code=HTTPStatus.CONFLICT,
1308 )
1309
1310 def sol005_projection(self, data):
1311 data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
1312 data["nsdOperationalState"] = data["_admin"]["operationalState"]
1313 data["nsdUsageState"] = data["_admin"]["usageState"]
1314
1315 links = {}
1316 links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
1317 links["nsd_content"] = {
1318 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
1319 }
1320 data["_links"] = links
1321
1322 return super().sol005_projection(data)
1323
1324
1325 class NstTopic(DescriptorTopic):
1326 topic = "nsts"
1327 topic_msg = "nst"
1328 quota_name = "slice_templates"
1329
1330 def __init__(self, db, fs, msg, auth):
1331 DescriptorTopic.__init__(self, db, fs, msg, auth)
1332
1333 def pyangbind_validation(self, item, data, force=False):
1334 try:
1335 mynst = nst_im()
1336 pybindJSONDecoder.load_ietf_json(
1337 {"nst": [data]},
1338 None,
1339 None,
1340 obj=mynst,
1341 path_helper=True,
1342 skip_unknown=force,
1343 )
1344 out = pybindJSON.dumps(mynst, mode="ietf")
1345 desc_out = self._remove_envelop(yaml.safe_load(out))
1346 return desc_out
1347 except Exception as e:
1348 raise EngineException(
1349 "Error in pyangbind validation: {}".format(str(e)),
1350 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1351 )
1352
1353 @staticmethod
1354 def _remove_envelop(indata=None):
1355 if not indata:
1356 return {}
1357 clean_indata = indata
1358
1359 if clean_indata.get("nst"):
1360 if (
1361 not isinstance(clean_indata["nst"], list)
1362 or len(clean_indata["nst"]) != 1
1363 ):
1364 raise EngineException("'nst' must be a list only one element")
1365 clean_indata = clean_indata["nst"][0]
1366 elif clean_indata.get("nst:nst"):
1367 if (
1368 not isinstance(clean_indata["nst:nst"], list)
1369 or len(clean_indata["nst:nst"]) != 1
1370 ):
1371 raise EngineException("'nst:nst' must be a list only one element")
1372 clean_indata = clean_indata["nst:nst"][0]
1373 return clean_indata
1374
1375 def _validate_input_new(self, indata, storage_params, force=False):
1376 indata.pop("onboardingState", None)
1377 indata.pop("operationalState", None)
1378 indata.pop("usageState", None)
1379 indata = self.pyangbind_validation("nsts", indata, force)
1380 return indata.copy()
1381
1382 def _check_descriptor_dependencies(self, session, descriptor):
1383 """
1384 Check that the dependent descriptors exist when creating or editing a descriptor
1385 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1386 :param descriptor: descriptor to be inserted or edit
1387 :return: None or raises exception
1388 """
1389 if not descriptor.get("netslice-subnet"):
1390 return
1391 for nsd in descriptor["netslice-subnet"]:
1392 nsd_id = nsd["nsd-ref"]
1393 filter_q = self._get_project_filter(session)
1394 filter_q["id"] = nsd_id
1395 if not self.db.get_list("nsds", filter_q):
1396 raise EngineException(
1397 "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
1398 "existing nsd".format(nsd_id),
1399 http_code=HTTPStatus.CONFLICT,
1400 )
1401
1402 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
1403 final_content = super().check_conflict_on_edit(
1404 session, final_content, edit_content, _id
1405 )
1406
1407 self._check_descriptor_dependencies(session, final_content)
1408 return final_content
1409
1410 def check_conflict_on_del(self, session, _id, db_content):
1411 """
1412 Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
1413 that NST can be public and be used by other projects.
1414 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1415 :param _id: nst internal id
1416 :param db_content: The database content of the _id.
1417 :return: None or raises EngineException with the conflict
1418 """
1419 # TODO: Check this method
1420 if session["force"]:
1421 return
1422 # Get Network Slice Template from Database
1423 _filter = self._get_project_filter(session)
1424 _filter["_admin.nst-id"] = _id
1425 if self.db.get_list("nsis", _filter):
1426 raise EngineException(
1427 "there is at least one Netslice Instance using this descriptor",
1428 http_code=HTTPStatus.CONFLICT,
1429 )
1430
1431 def sol005_projection(self, data):
1432 data["onboardingState"] = data["_admin"]["onboardingState"]
1433 data["operationalState"] = data["_admin"]["operationalState"]
1434 data["usageState"] = data["_admin"]["usageState"]
1435
1436 links = {}
1437 links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
1438 links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
1439 data["_links"] = links
1440
1441 return super().sol005_projection(data)
1442
1443
1444 class PduTopic(BaseTopic):
1445 topic = "pdus"
1446 topic_msg = "pdu"
1447 quota_name = "pduds"
1448 schema_new = pdu_new_schema
1449 schema_edit = pdu_edit_schema
1450
1451 def __init__(self, db, fs, msg, auth):
1452 BaseTopic.__init__(self, db, fs, msg, auth)
1453
1454 @staticmethod
1455 def format_on_new(content, project_id=None, make_public=False):
1456 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
1457 content["_admin"]["onboardingState"] = "CREATED"
1458 content["_admin"]["operationalState"] = "ENABLED"
1459 content["_admin"]["usageState"] = "NOT_IN_USE"
1460
1461 def check_conflict_on_del(self, session, _id, db_content):
1462 """
1463 Check that there is not any vnfr that uses this PDU
1464 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1465 :param _id: pdu internal id
1466 :param db_content: The database content of the _id.
1467 :return: None or raises EngineException with the conflict
1468 """
1469 if session["force"]:
1470 return
1471
1472 _filter = self._get_project_filter(session)
1473 _filter["vdur.pdu-id"] = _id
1474 if self.db.get_list("vnfrs", _filter):
1475 raise EngineException(
1476 "There is at least one VNF instance using this PDU",
1477 http_code=HTTPStatus.CONFLICT,
1478 )
1479
1480
1481 class VnfPkgOpTopic(BaseTopic):
1482 topic = "vnfpkgops"
1483 topic_msg = "vnfd"
1484 schema_new = vnfpkgop_new_schema
1485 schema_edit = None
1486
1487 def __init__(self, db, fs, msg, auth):
1488 BaseTopic.__init__(self, db, fs, msg, auth)
1489
1490 def edit(self, session, _id, indata=None, kwargs=None, content=None):
1491 raise EngineException(
1492 "Method 'edit' not allowed for topic '{}'".format(self.topic),
1493 HTTPStatus.METHOD_NOT_ALLOWED,
1494 )
1495
1496 def delete(self, session, _id, dry_run=False):
1497 raise EngineException(
1498 "Method 'delete' not allowed for topic '{}'".format(self.topic),
1499 HTTPStatus.METHOD_NOT_ALLOWED,
1500 )
1501
1502 def delete_list(self, session, filter_q=None):
1503 raise EngineException(
1504 "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
1505 HTTPStatus.METHOD_NOT_ALLOWED,
1506 )
1507
1508 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
1509 """
1510 Creates a new entry into database.
1511 :param rollback: list where created database items are appended, in case a rollback needs to be done
1512 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1513 :param indata: data to be inserted
1514 :param kwargs: used to override the indata descriptor
1515 :param headers: http request headers
1516 :return: _id, op_id:
1517 _id: identity of the inserted data.
1518 op_id: None
1519 """
1520 self._update_input_with_kwargs(indata, kwargs)
1521 validate_input(indata, self.schema_new)
1522 vnfpkg_id = indata["vnfPkgId"]
1523 filter_q = BaseTopic._get_project_filter(session)
1524 filter_q["_id"] = vnfpkg_id
1525 vnfd = self.db.get_one("vnfds", filter_q)
1526 operation = indata["lcmOperationType"]
1527 kdu_name = indata["kdu_name"]
1528 for kdu in vnfd.get("kdu", []):
1529 if kdu["name"] == kdu_name:
1530 helm_chart = kdu.get("helm-chart")
1531 juju_bundle = kdu.get("juju-bundle")
1532 break
1533 else:
1534 raise EngineException(
1535 "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
1536 )
1537 if helm_chart:
1538 indata["helm-chart"] = helm_chart
1539 match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
1540 repo_name = match.group(1) if match else None
1541 elif juju_bundle:
1542 indata["juju-bundle"] = juju_bundle
1543 match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
1544 repo_name = match.group(1) if match else None
1545 else:
1546 raise EngineException(
1547 "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
1548 vnfpkg_id, kdu_name
1549 )
1550 )
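# Illustrative values for the repository lookup below: a helm-chart reference
# such as "stable/openldap" yields repo_name "stable", which is then resolved in
# the "k8srepos" collection; a bare name like "openldap" leaves repo_name None
# and no repository id/url is attached to the operation.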
1551 if repo_name:
1552 del filter_q["_id"]
1553 filter_q["name"] = repo_name
1554 repo = self.db.get_one("k8srepos", filter_q)
1555 k8srepo_id = repo.get("_id")
1556 k8srepo_url = repo.get("url")
1557 else:
1558 k8srepo_id = None
1559 k8srepo_url = None
1560 indata["k8srepoId"] = k8srepo_id
1561 indata["k8srepo_url"] = k8srepo_url
1562 vnfpkgop_id = str(uuid4())
1563 vnfpkgop_desc = {
1564 "_id": vnfpkgop_id,
1565 "operationState": "PROCESSING",
1566 "vnfPkgId": vnfpkg_id,
1567 "lcmOperationType": operation,
1568 "isAutomaticInvocation": False,
1569 "isCancelPending": False,
1570 "operationParams": indata,
1571 "links": {
1572 "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
1573 "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
1574 },
1575 }
1576 self.format_on_new(
1577 vnfpkgop_desc, session["project_id"], make_public=session["public"]
1578 )
1579 ctime = vnfpkgop_desc["_admin"]["created"]
1580 vnfpkgop_desc["statusEnteredTime"] = ctime
1581 vnfpkgop_desc["startTime"] = ctime
1582 self.db.create(self.topic, vnfpkgop_desc)
1583 rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
1584 self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
1585 return vnfpkgop_id, None