Fix bug 1716: remove vcaId from NSR instantiation parameter
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import importlib
20 import copy
21
22 # import logging
23 from hashlib import md5
24 from osm_common.dbbase import DbException, deep_update_rfc7396
25 from http import HTTPStatus
26 from time import time
27 from uuid import uuid4
28 from re import fullmatch
29 from osm_nbi.validation import (
30 ValidationError,
31 pdu_new_schema,
32 pdu_edit_schema,
33 validate_input,
34 vnfpkgop_new_schema,
35 )
36 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
37 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
38 from osm_im.nst import nst as nst_im
39 from pyangbind.lib.serialise import pybindJSONDecoder
40 import pyangbind.lib.pybindJSON as pybindJSON
41 from osm_nbi import utils
42
43 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
44
45
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor topics (VNFD, NSD, ...).

    Implements the SOL005 two-step on-boarding (create an empty DB entry,
    then upload the package content), package storage at the file system
    and the generic validations shared by all descriptor types.
    """

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate the edited descriptor and check 'id' uniqueness.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after applying the edition
        :param edit_content: partial content used for the edition
        :param _id: internal id of the descriptor being edited
        :return: the serialized (pyangbind-validated) final content
        :raises EngineException: on repeated identifiers or 'id' conflicts
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Walk the descriptor recursively, ensuring that inside every
            # list of dicts the same "id" (or "name") value is not repeated.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case: the first item decides whether this
                            # list is keyed by "id" or by "name"
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # tolerate a descriptor without "_admin" instead of raising KeyError
        storage_params = internal_keys.get("_admin", {}).get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        self.topic[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Add the SOL005 on-boarding state fields to a new entry."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes file system storage associated with the descriptor
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: To not send message (False) or store content (list) instead
        :return: None if ok or raises EngineException with the problem
        """
        self.fs.file_delete(_id, ignore_non_exist=True)
        self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder

    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Return the single descriptor with this SOL006 'id'.

        First looks among descriptors owned by the session's project and,
        if none is found, retries the lookup (public descriptors).
        :raises DbException: NOT_FOUND or CONFLICT (more than one match)
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this filter is built exactly like the one above;
        # presumably _get_project_filter already includes public items —
        # confirm against BaseTopic.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        content = {"_admin": {"userDefinedData": indata}}
        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None

    def upload_content(self, session, _id, indata, kwargs, headers):
        """
        Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id : the nsd,vnfd is already created, this is the id
        :param indata: http body request
        :param kwargs: user query string to override parameters. NOT USED
        :param headers: http request headers
        :return: True if package is completely uploaded or False if partial content has been uploded
            Raise exception on error
        """
        # Check that _id exists and it is valid
        current_desc = self.show(session, _id)

        content_range_text = headers.get("Content-Range")
        expected_md5 = headers.get("Content-File-MD5")
        compressed = None
        content_type = headers.get("Content-Type")
        # BUGFIX: the original expression mixed "and"/"or" without
        # parentheses, so a missing Content-Type header raised
        # "TypeError: argument of type 'NoneType' is not iterable"
        if content_type and (
            "application/gzip" in content_type
            or "application/x-gzip" in content_type
            or "application/zip" in content_type
        ):
            compressed = "gzip"
        filename = headers.get("Content-Filename")
        if not filename:
            filename = "package.tar.gz" if compressed else "package"
        # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
        file_pkg = None
        error_text = ""
        try:
            if content_range_text:
                content_range = (
                    content_range_text.replace("-", " ").replace("/", " ").split()
                )
                if (
                    content_range[0] != "bytes"
                ):  # TODO check x<y not negative < total....
                    raise IndexError()
                start = int(content_range[1])
                end = int(content_range[2]) + 1
                total = int(content_range[3])
            else:
                start = 0
            temp_folder = (
                _id + "_"
            )  # all the content is upload here and if ok, it is rename from id_ to is folder

            if start:
                if not self.fs.file_exists(temp_folder, "dir"):
                    raise EngineException(
                        "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                    )
            else:
                self.fs.file_delete(temp_folder, ignore_non_exist=True)
                self.fs.mkdir(temp_folder)

            storage = self.fs.get_params()
            storage["folder"] = _id

            file_path = (temp_folder, filename)
            if self.fs.file_exists(file_path, "file"):
                file_size = self.fs.file_size(file_path)
            else:
                file_size = 0
            if file_size != start:
                raise EngineException(
                    "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                        file_size, start
                    ),
                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                )
            file_pkg = self.fs.file_open(file_path, "a+b")
            if isinstance(indata, dict):
                indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                file_pkg.write(indata_text.encode(encoding="utf-8"))
            else:
                indata_len = 0
                while True:
                    indata_text = indata.read(4096)
                    indata_len += len(indata_text)
                    if not indata_text:
                        break
                    file_pkg.write(indata_text)
            if content_range_text:
                if indata_len != end - start:
                    raise EngineException(
                        "Mismatch between Content-Range header {}-{} and body length of {}".format(
                            start, end - 1, indata_len
                        ),
                        HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                    )
                if end != total:
                    # TODO update to UPLOADING
                    return False

            # PACKAGE UPLOADED
            if expected_md5:
                file_pkg.seek(0, 0)
                file_md5 = md5()
                chunk_data = file_pkg.read(1024)
                while chunk_data:
                    file_md5.update(chunk_data)
                    chunk_data = file_pkg.read(1024)
                if expected_md5 != file_md5.hexdigest():
                    raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
            file_pkg.seek(0, 0)
            if compressed == "gzip":
                # NOTE(review): "application/zip" is also mapped to "gzip"
                # above, but tarfile cannot read zip archives — confirm
                # whether zip support is expected here.
                tar = tarfile.open(mode="r", fileobj=file_pkg)
                descriptor_file_name = None
                for tarinfo in tar:
                    tarname = tarinfo.name
                    tarname_path = tarname.split("/")
                    if (
                        not tarname_path[0] or ".." in tarname_path
                    ):  # if start with "/" means absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor tar.gz"
                        )
                    if len(tarname_path) == 1 and not tarinfo.isdir():
                        raise EngineException(
                            "All files must be inside a dir for package descriptor tar.gz"
                        )
                    if (
                        tarname.endswith(".yaml")
                        or tarname.endswith(".json")
                        or tarname.endswith(".yml")
                    ):
                        storage["pkg-dir"] = tarname_path[0]
                        if len(tarname_path) == 2:
                            if descriptor_file_name:
                                raise EngineException(
                                    "Found more than one descriptor file at package descriptor tar.gz"
                                )
                            descriptor_file_name = tarname
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor tar.gz"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(tar, temp_folder)
                with self.fs.file_open(
                    (temp_folder, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            else:
                content = file_pkg.read()
                storage["descriptor"] = descriptor_file_name = filename

            if descriptor_file_name.endswith(".json"):
                error_text = "Invalid json format "
                # BUGFIX: content is an in-memory str/bytes, not a file
                # object, so json.loads must be used instead of json.load
                indata = json.loads(content)
            else:
                error_text = "Invalid yaml format "
                indata = yaml.load(content, Loader=yaml.SafeLoader)

            current_desc["_admin"]["storage"] = storage
            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
            current_desc["_admin"]["operationalState"] = "ENABLED"

            indata = self._remove_envelop(indata)

            # Override descriptor with query string kwargs
            if kwargs:
                self._update_input_with_kwargs(indata, kwargs)

            deep_update_rfc7396(current_desc, indata)
            current_desc = self.check_conflict_on_edit(
                session, current_desc, indata, _id=_id
            )
            current_desc["_admin"]["modified"] = time()
            self.db.replace(self.topic, _id, current_desc)
            self.fs.dir_rename(temp_folder, _id)

            indata["_id"] = _id
            self._send_msg("edited", indata)

            # TODO if descriptor has changed because kwargs update content and remove cached zip
            # TODO if zip is not present creates one
            return True

        except EngineException:
            raise
        except IndexError:
            raise EngineException(
                "invalid Content-Range header format. Expected 'bytes start-end/total'",
                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
            )
        except IOError as e:
            raise EngineException(
                "invalid upload transaction sequence: '{}'".format(e),
                HTTPStatus.BAD_REQUEST,
            )
        except tarfile.ReadError as e:
            raise EngineException(
                "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
            )
        except (ValueError, yaml.YAMLError) as e:
            raise EngineException(error_text + str(e))
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
        finally:
            if file_pkg:
                file_pkg.close()

    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype   accept  ZIP  TEXT    -> result
        # manyfiles         yes  X       -> zip
        #                   no   yes     -> error
        # onefile           yes  no      -> zip
        #                   X    yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )

    def _remove_yang_prefixes_from_descriptor(self, descriptor):
        """Return a copy of the descriptor with 'prefix:' dropped from keys."""
        new_descriptor = {}
        for k, v in descriptor.items():
            new_v = v
            if isinstance(v, dict):
                new_v = self._remove_yang_prefixes_from_descriptor(v)
            elif isinstance(v, list):
                new_v = list()
                for x in v:
                    if isinstance(x, dict):
                        new_v.append(self._remove_yang_prefixes_from_descriptor(x))
                    else:
                        new_v.append(x)
            new_descriptor[k.split(":")[-1]] = new_v
        return new_descriptor

    def pyangbind_validation(self, item, data, force=False):
        """Model validation; subclasses must override with the proper model."""
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )

    def _validate_input_edit(self, indata, content, force=False):
        """Normalize and validate the body of an edit request.

        Moves 'operationalState' and 'userDefinedData' under '_admin'.
        :raises EngineException: on invalid state, wrong data type or no-op
        """
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance instead of type() == dict: accepts dict subclasses
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata
578
579
580 class VnfdTopic(DescriptorTopic):
581 topic = "vnfds"
582 topic_msg = "vnfd"
583
584 def __init__(self, db, fs, msg, auth):
585 DescriptorTopic.__init__(self, db, fs, msg, auth)
586
587 def pyangbind_validation(self, item, data, force=False):
588 if self._descriptor_data_is_in_old_format(data):
589 raise EngineException(
590 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
591 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
592 )
593 try:
594 myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
595 pybindJSONDecoder.load_ietf_json(
596 {"etsi-nfv-vnfd:vnfd": data},
597 None,
598 None,
599 obj=myvnfd,
600 path_helper=True,
601 skip_unknown=force,
602 )
603 out = pybindJSON.dumps(myvnfd, mode="ietf")
604 desc_out = self._remove_envelop(yaml.safe_load(out))
605 desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
606 return utils.deep_update_dict(data, desc_out)
607 except Exception as e:
608 raise EngineException(
609 "Error in pyangbind validation: {}".format(str(e)),
610 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
611 )
612
613 @staticmethod
614 def _descriptor_data_is_in_old_format(data):
615 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
616
617 @staticmethod
618 def _remove_envelop(indata=None):
619 if not indata:
620 return {}
621 clean_indata = indata
622
623 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
624 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
625 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
626 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
627 elif clean_indata.get("vnfd"):
628 if not isinstance(clean_indata["vnfd"], dict):
629 raise EngineException("'vnfd' must be dict")
630 clean_indata = clean_indata["vnfd"]
631
632 return clean_indata
633
634 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
635 final_content = super().check_conflict_on_edit(
636 session, final_content, edit_content, _id
637 )
638
639 # set type of vnfd
640 contains_pdu = False
641 contains_vdu = False
642 for vdu in get_iterable(final_content.get("vdu")):
643 if vdu.get("pdu-type"):
644 contains_pdu = True
645 else:
646 contains_vdu = True
647 if contains_pdu:
648 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
649 elif contains_vdu:
650 final_content["_admin"]["type"] = "vnfd"
651 # if neither vud nor pdu do not fill type
652 return final_content
653
654 def check_conflict_on_del(self, session, _id, db_content):
655 """
656 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
657 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
658 that uses this vnfd
659 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
660 :param _id: vnfd internal id
661 :param db_content: The database content of the _id.
662 :return: None or raises EngineException with the conflict
663 """
664 if session["force"]:
665 return
666 descriptor = db_content
667 descriptor_id = descriptor.get("id")
668 if not descriptor_id: # empty vnfd not uploaded
669 return
670
671 _filter = self._get_project_filter(session)
672
673 # check vnfrs using this vnfd
674 _filter["vnfd-id"] = _id
675 if self.db.get_list("vnfrs", _filter):
676 raise EngineException(
677 "There is at least one VNF instance using this descriptor",
678 http_code=HTTPStatus.CONFLICT,
679 )
680
681 # check NSD referencing this VNFD
682 del _filter["vnfd-id"]
683 _filter["vnfd-id"] = descriptor_id
684 if self.db.get_list("nsds", _filter):
685 raise EngineException(
686 "There is at least one NS package referencing this descriptor",
687 http_code=HTTPStatus.CONFLICT,
688 )
689
690 def _validate_input_new(self, indata, storage_params, force=False):
691 indata.pop("onboardingState", None)
692 indata.pop("operationalState", None)
693 indata.pop("usageState", None)
694 indata.pop("links", None)
695
696 indata = self.pyangbind_validation("vnfds", indata, force)
697 # Cross references validation in the descriptor
698
699 self.validate_mgmt_interface_connection_point(indata)
700
701 for vdu in get_iterable(indata.get("vdu")):
702 self.validate_vdu_internal_connection_points(vdu)
703 self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
704 self._validate_vdu_charms_in_package(storage_params, indata)
705
706 self._validate_vnf_charms_in_package(storage_params, indata)
707
708 self.validate_external_connection_points(indata)
709 self.validate_internal_virtual_links(indata)
710 self.validate_monitoring_params(indata)
711 self.validate_scaling_group_descriptor(indata)
712
713 return indata
714
715 @staticmethod
716 def validate_mgmt_interface_connection_point(indata):
717 if not indata.get("vdu"):
718 return
719 if not indata.get("mgmt-cp"):
720 raise EngineException(
721 "'mgmt-cp' is a mandatory field and it is not defined",
722 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
723 )
724
725 for cp in get_iterable(indata.get("ext-cpd")):
726 if cp["id"] == indata["mgmt-cp"]:
727 break
728 else:
729 raise EngineException(
730 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
731 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
732 )
733
734 @staticmethod
735 def validate_vdu_internal_connection_points(vdu):
736 int_cpds = set()
737 for cpd in get_iterable(vdu.get("int-cpd")):
738 cpd_id = cpd.get("id")
739 if cpd_id and cpd_id in int_cpds:
740 raise EngineException(
741 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
742 vdu["id"], cpd_id
743 ),
744 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
745 )
746 int_cpds.add(cpd_id)
747
748 @staticmethod
749 def validate_external_connection_points(indata):
750 all_vdus_int_cpds = set()
751 for vdu in get_iterable(indata.get("vdu")):
752 for int_cpd in get_iterable(vdu.get("int-cpd")):
753 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
754
755 ext_cpds = set()
756 for cpd in get_iterable(indata.get("ext-cpd")):
757 cpd_id = cpd.get("id")
758 if cpd_id and cpd_id in ext_cpds:
759 raise EngineException(
760 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
761 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
762 )
763 ext_cpds.add(cpd_id)
764
765 int_cpd = cpd.get("int-cpd")
766 if int_cpd:
767 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
768 raise EngineException(
769 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
770 cpd_id
771 ),
772 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
773 )
774 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
775
776 def _validate_vdu_charms_in_package(self, storage_params, indata):
777 for df in indata["df"]:
778 if (
779 "lcm-operations-configuration" in df
780 and "operate-vnf-op-config" in df["lcm-operations-configuration"]
781 ):
782 configs = df["lcm-operations-configuration"][
783 "operate-vnf-op-config"
784 ].get("day1-2", [])
785 vdus = df.get("vdu-profile", [])
786 for vdu in vdus:
787 for config in configs:
788 if config["id"] == vdu["id"] and utils.find_in_list(
789 config.get("execution-environment-list", []),
790 lambda ee: "juju" in ee,
791 ):
792 if not self._validate_package_folders(
793 storage_params, "charms"
794 ):
795 raise EngineException(
796 "Charm defined in vnf[id={}] but not present in "
797 "package".format(indata["id"])
798 )
799
800 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
801 if not vdu.get("cloud-init-file"):
802 return
803 if not self._validate_package_folders(
804 storage_params, "cloud_init", vdu["cloud-init-file"]
805 ):
806 raise EngineException(
807 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
808 "package".format(indata["id"], vdu["id"])
809 )
810
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        # Checks that, when the VNF-level day1-2 configuration (the day1-2
        # entry whose id equals the VNFD id) declares a juju execution
        # environment, the package contains a "charms" folder.
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these are "return" (not "continue"), so the first
            # df without an operate-vnf-op-config aborts the check for every
            # remaining df as well — confirm this is the intended behaviour.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # only the entry matching the VNFD id is the VNF-level config
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(storage_params, "charms"):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
834
835 def _validate_package_folders(self, storage_params, folder, file=None):
836 if not storage_params or not storage_params.get("pkg-dir"):
837 return False
838 else:
839 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
840 f = "{}_/{}/{}".format(
841 storage_params["folder"], storage_params["pkg-dir"], folder
842 )
843 else:
844 f = "{}/{}/{}".format(
845 storage_params["folder"], storage_params["pkg-dir"], folder
846 )
847 if file:
848 return self.fs.file_exists("{}/{}".format(f, file), "file")
849 else:
850 if self.fs.file_exists(f, "dir"):
851 if self.fs.dir_ls(f):
852 return True
853 return False
854
855 @staticmethod
856 def validate_internal_virtual_links(indata):
857 all_ivld_ids = set()
858 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
859 ivld_id = ivld.get("id")
860 if ivld_id and ivld_id in all_ivld_ids:
861 raise EngineException(
862 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
863 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
864 )
865 else:
866 all_ivld_ids.add(ivld_id)
867
868 for vdu in get_iterable(indata.get("vdu")):
869 for int_cpd in get_iterable(vdu.get("int-cpd")):
870 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
871 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
872 raise EngineException(
873 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
874 "int-virtual-link-desc".format(
875 vdu["id"], int_cpd["id"], int_cpd_ivld_id
876 ),
877 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
878 )
879
880 for df in get_iterable(indata.get("df")):
881 for vlp in get_iterable(df.get("virtual-link-profile")):
882 vlp_ivld_id = vlp.get("id")
883 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
884 raise EngineException(
885 "df[id='{}']:virtual-link-profile='{}' must match an existing "
886 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
887 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
888 )
889
890 @staticmethod
891 def validate_monitoring_params(indata):
892 all_monitoring_params = set()
893 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
894 for mp in get_iterable(ivld.get("monitoring-parameters")):
895 mp_id = mp.get("id")
896 if mp_id and mp_id in all_monitoring_params:
897 raise EngineException(
898 "Duplicated monitoring-parameter id in "
899 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
900 ivld["id"], mp_id
901 ),
902 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
903 )
904 else:
905 all_monitoring_params.add(mp_id)
906
907 for vdu in get_iterable(indata.get("vdu")):
908 for mp in get_iterable(vdu.get("monitoring-parameter")):
909 mp_id = mp.get("id")
910 if mp_id and mp_id in all_monitoring_params:
911 raise EngineException(
912 "Duplicated monitoring-parameter id in "
913 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
914 vdu["id"], mp_id
915 ),
916 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
917 )
918 else:
919 all_monitoring_params.add(mp_id)
920
921 for df in get_iterable(indata.get("df")):
922 for mp in get_iterable(df.get("monitoring-parameter")):
923 mp_id = mp.get("id")
924 if mp_id and mp_id in all_monitoring_params:
925 raise EngineException(
926 "Duplicated monitoring-parameter id in "
927 "df[id='{}']:monitoring-parameter[id='{}']".format(
928 df["id"], mp_id
929 ),
930 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
931 )
932 else:
933 all_monitoring_params.add(mp_id)
934
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling information of every df in the descriptor.

        Checks that each scaling-criteria references an existing
        monitoring-parameter, and that each scaling-config-action has a day1-2
        configuration (with id equal to the descriptor id) and that its
        referenced config-primitive name exists in the day1-2 configurations.

        :param indata: vnfd descriptor content
        :raises: EngineException (UNPROCESSABLE_ENTITY) on any dangling reference
        """
        # Collect every monitoring-parameter id defined anywhere in the
        # descriptor (int-virtual-link-desc, vdu and df sections).
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Every scaling-criteria must reference a known monitoring param.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration
                    # entry whose id equals the descriptor id.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # Each day1-2 configuration is scanned for the referenced
                    # primitive; the for/else raises on the first configuration
                    # that does not define it (no break occurred).
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1015
1016 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1017 """
1018 Deletes associate file system storage (via super)
1019 Deletes associated vnfpkgops from database.
1020 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1021 :param _id: server internal id
1022 :param db_content: The database content of the descriptor
1023 :return: None
1024 :raises: FsException in case of error while deleting associated storage
1025 """
1026 super().delete_extra(session, _id, db_content, not_send_msg)
1027 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1028
1029 def sol005_projection(self, data):
1030 data["onboardingState"] = data["_admin"]["onboardingState"]
1031 data["operationalState"] = data["_admin"]["operationalState"]
1032 data["usageState"] = data["_admin"]["usageState"]
1033
1034 links = {}
1035 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1036 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1037 links["packageContent"] = {
1038 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1039 }
1040 data["_links"] = links
1041
1042 return super().sol005_projection(data)
1043
1044
class NsdTopic(DescriptorTopic):
    # Topic handler for NS descriptors (SOL006 "nsd"): model validation,
    # cross-reference checks against vnfds and SOL005 projection.
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 model using pyangbind.

        :param item: topic name; not used by this implementation
        :param data: descriptor content (envelope already removed)
        :param force: when True, unknown fields are skipped instead of rejected
        :return: the descriptor serialized back from the validated model
        :raises: EngineException (UNPROCESSABLE_ENTITY) if the descriptor uses
            the pre-SOL006 format or fails model validation
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # Save df[0] vnf-profile so it can be restored verbatim after the
            # model round-trip (presumably the round-trip alters it — TODO confirm).
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Pre-SOL006 descriptors were wrapped in an "nsd-catalog" envelope.
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Return the bare nsd content, stripping 'nsd'/'etsi-nfv-nsd:nsd' envelopes.

        :param indata: uploaded descriptor; may be None/empty
        :raises: EngineException if the inner 'nsd' list has more than one element
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a newly uploaded NSD.

        Drops read-only SOL005 attributes, runs pyangbind model validation and
        then descriptor-internal cross-reference checks.

        :param indata: descriptor content
        :param storage_params: storage information; not used by this implementation
        :param force: passed through to pyangbind validation
        :return: the validated (possibly normalized) descriptor
        """
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a mgmt-network vld.

        :param vld: one virtual-link-desc entry of the nsd
        :param indata: the whole nsd content
        :raises: EngineException (UNPROCESSABLE_ENTITY) on conflict
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df:vnf-profile:vnfd-id appears in the top-level vnfd-id list.

        :raises: EngineException (UNPROCESSABLE_ENTITY) on a dangling reference
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if type(data) == dict:
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the operational state to the value it already has is a conflict.
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a map vnfd-id -> vnfd content for every vnfd referenced by the nsd.

        :raises: EngineException (CONFLICT) if a referenced vnfd does not exist
            in the project
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check df constituent-cpd-id references against the vnfds' ext-cpd ids.

        :param df: one df entry of the nsd
        :param vnfds_index: map vnfd-id -> vnfd content
        :raises: EngineException (UNPROCESSABLE_ENTITY) on a dangling reference

        NOTE(review): a vnf-profile whose vnfd-id is missing from vnfds_index
        makes vnfds_index.get() return None and the vnfd.get() call below fail
        with AttributeError — callers must pass a complete index (see
        _get_descriptor_constituent_vnfds_index).
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then validate vnfd dependencies of the result."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose _admin state as SOL005 'nsd*' attributes and attach _links."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1318
1319
class NstTopic(DescriptorTopic):
    """Topic handler for Network Slice Templates (NST)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST descriptor against the information model via pyangbind.

        :param item: topic name; not used by this implementation
        :param data: descriptor content without envelope
        :param force: when True, unknown fields are skipped instead of rejected
        :return: the descriptor serialized back from the validated model
        :raises: EngineException (UNPROCESSABLE_ENTITY) on validation failure
        """
        try:
            model = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=model,
                path_helper=True,
                skip_unknown=force,
            )
            serialized = pybindJSON.dumps(model, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nst' or 'nst:nst' envelope and return the bare template."""
        if not indata:
            return {}
        for envelope in ("nst", "nst:nst"):
            if indata.get(envelope):
                content = indata[envelope]
                if not isinstance(content, list) or len(content) != 1:
                    raise EngineException(
                        "'{}' must be a list only one element".format(envelope)
                    )
                return content[0]
        return indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Drop read-only SOL005 fields, then validate the new NST descriptor."""
        for read_only in ("onboardingState", "operationalState", "usageState"):
            indata.pop(read_only, None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        for subnet in get_iterable(descriptor.get("netslice-subnet")):
            nsd_ref = subnet["nsd-ref"]
            query = self._get_project_filter(session)
            query["id"] = nsd_ref
            if not self.db.get_list("nsds", query):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_ref),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base conflict checks, then verify the referenced nsds exist."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        # Any Netslice Instance created from this template blocks the deletion.
        usage_query = self._get_project_filter(session)
        usage_query["_admin.nst-id"] = _id
        if self.db.get_list("nsis", usage_query):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose _admin state as SOL005 attributes and attach _links."""
        admin = data["_admin"]
        data["onboardingState"] = admin["onboardingState"]
        data["operationalState"] = admin["operationalState"]
        data["usageState"] = admin["usageState"]

        template_id = data["_id"]
        data["_links"] = {
            "self": {"href": "/nst/v1/netslice_templates/{}".format(template_id)},
            "nst": {"href": "/nst/v1/netslice_templates/{}/nst".format(template_id)},
        }

        return super().sol005_projection(data)
1437
1438
class PduTopic(BaseTopic):
    """Topic handler for Physical Deployment Unit descriptors."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the _admin defaults of a freshly created PDU descriptor."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # A PDU referenced from any vdur of a VNF record cannot be removed.
        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1474
1475
class VnfPkgOpTopic(BaseTopic):
    """Topic handler for VNF package operations (create-only resource)."""

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Editing a package operation is not supported."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Deleting a single package operation is not supported."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Bulk deletion of package operations is not supported."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        query = BaseTopic._get_project_filter(session)
        query["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", query)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]

        # Locate the kdu this operation refers to.
        kdu = next((k for k in vnfd.get("kdu", []) if k["name"] == kdu_name), None)
        if kdu is None:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        helm_chart = kdu.get("helm-chart")
        juju_bundle = kdu.get("juju-bundle")
        if helm_chart:
            indata["helm-chart"] = helm_chart
            artifact = helm_chart
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            artifact = juju_bundle
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # A "<repo>/<name>" artifact reference points at an external k8s repo.
        match = fullmatch(r"([^/]*)/([^/]*)", artifact)
        repo_name = match.group(1) if match else None
        if repo_name:
            del query["_id"]
            query["name"] = repo_name
            repo = self.db.get_one("k8srepos", query)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url

        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        creation_time = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = creation_time
        vnfpkgop_desc["startTime"] = creation_time
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None