# Feature 10908: NBI Revisions
# Source: osm/NBI.git — osm_nbi/descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22
23 # import logging
24 from hashlib import md5
25 from osm_common.dbbase import DbException, deep_update_rfc7396
26 from http import HTTPStatus
27 from time import time
28 from uuid import uuid4
29 from re import fullmatch
30 from zipfile import ZipFile
31 from osm_nbi.validation import (
32 ValidationError,
33 pdu_new_schema,
34 pdu_edit_schema,
35 validate_input,
36 vnfpkgop_new_schema,
37 )
38 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
39 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
40 from osm_im.nst import nst as nst_im
41 from pyangbind.lib.serialise import pybindJSONDecoder
42 import pyangbind.lib.pybindJSON as pybindJSON
43 from osm_nbi import utils
44
45 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
46
47
class DescriptorTopic(BaseTopic):
    """Shared behaviour for descriptor topics (VNF/NS/NST packages)."""

    def __init__(self, db, fs, msg, auth):
        """Delegate construction to BaseTopic (database, file storage, message bus, auth)."""
        super().__init__(db, fs, msg, auth)
52
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate an edited descriptor before persisting it.

        Runs the BaseTopic checks, verifies that 'id'/'name' values are unique
        inside every list of the descriptor, re-validates the merged result
        through self._validate_input_new (pyangbind) and finally checks that
        the descriptor 'id' is not already used by another entry of this
        project.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content as it would be stored
        :param edit_content: only the edited fragment sent by the user
        :param _id: database _id of the descriptor being edited
        :return: serialized and validated final content
        :raise EngineException: 422 on duplicated identifiers, 409 on id clash
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk lists of dicts. For each list, the discriminator
            # key is taken from the FIRST item ("id" preferred over "name");
            # each discriminator value may then appear only once in the list.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    # position is rebuilt here only for the error text
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" was present in final_content; a
        # KeyError would be raised here otherwise — confirm with callers.
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            # forced operations skip the duplicate-id check below
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id  # exclude the descriptor being edited itself

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        self.topic[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
124
125 @staticmethod
126 def format_on_new(content, project_id=None, make_public=False):
127 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
128 content["_admin"]["onboardingState"] = "CREATED"
129 content["_admin"]["operationalState"] = "DISABLED"
130 content["_admin"]["usageState"] = "NOT_IN_USE"
131
132 def delete_extra(self, session, _id, db_content, not_send_msg=None):
133 """
134 Deletes file system storage associated with the descriptor
135 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
136 :param _id: server internal id
137 :param db_content: The database content of the descriptor
138 :param not_send_msg: To not send message (False) or store content (list) instead
139 :return: None if ok or raises EngineException with the problem
140 """
141 self.fs.file_delete(_id, ignore_non_exist=True)
142 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
143 # Remove file revisions
144 if "revision" in db_content["_admin"]:
145 revision = db_content["_admin"]["revision"]
146 while revision > 0:
147 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
148 revision = revision - 1
149
150
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Fetch exactly one descriptor by its SOL 'id' field.

        First looks for a descriptor owned by the session's project; if none
        is found it retries, expecting a public one.

        :param db: database driver
        :param session: contains "project_id" used to build the filter
        :param topic: collection name, e.g. "vnfds"
        :param id: descriptor 'id' (not the database '_id')
        :return: the matching database entry
        :raise DbException: 404 when none found, 409 when more than one found
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second filter is built exactly like the first one;
        # nothing visible here widens the search to public descriptors —
        # confirm _get_project_filter already covers public visibility.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
185
186 def new(self, rollback, session, indata=None, kwargs=None, headers=None):
187 """
188 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
189 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
190 (self.upload_content)
191 :param rollback: list to append created items at database in case a rollback may to be done
192 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
193 :param indata: data to be inserted
194 :param kwargs: used to override the indata descriptor
195 :param headers: http request headers
196 :return: _id, None: identity of the inserted data; and None as there is not any operation
197 """
198
199 # No needed to capture exceptions
200 # Check Quota
201 self.check_quota(session)
202
203 # _remove_envelop
204 if indata:
205 if "userDefinedData" in indata:
206 indata = indata["userDefinedData"]
207
208 # Override descriptor with query string kwargs
209 self._update_input_with_kwargs(indata, kwargs)
210 # uncomment when this method is implemented.
211 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
212 # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
213
214 content = {"_admin": {
215 "userDefinedData": indata,
216 "revision": 0
217 }}
218
219 self.format_on_new(
220 content, session["project_id"], make_public=session["public"]
221 )
222 _id = self.db.create(self.topic, content)
223 rollback.append({"topic": self.topic, "_id": _id})
224 self._send_msg("created", {"_id": _id})
225 return _id, None
226
227 def upload_content(self, session, _id, indata, kwargs, headers):
228 """
229 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
230 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
231 :param _id : the nsd,vnfd is already created, this is the id
232 :param indata: http body request
233 :param kwargs: user query string to override parameters. NOT USED
234 :param headers: http request headers
235 :return: True if package is completely uploaded or False if partial content has been uploded
236 Raise exception on error
237 """
238 # Check that _id exists and it is valid
239 current_desc = self.show(session, _id)
240
241 content_range_text = headers.get("Content-Range")
242 expected_md5 = headers.get("Content-File-MD5")
243 compressed = None
244 content_type = headers.get("Content-Type")
245 if (
246 content_type
247 and "application/gzip" in content_type
248 or "application/x-gzip" in content_type
249 ):
250 compressed = "gzip"
251 if (
252 content_type
253 and "application/zip" in content_type
254 ):
255 compressed = "zip"
256 filename = headers.get("Content-Filename")
257 if not filename and compressed:
258 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
259 elif not filename:
260 filename = "package"
261
262 revision = 1
263 if "revision" in current_desc["_admin"]:
264 revision = current_desc["_admin"]["revision"] + 1
265
266 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
267 file_pkg = None
268 error_text = ""
269 try:
270 if content_range_text:
271 content_range = (
272 content_range_text.replace("-", " ").replace("/", " ").split()
273 )
274 if (
275 content_range[0] != "bytes"
276 ): # TODO check x<y not negative < total....
277 raise IndexError()
278 start = int(content_range[1])
279 end = int(content_range[2]) + 1
280 total = int(content_range[3])
281 else:
282 start = 0
283 # Rather than using a temp folder, we will store the package in a folder based on
284 # the current revision.
285 proposed_revision_path = (
286 _id + ":" + str(revision)
287 ) # all the content is upload here and if ok, it is rename from id_ to is folder
288
289 if start:
290 if not self.fs.file_exists(proposed_revision_path, "dir"):
291 raise EngineException(
292 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
293 )
294 else:
295 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
296 self.fs.mkdir(proposed_revision_path)
297
298 storage = self.fs.get_params()
299 storage["folder"] = _id
300
301 file_path = (proposed_revision_path, filename)
302 if self.fs.file_exists(file_path, "file"):
303 file_size = self.fs.file_size(file_path)
304 else:
305 file_size = 0
306 if file_size != start:
307 raise EngineException(
308 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
309 file_size, start
310 ),
311 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
312 )
313 file_pkg = self.fs.file_open(file_path, "a+b")
314 if isinstance(indata, dict):
315 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
316 file_pkg.write(indata_text.encode(encoding="utf-8"))
317 else:
318 indata_len = 0
319 while True:
320 indata_text = indata.read(4096)
321 indata_len += len(indata_text)
322 if not indata_text:
323 break
324 file_pkg.write(indata_text)
325 if content_range_text:
326 if indata_len != end - start:
327 raise EngineException(
328 "Mismatch between Content-Range header {}-{} and body length of {}".format(
329 start, end - 1, indata_len
330 ),
331 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
332 )
333 if end != total:
334 # TODO update to UPLOADING
335 return False
336
337 # PACKAGE UPLOADED
338 if expected_md5:
339 file_pkg.seek(0, 0)
340 file_md5 = md5()
341 chunk_data = file_pkg.read(1024)
342 while chunk_data:
343 file_md5.update(chunk_data)
344 chunk_data = file_pkg.read(1024)
345 if expected_md5 != file_md5.hexdigest():
346 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
347 file_pkg.seek(0, 0)
348 if compressed == "gzip":
349 tar = tarfile.open(mode="r", fileobj=file_pkg)
350 descriptor_file_name = None
351 for tarinfo in tar:
352 tarname = tarinfo.name
353 tarname_path = tarname.split("/")
354 if (
355 not tarname_path[0] or ".." in tarname_path
356 ): # if start with "/" means absolute path
357 raise EngineException(
358 "Absolute path or '..' are not allowed for package descriptor tar.gz"
359 )
360 if len(tarname_path) == 1 and not tarinfo.isdir():
361 raise EngineException(
362 "All files must be inside a dir for package descriptor tar.gz"
363 )
364 if (
365 tarname.endswith(".yaml")
366 or tarname.endswith(".json")
367 or tarname.endswith(".yml")
368 ):
369 storage["pkg-dir"] = tarname_path[0]
370 if len(tarname_path) == 2:
371 if descriptor_file_name:
372 raise EngineException(
373 "Found more than one descriptor file at package descriptor tar.gz"
374 )
375 descriptor_file_name = tarname
376 if not descriptor_file_name:
377 raise EngineException(
378 "Not found any descriptor file at package descriptor tar.gz"
379 )
380 storage["descriptor"] = descriptor_file_name
381 storage["zipfile"] = filename
382 self.fs.file_extract(tar, proposed_revision_path)
383 with self.fs.file_open(
384 (proposed_revision_path, descriptor_file_name), "r"
385 ) as descriptor_file:
386 content = descriptor_file.read()
387 elif compressed == "zip":
388 zipfile = ZipFile(file_pkg)
389 descriptor_file_name = None
390 for package_file in zipfile.infolist():
391 zipfilename = package_file.filename
392 file_path = zipfilename.split("/")
393 if (
394 not file_path[0] or ".." in zipfilename
395 ): # if start with "/" means absolute path
396 raise EngineException(
397 "Absolute path or '..' are not allowed for package descriptor zip"
398 )
399
400 if (
401 (
402 zipfilename.endswith(".yaml")
403 or zipfilename.endswith(".json")
404 or zipfilename.endswith(".yml")
405 ) and (
406 zipfilename.find("/") < 0
407 or zipfilename.find("Definitions") >= 0
408 )
409 ):
410 storage["pkg-dir"] = ""
411 if descriptor_file_name:
412 raise EngineException(
413 "Found more than one descriptor file at package descriptor zip"
414 )
415 descriptor_file_name = zipfilename
416 if not descriptor_file_name:
417 raise EngineException(
418 "Not found any descriptor file at package descriptor zip"
419 )
420 storage["descriptor"] = descriptor_file_name
421 storage["zipfile"] = filename
422 self.fs.file_extract(zipfile, proposed_revision_path)
423
424 with self.fs.file_open(
425 (proposed_revision_path, descriptor_file_name), "r"
426 ) as descriptor_file:
427 content = descriptor_file.read()
428 else:
429 content = file_pkg.read()
430 storage["descriptor"] = descriptor_file_name = filename
431
432 if descriptor_file_name.endswith(".json"):
433 error_text = "Invalid json format "
434 indata = json.load(content)
435 else:
436 error_text = "Invalid yaml format "
437 indata = yaml.load(content, Loader=yaml.SafeLoader)
438
439 # Need to close the file package here so it can be copied from the
440 # revision to the current, unrevisioned record
441 if file_pkg:
442 file_pkg.close()
443 file_pkg = None
444
445 # Fetch both the incoming, proposed revision and the original revision so we
446 # can call a validate method to compare them
447 current_revision_path = _id + "/"
448 self.fs.sync(from_path=current_revision_path)
449 self.fs.sync(from_path=proposed_revision_path)
450
451 if revision > 1:
452 try:
453 self._validate_descriptor_changes(
454 descriptor_file_name,
455 current_revision_path,
456 proposed_revision_path)
457 except Exception as e:
458 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
459 shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
460 # Only delete the new revision. We need to keep the original version in place
461 # as it has not been changed.
462 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
463 raise e
464
465 # Copy the revision to the active package name by its original id
466 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
467 os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
468 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
469 self.fs.mkdir(current_revision_path)
470 self.fs.reverse_sync(from_path=current_revision_path)
471 shutil.rmtree(self.fs.path + _id)
472
473 current_desc["_admin"]["storage"] = storage
474 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
475 current_desc["_admin"]["operationalState"] = "ENABLED"
476
477 indata = self._remove_envelop(indata)
478
479 # Override descriptor with query string kwargs
480 if kwargs:
481 self._update_input_with_kwargs(indata, kwargs)
482
483 deep_update_rfc7396(current_desc, indata)
484 current_desc = self.check_conflict_on_edit(
485 session, current_desc, indata, _id=_id
486 )
487 current_desc["_admin"]["modified"] = time()
488 current_desc["_admin"]["revision"] = revision
489 self.db.replace(self.topic, _id, current_desc)
490
491 # Store a copy of the package as a point in time revision
492 revision_desc = dict(current_desc)
493 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
494 self.db.create(self.topic + "_revisions", revision_desc)
495
496 indata["_id"] = _id
497 self._send_msg("edited", indata)
498
499 # TODO if descriptor has changed because kwargs update content and remove cached zip
500 # TODO if zip is not present creates one
501 return True
502
503 except EngineException:
504 raise
505 except IndexError:
506 raise EngineException(
507 "invalid Content-Range header format. Expected 'bytes start-end/total'",
508 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
509 )
510 except IOError as e:
511 raise EngineException(
512 "invalid upload transaction sequence: '{}'".format(e),
513 HTTPStatus.BAD_REQUEST,
514 )
515 except tarfile.ReadError as e:
516 raise EngineException(
517 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
518 )
519 except (ValueError, yaml.YAMLError) as e:
520 raise EngineException(error_text + str(e))
521 except ValidationError as e:
522 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
523 finally:
524 if file_pkg:
525 file_pkg.close()
526
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Work out which representations the client accepts.
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # Content can only be served once the package upload completed.
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # Directories are listed as plain text; files are streamed raw.
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decision table for the whole package / descriptor request:
        # pkgtype          accept  ZIP  TEXT  -> result
        # manyfiles                yes  X     -> zip
        #                          no   yes   -> error
        # onefile                  yes  no    -> zip
        #                          X    yes   -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
617
618 def _remove_yang_prefixes_from_descriptor(self, descriptor):
619 new_descriptor = {}
620 for k, v in descriptor.items():
621 new_v = v
622 if isinstance(v, dict):
623 new_v = self._remove_yang_prefixes_from_descriptor(v)
624 elif isinstance(v, list):
625 new_v = list()
626 for x in v:
627 if isinstance(x, dict):
628 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
629 else:
630 new_v.append(x)
631 new_descriptor[k.split(":")[-1]] = new_v
632 return new_descriptor
633
634 def pyangbind_validation(self, item, data, force=False):
635 raise EngineException(
636 "Not possible to validate '{}' item".format(item),
637 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
638 )
639
640 def _validate_input_edit(self, indata, content, force=False):
641 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
642 if "_id" in indata:
643 indata.pop("_id")
644 if "_admin" not in indata:
645 indata["_admin"] = {}
646
647 if "operationalState" in indata:
648 if indata["operationalState"] in ("ENABLED", "DISABLED"):
649 indata["_admin"]["operationalState"] = indata.pop("operationalState")
650 else:
651 raise EngineException(
652 "State '{}' is not a valid operational state".format(
653 indata["operationalState"]
654 ),
655 http_code=HTTPStatus.BAD_REQUEST,
656 )
657
658 # In the case of user defined data, we need to put the data in the root of the object
659 # to preserve current expected behaviour
660 if "userDefinedData" in indata:
661 data = indata.pop("userDefinedData")
662 if type(data) == dict:
663 indata["_admin"]["userDefinedData"] = data
664 else:
665 raise EngineException(
666 "userDefinedData should be an object, but is '{}' instead".format(
667 type(data)
668 ),
669 http_code=HTTPStatus.BAD_REQUEST,
670 )
671
672 if (
673 "operationalState" in indata["_admin"]
674 and content["_admin"]["operationalState"]
675 == indata["_admin"]["operationalState"]
676 ):
677 raise EngineException(
678 "operationalState already {}".format(
679 content["_admin"]["operationalState"]
680 ),
681 http_code=HTTPStatus.CONFLICT,
682 )
683
684 return indata
685
    def _validate_descriptor_changes(self,
                                     descriptor_file_name,
                                     old_descriptor_directory,
                                     new_descriptor_directory):
        """Hook to validate the differences between the stored descriptor
        revision and a newly uploaded one.

        Intentionally a no-op in the base class; subclasses may restrict which
        nodes are allowed to change between revisions.

        :param descriptor_file_name: name of the descriptor file inside the package
        :param old_descriptor_directory: fs path of the current revision
        :param new_descriptor_directory: fs path of the proposed revision
        """
        # Todo: compare changes and throw a meaningful exception for the user to understand
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass
697
class VnfdTopic(DescriptorTopic):
    """Descriptor topic for VNF packages (SOL006 VNFDs)."""

    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        """Delegate construction to DescriptorTopic."""
        super().__init__(db, fs, msg, auth)
704
    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the ETSI SOL006 VNFD pyangbind model.

        :param item: topic name, informational only ("vnfds")
        :param data: unwrapped descriptor content
        :param force: when True, unknown leafs are skipped instead of rejected
        :return: *data* deep-updated with the canonical (serialized) model output
        :raise EngineException: 422 for old-format or invalid descriptors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize back to get the canonical representation, then strip
            # the envelope and YANG prefixes before merging over the input.
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            # Any pyangbind failure is reported back as a 422 validation error.
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
730
731 @staticmethod
732 def _descriptor_data_is_in_old_format(data):
733 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
734
735 @staticmethod
736 def _remove_envelop(indata=None):
737 if not indata:
738 return {}
739 clean_indata = indata
740
741 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
742 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
743 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
744 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
745 elif clean_indata.get("vnfd"):
746 if not isinstance(clean_indata["vnfd"], dict):
747 raise EngineException("'vnfd' must be dict")
748 clean_indata = clean_indata["vnfd"]
749
750 return clean_indata
751
752 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
753 final_content = super().check_conflict_on_edit(
754 session, final_content, edit_content, _id
755 )
756
757 # set type of vnfd
758 contains_pdu = False
759 contains_vdu = False
760 for vdu in get_iterable(final_content.get("vdu")):
761 if vdu.get("pdu-type"):
762 contains_pdu = True
763 else:
764 contains_vdu = True
765 if contains_pdu:
766 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
767 elif contains_vdu:
768 final_content["_admin"]["type"] = "vnfd"
769 # if neither vud nor pdu do not fill type
770 return final_content
771
772 def check_conflict_on_del(self, session, _id, db_content):
773 """
774 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
775 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
776 that uses this vnfd
777 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
778 :param _id: vnfd internal id
779 :param db_content: The database content of the _id.
780 :return: None or raises EngineException with the conflict
781 """
782 if session["force"]:
783 return
784 descriptor = db_content
785 descriptor_id = descriptor.get("id")
786 if not descriptor_id: # empty vnfd not uploaded
787 return
788
789 _filter = self._get_project_filter(session)
790
791 # check vnfrs using this vnfd
792 _filter["vnfd-id"] = _id
793 if self.db.get_list("vnfrs", _filter):
794 raise EngineException(
795 "There is at least one VNF instance using this descriptor",
796 http_code=HTTPStatus.CONFLICT,
797 )
798
799 # check NSD referencing this VNFD
800 del _filter["vnfd-id"]
801 _filter["vnfd-id"] = descriptor_id
802 if self.db.get_list("nsds", _filter):
803 raise EngineException(
804 "There is at least one NS package referencing this descriptor",
805 http_code=HTTPStatus.CONFLICT,
806 )
807
    def _validate_input_new(self, indata, storage_params, force=False):
        """Full validation of a new/uploaded VNFD.

        Removes SOL005 read-only attributes, validates against the SOL006
        model, then applies cross-reference checks between descriptor sections
        and the uploaded package content.

        :param indata: unwrapped VNFD content (modified in place)
        :param storage_params: "_admin.storage" info locating the package in
            the file system (may be None)
        :param force: propagated to pyangbind to skip unknown leafs
        :return: the validated (possibly completed) descriptor
        """
        # These attributes are server-managed; drop them if the client sent them.
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): called once per vdu although it takes no vdu
            # argument — looks redundant; confirm intent.
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)

        return indata
832
833 @staticmethod
834 def validate_mgmt_interface_connection_point(indata):
835 if not indata.get("vdu"):
836 return
837 if not indata.get("mgmt-cp"):
838 raise EngineException(
839 "'mgmt-cp' is a mandatory field and it is not defined",
840 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
841 )
842
843 for cp in get_iterable(indata.get("ext-cpd")):
844 if cp["id"] == indata["mgmt-cp"]:
845 break
846 else:
847 raise EngineException(
848 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
849 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
850 )
851
852 @staticmethod
853 def validate_vdu_internal_connection_points(vdu):
854 int_cpds = set()
855 for cpd in get_iterable(vdu.get("int-cpd")):
856 cpd_id = cpd.get("id")
857 if cpd_id and cpd_id in int_cpds:
858 raise EngineException(
859 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
860 vdu["id"], cpd_id
861 ),
862 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
863 )
864 int_cpds.add(cpd_id)
865
866 @staticmethod
867 def validate_external_connection_points(indata):
868 all_vdus_int_cpds = set()
869 for vdu in get_iterable(indata.get("vdu")):
870 for int_cpd in get_iterable(vdu.get("int-cpd")):
871 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
872
873 ext_cpds = set()
874 for cpd in get_iterable(indata.get("ext-cpd")):
875 cpd_id = cpd.get("id")
876 if cpd_id and cpd_id in ext_cpds:
877 raise EngineException(
878 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
879 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
880 )
881 ext_cpds.add(cpd_id)
882
883 int_cpd = cpd.get("int-cpd")
884 if int_cpd:
885 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
886 raise EngineException(
887 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
888 cpd_id
889 ),
890 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
891 )
892 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
893
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """Check that any VDU-level juju charm declared in the day1-2
        configuration is actually shipped inside the package.

        :param storage_params: "_admin.storage" info locating the package
        :param indata: full VNFD content
        :raise EngineException: when a charm is declared but neither 'charms'
            nor 'Scripts/charms' exists in the package
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # NOTE(review): matches a day1-2 entry whose id equals
                        # the vdu-profile id and that declares a juju execution
                        # environment — confirm the id comparison is intended.
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            # Charm may live in either of the two package layouts.
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
919
920 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
921 if not vdu.get("cloud-init-file"):
922 return
923 if not self._validate_package_folders(
924 storage_params, "cloud_init", vdu["cloud-init-file"]
925 ) and not self._validate_package_folders(
926 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
927 ):
928 raise EngineException(
929 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
930 "package".format(indata["id"], vdu["id"])
931 )
932
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """Check that a VNF-level juju charm declared in the day1-2
        configuration (entry whose id equals the VNFD id) is shipped inside
        the package.

        :raise EngineException: when declared but missing from the package
        """
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these returns abort the whole loop at the first df
            # without lcm configuration, skipping any remaining dfs — confirm
            # this is intended rather than a 'continue'.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        # Charm may live in either of the two package layouts.
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
960
961 def _validate_package_folders(self, storage_params, folder, file=None):
962 if not storage_params:
963 return False
964 elif not storage_params.get("pkg-dir"):
965 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
966 f = "{}_/{}".format(
967 storage_params["folder"], folder
968 )
969 else:
970 f = "{}/{}".format(
971 storage_params["folder"], folder
972 )
973 if file:
974 return self.fs.file_exists("{}/{}".format(f, file), "file")
975 else:
976 f = f+"/"
977 if self.fs.file_exists(f, "dir"):
978 if self.fs.dir_ls(f):
979 return True
980 return False
981 else:
982 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
983 f = "{}_/{}/{}".format(
984 storage_params["folder"], storage_params["pkg-dir"], folder
985 )
986 else:
987 f = "{}/{}/{}".format(
988 storage_params["folder"], storage_params["pkg-dir"], folder
989 )
990 if file:
991 return self.fs.file_exists("{}/{}".format(f, file), "file")
992 else:
993 if self.fs.file_exists(f, "dir"):
994 if self.fs.dir_ls(f):
995 return True
996 return False
997
998 @staticmethod
999 def validate_internal_virtual_links(indata):
1000 all_ivld_ids = set()
1001 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1002 ivld_id = ivld.get("id")
1003 if ivld_id and ivld_id in all_ivld_ids:
1004 raise EngineException(
1005 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1006 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1007 )
1008 else:
1009 all_ivld_ids.add(ivld_id)
1010
1011 for vdu in get_iterable(indata.get("vdu")):
1012 for int_cpd in get_iterable(vdu.get("int-cpd")):
1013 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1014 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1015 raise EngineException(
1016 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1017 "int-virtual-link-desc".format(
1018 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1019 ),
1020 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1021 )
1022
1023 for df in get_iterable(indata.get("df")):
1024 for vlp in get_iterable(df.get("virtual-link-profile")):
1025 vlp_ivld_id = vlp.get("id")
1026 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1027 raise EngineException(
1028 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1029 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1030 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1031 )
1032
1033 @staticmethod
1034 def validate_monitoring_params(indata):
1035 all_monitoring_params = set()
1036 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1037 for mp in get_iterable(ivld.get("monitoring-parameters")):
1038 mp_id = mp.get("id")
1039 if mp_id and mp_id in all_monitoring_params:
1040 raise EngineException(
1041 "Duplicated monitoring-parameter id in "
1042 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1043 ivld["id"], mp_id
1044 ),
1045 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1046 )
1047 else:
1048 all_monitoring_params.add(mp_id)
1049
1050 for vdu in get_iterable(indata.get("vdu")):
1051 for mp in get_iterable(vdu.get("monitoring-parameter")):
1052 mp_id = mp.get("id")
1053 if mp_id and mp_id in all_monitoring_params:
1054 raise EngineException(
1055 "Duplicated monitoring-parameter id in "
1056 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1057 vdu["id"], mp_id
1058 ),
1059 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1060 )
1061 else:
1062 all_monitoring_params.add(mp_id)
1063
1064 for df in get_iterable(indata.get("df")):
1065 for mp in get_iterable(df.get("monitoring-parameter")):
1066 mp_id = mp.get("id")
1067 if mp_id and mp_id in all_monitoring_params:
1068 raise EngineException(
1069 "Duplicated monitoring-parameter id in "
1070 "df[id='{}']:monitoring-parameter[id='{}']".format(
1071 df["id"], mp_id
1072 ),
1073 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1074 )
1075 else:
1076 all_monitoring_params.add(mp_id)
1077
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling-aspect sections of every deployment flavour.

        Two checks per df:scaling-aspect:
        - every scaling-policy:scaling-criteria:vnf-monitoring-param-ref must
          point to a monitoring parameter declared anywhere in the descriptor
          (int-virtual-link-desc, vdu or df scope);
        - every scaling-config-action requires a day1-2 configuration entry
          matching the VNF id, and its vnf-config-primitive-name-ref must
          match a config-primitive name in each day1-2 configuration.

        :param indata: VNF descriptor
        :raises EngineException: 422 on any dangling reference
        """
        # Collect every monitoring-parameter id from all three scopes; they
        # share one namespace (uniqueness is enforced separately by
        # validate_monitoring_params).
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action is only meaningful when a day1-2
                    # configuration entry with the VNF id exists.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # for/else idiom: "break" when the referenced primitive is
                    # found in this configuration; otherwise the else clause
                    # raises. NOTE(review): each day1-2 configuration is
                    # checked independently, so every one of them must define
                    # the primitive — confirm this is the intended contract.
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1158
1159 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1160 """
1161 Deletes associate file system storage (via super)
1162 Deletes associated vnfpkgops from database.
1163 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1164 :param _id: server internal id
1165 :param db_content: The database content of the descriptor
1166 :return: None
1167 :raises: FsException in case of error while deleting associated storage
1168 """
1169 super().delete_extra(session, _id, db_content, not_send_msg)
1170 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1171 self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
1172
1173 def sol005_projection(self, data):
1174 data["onboardingState"] = data["_admin"]["onboardingState"]
1175 data["operationalState"] = data["_admin"]["operationalState"]
1176 data["usageState"] = data["_admin"]["usageState"]
1177
1178 links = {}
1179 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1180 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1181 links["packageContent"] = {
1182 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1183 }
1184 data["_links"] = links
1185
1186 return super().sol005_projection(data)
1187
1188
class NsdTopic(DescriptorTopic):
    """SOL005 NS descriptors topic ("nsds" collection)."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the ETSI SOL006 NSD pyangbind model.

        :param item: topic name (kept for interface compatibility)
        :param data: NSD content to validate
        :param force: when True, unknown fields are skipped instead of rejected
        :return: the validated, normalized descriptor content
        :raises EngineException: 422 for old-format or invalid descriptors
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # df[0]:vnf-profile is saved before the pyangbind round-trip and
            # restored afterwards
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        """Return True when *data* uses the pre-SOL006 nsd-catalog format."""
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nsd"/"etsi-nfv-nsd:nsd" envelope and return the single
        NSD item contained in it.

        :param indata: raw uploaded content
        :return: the inner descriptor dict ({} when indata is empty)
        :raises EngineException: when 'nsd' is not a single-element list
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Normalize and validate a newly uploaded NSD.

        :param indata: uploaded descriptor content
        :param storage_params: _admin.storage section (unused for NSD)
        :param force: skip unknown fields during pyangbind validation
        :return: the validated descriptor
        """
        # read-only SOL005 state attributes must not be stored
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if it contains cloud-init-file or charms, artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on management networks.

        :param vld: one virtual-link-desc entry
        :param indata: the full NSD
        :raises EngineException: 422 when a virtual-link-profile for a
            mgmt-network vld sets virtual-link-protocol-data
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df:vnf-profile:vnfd-id is listed in the NSD's top-level
        vnfd-id list.

        :param indata: the full NSD
        :raises EngineException: 422 on a dangling vnfd-id reference
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance instead of type(...) == dict: also accepts dict
            # subclasses and is the idiomatic type check
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return {vnfd-id: vnfd} for every vnfd referenced by *descriptor*.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: NSD whose 'vnfd-id' list is resolved
        :return: dict mapping vnfd id to the stored vnfd document
        :raises EngineException: 409 when a referenced vnfd does not exist
            within the project scope
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check that every constituent-cpd-id in *df* exists as an ext-cpd of
        the corresponding vnfd.

        :param df: one deployment flavour of the NSD
        :param vnfds_index: {vnfd-id: vnfd} as built by
            _get_descriptor_constituent_vnfds_index
        :raises EngineException: 422 on a dangling constituent-cpd-id
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            # assumes every vnf-profile vnfd-id is present in vnfds_index
            # (guaranteed by the earlier vnfd-id validations)
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base conflict checks, then validate vnfd dependencies."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super)
        Deletes associated revision documents from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # revision documents are stored as "<_id>:<revision>"; the substring
        # regex matches all of them
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    def sol005_projection(self, data):
        """Expose SOL005 state attributes and _links for an NS descriptor."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1475
1476
class NstTopic(DescriptorTopic):
    """Network Slice Template descriptors topic ("nsts" collection)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate *data* against the NST information model using pyangbind.

        :param item: topic name (kept for interface compatibility)
        :param data: NST content to validate
        :param force: when True, unknown fields are skipped instead of rejected
        :return: the validated, normalized descriptor content
        :raises EngineException: 422 on invalid descriptors
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nst"/"nst:nst" envelope and return the single NST item.

        :param indata: raw uploaded content
        :return: the inner descriptor dict ({} when indata is empty)
        :raises EngineException: when the envelope is not a single-element list
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                # message wording aligned with NsdTopic._remove_envelop
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Normalize and validate a newly uploaded NST.

        Read-only SOL005 state attributes are dropped before validation.

        :param indata: uploaded descriptor content
        :param storage_params: _admin.storage section (unused for NST)
        :param force: skip unknown fields during pyangbind validation
        :return: a shallow copy of the validated descriptor
        """
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base conflict checks, then verify the referenced NSDs exist."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose SOL005 state attributes and _links for a netslice template."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1594
1595
class PduTopic(BaseTopic):
    """Physical Deployment Unit descriptors topic ("pdus" collection)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Apply base formatting, then set the initial _admin states."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        query_filter = self._get_project_filter(session)
        query_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", query_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1631
1632
class VnfPkgOpTopic(BaseTopic):
    """SOL005 VNF package operation occurrences ("vnfpkgops" collection).

    Entries are create-only: edit, delete and delete_list are rejected.
    """

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Always rejected: package operation occurrences cannot be edited."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Always rejected: package operation occurrences cannot be deleted."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Always rejected: package operation occurrences cannot be deleted."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
             op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # locate the referenced KDU inside the vnfd; for/else raises when absent
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        if helm_chart:
            indata["helm-chart"] = helm_chart
            # a "<repo>/<name>" reference points at a named k8s repository
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        if repo_name:
            # resolve the repository within the same project scope (filter_q
            # still carries the project filter)
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # notify the message bus so the requested operation gets executed
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None