Code Coverage

Cobertura Coverage Report > osm_nbi >

descriptor_topics.py

Trend

File Coverage summary

Name | Classes | Lines | Conditionals
descriptor_topics.py
100%
1/1
67%
673/1001
100%
0/0

Coverage Breakdown by Class

Name | Lines | Conditionals
descriptor_topics.py
67%
673/1001
N/A

Source

osm_nbi/descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 #    http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 1 import tarfile
17 1 import yaml
18 1 import json
19 1 import copy
20 1 import os
21 1 import shutil
22 1 import functools
23 1 import re
24
25 # import logging
26 1 from deepdiff import DeepDiff
27 1 from hashlib import md5
28 1 from osm_common.dbbase import DbException, deep_update_rfc7396
29 1 from http import HTTPStatus
30 1 from time import time
31 1 from uuid import uuid4
32 1 from re import fullmatch
33 1 from zipfile import ZipFile
34 1 from urllib.parse import urlparse
35 1 from osm_nbi.validation import (
36     ValidationError,
37     pdu_new_schema,
38     pdu_edit_schema,
39     validate_input,
40     vnfpkgop_new_schema,
41 )
42 1 from osm_nbi.base_topic import (
43     BaseTopic,
44     EngineException,
45     get_iterable,
46     detect_descriptor_usage,
47 )
48 1 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
49 1 from osm_im.nst import nst as nst_im
50 1 from pyangbind.lib.serialise import pybindJSONDecoder
51 1 import pyangbind.lib.pybindJSON as pybindJSON
52 1 from osm_nbi import utils
53
54 1 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
55
56 1 valid_helm_chart_re = re.compile(
57     r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
58 )
59
60
61 1 class DescriptorTopic(BaseTopic):
    def __init__(self, db, fs, msg, auth):
        """
        Construct a descriptor topic handler.

        :param db: database driver (forwarded to BaseTopic)
        :param fs: file-storage driver (forwarded to BaseTopic)
        :param msg: message-bus driver (forwarded to BaseTopic)
        :param auth: authentication/authorization connector (forwarded to BaseTopic)
        """
        super().__init__(db, fs, msg, auth)
64
    def _validate_input_new(self, indata, storage_params, force=False):
        """
        Hook to validate a descriptor on creation/edit.

        This base implementation performs no validation and returns the input
        unchanged; subclasses are expected to override it (it is invoked from
        check_conflict_on_edit with the "_admin.storage" data).

        :param indata: descriptor content to validate
        :param storage_params: storage information ("_admin.storage") of the descriptor
        :param force: when True, overrides may skip/ignore validation errors
        :return: the (possibly serialized) descriptor content
        """
        return indata
67
68 1     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
69 1         final_content = super().check_conflict_on_edit(
70             session, final_content, edit_content, _id
71         )
72
73 1         def _check_unique_id_name(descriptor, position=""):
74 1             for desc_key, desc_item in descriptor.items():
75 1                 if isinstance(desc_item, list) and desc_item:
76 1                     used_ids = []
77 1                     desc_item_id = None
78 1                     for index, list_item in enumerate(desc_item):
79 1                         if isinstance(list_item, dict):
80 1                             _check_unique_id_name(
81                                 list_item, "{}.{}[{}]".format(position, desc_key, index)
82                             )
83                             # Base case
84 1                             if index == 0 and (
85                                 list_item.get("id") or list_item.get("name")
86                             ):
87 1                                 desc_item_id = "id" if list_item.get("id") else "name"
88 1                             if desc_item_id and list_item.get(desc_item_id):
89 1                                 if list_item[desc_item_id] in used_ids:
90 1                                     position = "{}.{}[{}]".format(
91                                         position, desc_key, index
92                                     )
93 1                                     raise EngineException(
94                                         "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
95                                             desc_item_id,
96                                             list_item[desc_item_id],
97                                             position,
98                                         ),
99                                         HTTPStatus.UNPROCESSABLE_ENTITY,
100                                     )
101 1                                 used_ids.append(list_item[desc_item_id])
102
103 1         _check_unique_id_name(final_content)
104         # 1. validate again with pyangbind
105         # 1.1. remove internal keys
106 1         internal_keys = {}
107 1         for k in ("_id", "_admin"):
108 1             if k in final_content:
109 1                 internal_keys[k] = final_content.pop(k)
110 1         storage_params = internal_keys["_admin"].get("storage")
111 1         serialized = self._validate_input_new(
112             final_content, storage_params, session["force"]
113         )
114
115         # 1.2. modify final_content with a serialized version
116 1         final_content = copy.deepcopy(serialized)
117         # 1.3. restore internal keys
118 1         for k, v in internal_keys.items():
119 1             final_content[k] = v
120 1         if session["force"]:
121 1             return final_content
122
123         # 2. check that this id is not present
124 1         if "id" in edit_content:
125 1             _filter = self._get_project_filter(session)
126
127 1             _filter["id"] = final_content["id"]
128 1             _filter["_id.neq"] = _id
129
130 1             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
131 1                 raise EngineException(
132                     "{} with id '{}' already exists for this project".format(
133                         (str(self.topic))[:-1], final_content["id"]
134                     ),
135                     HTTPStatus.CONFLICT,
136                 )
137
138 1         return final_content
139
140 1     @staticmethod
141 1     def format_on_new(content, project_id=None, make_public=False):
142 1         BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
143 1         content["_admin"]["onboardingState"] = "CREATED"
144 1         content["_admin"]["operationalState"] = "DISABLED"
145 1         content["_admin"]["usageState"] = "NOT_IN_USE"
146
147 1     def delete_extra(self, session, _id, db_content, not_send_msg=None):
148         """
149         Deletes file system storage associated with the descriptor
150         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
151         :param _id: server internal id
152         :param db_content: The database content of the descriptor
153         :param not_send_msg: To not send message (False) or store content (list) instead
154         :return: None if ok or raises EngineException with the problem
155         """
156 1         self.fs.file_delete(_id, ignore_non_exist=True)
157 1         self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
158         # Remove file revisions
159 1         if "revision" in db_content["_admin"]:
160 0             revision = db_content["_admin"]["revision"]
161 0             while revision > 0:
162 0                 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
163 0                 revision = revision - 1
164
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Return the single descriptor of `topic` whose "id" matches, scoped to
        the session's project.

        :param db: database driver
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name (e.g. "vnfds"); topic[:-1] is used in messages
        :param id: descriptor "id" field (not the internal "_id")
        :return: the matching descriptor database content
        :raises DbException: NOT_FOUND when nothing matches, CONFLICT when several match
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this second lookup rebuilds the exact same filter as the
        # first one (no public flag is added), so it repeats the empty result
        # and always ends in NOT_FOUND -- confirm whether a public-scoped
        # filter was intended here.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
199
200 1     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
201         """
202         Creates a new almost empty DISABLED  entry into database. Due to SOL005, it does not follow normal procedure.
203         Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
204         (self.upload_content)
205         :param rollback: list to append created items at database in case a rollback may to be done
206         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
207         :param indata: data to be inserted
208         :param kwargs: used to override the indata descriptor
209         :param headers: http request headers
210         :return: _id, None: identity of the inserted data; and None as there is not any operation
211         """
212
213         # No needed to capture exceptions
214         # Check Quota
215 1         self.check_quota(session)
216
217         # _remove_envelop
218 1         if indata:
219 0             if "userDefinedData" in indata:
220 0                 indata = indata["userDefinedData"]
221
222         # Override descriptor with query string kwargs
223 1         self._update_input_with_kwargs(indata, kwargs)
224         # uncomment when this method is implemented.
225         # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
226         # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
227
228 1         content = {"_admin": {"userDefinedData": indata, "revision": 0}}
229
230 1         self.format_on_new(
231             content, session["project_id"], make_public=session["public"]
232         )
233 1         _id = self.db.create(self.topic, content)
234 1         rollback.append({"topic": self.topic, "_id": _id})
235 1         self._send_msg("created", {"_id": _id})
236 1         return _id, None
237
238 1     def upload_content(self, session, _id, indata, kwargs, headers):
239         """
240         Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
241         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
242         :param _id : the nsd,vnfd is already created, this is the id
243         :param indata: http body request
244         :param kwargs: user query string to override parameters. NOT USED
245         :param headers:  http request headers
246         :return: True if package is completely uploaded or False if partial content has been uploded
247             Raise exception on error
248         """
249         # Check that _id exists and it is valid
250 1         current_desc = self.show(session, _id)
251
252 1         content_range_text = headers.get("Content-Range")
253 1         expected_md5 = headers.get("Content-File-MD5")
254 1         compressed = None
255 1         content_type = headers.get("Content-Type")
256 1         if (
257             content_type
258             and "application/gzip" in content_type
259             or "application/x-gzip" in content_type
260         ):
261 0             compressed = "gzip"
262 1         if content_type and "application/zip" in content_type:
263 0             compressed = "zip"
264 1         filename = headers.get("Content-Filename")
265 1         if not filename and compressed:
266 0             filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
267 1         elif not filename:
268 1             filename = "package"
269
270 1         revision = 1
271 1         if "revision" in current_desc["_admin"]:
272 1             revision = current_desc["_admin"]["revision"] + 1
273
274         # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
275 1         file_pkg = None
276 1         error_text = ""
277 1         fs_rollback = []
278
279 1         try:
280 1             if content_range_text:
281 0                 content_range = (
282                     content_range_text.replace("-", " ").replace("/", " ").split()
283                 )
284 0                 if (
285                     content_range[0] != "bytes"
286                 ):  # TODO check x<y not negative < total....
287 0                     raise IndexError()
288 0                 start = int(content_range[1])
289 0                 end = int(content_range[2]) + 1
290 0                 total = int(content_range[3])
291             else:
292 1                 start = 0
293             # Rather than using a temp folder, we will store the package in a folder based on
294             # the current revision.
295 1             proposed_revision_path = (
296                 _id + ":" + str(revision)
297             )  # all the content is upload here and if ok, it is rename from id_ to is folder
298
299 1             if start:
300 0                 if not self.fs.file_exists(proposed_revision_path, "dir"):
301 0                     raise EngineException(
302                         "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
303                     )
304             else:
305 1                 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
306 1                 self.fs.mkdir(proposed_revision_path)
307 1                 fs_rollback.append(proposed_revision_path)
308
309 1             storage = self.fs.get_params()
310 1             storage["folder"] = proposed_revision_path
311
312 1             file_path = (proposed_revision_path, filename)
313 1             if self.fs.file_exists(file_path, "file"):
314 0                 file_size = self.fs.file_size(file_path)
315             else:
316 1                 file_size = 0
317 1             if file_size != start:
318 0                 raise EngineException(
319                     "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
320                         file_size, start
321                     ),
322                     HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
323                 )
324 1             file_pkg = self.fs.file_open(file_path, "a+b")
325 1             if isinstance(indata, dict):
326 1                 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
327 1                 file_pkg.write(indata_text.encode(encoding="utf-8"))
328             else:
329 0                 indata_len = 0
330 0                 while True:
331 0                     indata_text = indata.read(4096)
332 0                     indata_len += len(indata_text)
333 0                     if not indata_text:
334 0                         break
335 0                     file_pkg.write(indata_text)
336 1             if content_range_text:
337 0                 if indata_len != end - start:
338 0                     raise EngineException(
339                         "Mismatch between Content-Range header {}-{} and body length of {}".format(
340                             start, end - 1, indata_len
341                         ),
342                         HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
343                     )
344 0                 if end != total:
345                     # TODO update to UPLOADING
346 0                     return False
347
348             # PACKAGE UPLOADED
349 1             if expected_md5:
350 0                 file_pkg.seek(0, 0)
351 0                 file_md5 = md5()
352 0                 chunk_data = file_pkg.read(1024)
353 0                 while chunk_data:
354 0                     file_md5.update(chunk_data)
355 0                     chunk_data = file_pkg.read(1024)
356 0                 if expected_md5 != file_md5.hexdigest():
357 0                     raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
358 1             file_pkg.seek(0, 0)
359 1             if compressed == "gzip":
360 0                 tar = tarfile.open(mode="r", fileobj=file_pkg)
361 0                 descriptor_file_name = None
362 0                 for tarinfo in tar:
363 0                     tarname = tarinfo.name
364 0                     tarname_path = tarname.split("/")
365 0                     if (
366                         not tarname_path[0] or ".." in tarname_path
367                     ):  # if start with "/" means absolute path
368 0                         raise EngineException(
369                             "Absolute path or '..' are not allowed for package descriptor tar.gz"
370                         )
371 0                     if len(tarname_path) == 1 and not tarinfo.isdir():
372 0                         raise EngineException(
373                             "All files must be inside a dir for package descriptor tar.gz"
374                         )
375 0                     if (
376                         tarname.endswith(".yaml")
377                         or tarname.endswith(".json")
378                         or tarname.endswith(".yml")
379                     ):
380 0                         storage["pkg-dir"] = tarname_path[0]
381 0                         if len(tarname_path) == 2:
382 0                             if descriptor_file_name:
383 0                                 raise EngineException(
384                                     "Found more than one descriptor file at package descriptor tar.gz"
385                                 )
386 0                             descriptor_file_name = tarname
387 0                 if not descriptor_file_name:
388 0                     raise EngineException(
389                         "Not found any descriptor file at package descriptor tar.gz"
390                     )
391 0                 storage["descriptor"] = descriptor_file_name
392 0                 storage["zipfile"] = filename
393 0                 self.fs.file_extract(tar, proposed_revision_path)
394 0                 with self.fs.file_open(
395                     (proposed_revision_path, descriptor_file_name), "r"
396                 ) as descriptor_file:
397 0                     content = descriptor_file.read()
398 1             elif compressed == "zip":
399 0                 zipfile = ZipFile(file_pkg)
400 0                 descriptor_file_name = None
401 0                 for package_file in zipfile.infolist():
402 0                     zipfilename = package_file.filename
403 0                     file_path = zipfilename.split("/")
404 0                     if (
405                         not file_path[0] or ".." in zipfilename
406                     ):  # if start with "/" means absolute path
407 0                         raise EngineException(
408                             "Absolute path or '..' are not allowed for package descriptor zip"
409                         )
410
411 0                     if (
412                         zipfilename.endswith(".yaml")
413                         or zipfilename.endswith(".json")
414                         or zipfilename.endswith(".yml")
415                     ) and (
416                         zipfilename.find("/") < 0
417                         or zipfilename.find("Definitions") >= 0
418                     ):
419 0                         storage["pkg-dir"] = ""
420 0                         if descriptor_file_name:
421 0                             raise EngineException(
422                                 "Found more than one descriptor file at package descriptor zip"
423                             )
424 0                         descriptor_file_name = zipfilename
425 0                 if not descriptor_file_name:
426 0                     raise EngineException(
427                         "Not found any descriptor file at package descriptor zip"
428                     )
429 0                 storage["descriptor"] = descriptor_file_name
430 0                 storage["zipfile"] = filename
431 0                 self.fs.file_extract(zipfile, proposed_revision_path)
432
433 0                 with self.fs.file_open(
434                     (proposed_revision_path, descriptor_file_name), "r"
435                 ) as descriptor_file:
436 0                     content = descriptor_file.read()
437             else:
438 1                 content = file_pkg.read()
439 1                 storage["descriptor"] = descriptor_file_name = filename
440
441 1             if descriptor_file_name.endswith(".json"):
442 0                 error_text = "Invalid json format "
443 0                 indata = json.load(content)
444             else:
445 1                 error_text = "Invalid yaml format "
446 1                 indata = yaml.safe_load(content)
447
448             # Need to close the file package here so it can be copied from the
449             # revision to the current, unrevisioned record
450 1             if file_pkg:
451 1                 file_pkg.close()
452 1             file_pkg = None
453
454             # Fetch both the incoming, proposed revision and the original revision so we
455             # can call a validate method to compare them
456 1             current_revision_path = _id + "/"
457 1             self.fs.sync(from_path=current_revision_path)
458 1             self.fs.sync(from_path=proposed_revision_path)
459
460 1             if revision > 1:
461 1                 try:
462 1                     self._validate_descriptor_changes(
463                         _id,
464                         descriptor_file_name,
465                         current_revision_path,
466                         proposed_revision_path,
467                     )
468 0                 except Exception as e:
469 0                     shutil.rmtree(
470                         self.fs.path + current_revision_path, ignore_errors=True
471                     )
472 0                     shutil.rmtree(
473                         self.fs.path + proposed_revision_path, ignore_errors=True
474                     )
475                     # Only delete the new revision.  We need to keep the original version in place
476                     # as it has not been changed.
477 0                     self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
478 0                     raise e
479
480 1             indata = self._remove_envelop(indata)
481
482             # Override descriptor with query string kwargs
483 1             if kwargs:
484 0                 self._update_input_with_kwargs(indata, kwargs)
485
486 1             current_desc["_admin"]["storage"] = storage
487 1             current_desc["_admin"]["onboardingState"] = "ONBOARDED"
488 1             current_desc["_admin"]["operationalState"] = "ENABLED"
489 1             current_desc["_admin"]["modified"] = time()
490 1             current_desc["_admin"]["revision"] = revision
491
492 1             deep_update_rfc7396(current_desc, indata)
493 1             current_desc = self.check_conflict_on_edit(
494                 session, current_desc, indata, _id=_id
495             )
496
497             # Copy the revision to the active package name by its original id
498 1             shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
499 1             os.rename(
500                 self.fs.path + proposed_revision_path,
501                 self.fs.path + current_revision_path,
502             )
503 1             self.fs.file_delete(current_revision_path, ignore_non_exist=True)
504 1             self.fs.mkdir(current_revision_path)
505 1             self.fs.reverse_sync(from_path=current_revision_path)
506
507 1             shutil.rmtree(self.fs.path + _id)
508
509 1             self.db.replace(self.topic, _id, current_desc)
510
511             #  Store a copy of the package as a point in time revision
512 1             revision_desc = dict(current_desc)
513 1             revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
514 1             self.db.create(self.topic + "_revisions", revision_desc)
515 1             fs_rollback = []
516
517 1             indata["_id"] = _id
518 1             self._send_msg("edited", indata)
519
520             # TODO if descriptor has changed because kwargs update content and remove cached zip
521             # TODO if zip is not present creates one
522 1             return True
523
524 1         except EngineException:
525 1             raise
526 0         except IndexError:
527 0             raise EngineException(
528                 "invalid Content-Range header format. Expected 'bytes start-end/total'",
529                 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
530             )
531 0         except IOError as e:
532 0             raise EngineException(
533                 "invalid upload transaction sequence: '{}'".format(e),
534                 HTTPStatus.BAD_REQUEST,
535             )
536 0         except tarfile.ReadError as e:
537 0             raise EngineException(
538                 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
539             )
540 0         except (ValueError, yaml.YAMLError) as e:
541 0             raise EngineException(error_text + str(e))
542 0         except ValidationError as e:
543 0             raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
544         finally:
545 1             if file_pkg:
546 0                 file_pkg.close()
547 1             for file in fs_rollback:
548 1                 self.fs.file_delete(file, ignore_non_exist=True)
549
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Work out which response formats the client accepts
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        # A package can only be downloaded once its content has been uploaded
        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            # NOTE(review): path is unpacked with *path below, so it is
            # presumably a sequence of path components -- confirm with callers
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # Directories are listed as plain text; files are streamed as binary
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype   accept  ZIP  TEXT    -> result
        # manyfiles         yes  X       -> zip
        #                   no   yes     -> error
        # onefile           yes  no      -> zip
        #                   X    yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
640
641 1     def _remove_yang_prefixes_from_descriptor(self, descriptor):
642 1         new_descriptor = {}
643 1         for k, v in descriptor.items():
644 1             new_v = v
645 1             if isinstance(v, dict):
646 1                 new_v = self._remove_yang_prefixes_from_descriptor(v)
647 1             elif isinstance(v, list):
648 1                 new_v = list()
649 1                 for x in v:
650 1                     if isinstance(x, dict):
651 1                         new_v.append(self._remove_yang_prefixes_from_descriptor(x))
652                     else:
653 1                         new_v.append(x)
654 1             new_descriptor[k.split(":")[-1]] = new_v
655 1         return new_descriptor
656
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate `data` against the YANG (pyangbind) model of `item`.

        This base implementation always fails; subclasses that own a YANG
        model are expected to override it.

        :param item: topic/descriptor kind being validated (used in the error message)
        :param data: descriptor content to validate
        :param force: unused here; overrides may use it to relax validation
        :raises EngineException: always, with INTERNAL_SERVER_ERROR
        """
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
662
663 1     def _validate_input_edit(self, indata, content, force=False):
664         # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
665 1         if "_id" in indata:
666 0             indata.pop("_id")
667 1         if "_admin" not in indata:
668 1             indata["_admin"] = {}
669
670 1         if "operationalState" in indata:
671 0             if indata["operationalState"] in ("ENABLED", "DISABLED"):
672 0                 indata["_admin"]["operationalState"] = indata.pop("operationalState")
673             else:
674 0                 raise EngineException(
675                     "State '{}' is not a valid operational state".format(
676                         indata["operationalState"]
677                     ),
678                     http_code=HTTPStatus.BAD_REQUEST,
679                 )
680
681         # In the case of user defined data, we need to put the data in the root of the object
682         # to preserve current expected behaviour
683 1         if "userDefinedData" in indata:
684 0             data = indata.pop("userDefinedData")
685 0             if isinstance(data, dict):
686 0                 indata["_admin"]["userDefinedData"] = data
687             else:
688 0                 raise EngineException(
689                     "userDefinedData should be an object, but is '{}' instead".format(
690                         type(data)
691                     ),
692                     http_code=HTTPStatus.BAD_REQUEST,
693                 )
694
695 1         if (
696             "operationalState" in indata["_admin"]
697             and content["_admin"]["operationalState"]
698             == indata["_admin"]["operationalState"]
699         ):
700 0             raise EngineException(
701                 "operationalState already {}".format(
702                     content["_admin"]["operationalState"]
703                 ),
704                 http_code=HTTPStatus.CONFLICT,
705             )
706
707 1         return indata
708
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook to validate a descriptor update against its previous revision.

        The base implementation accepts any change; subclasses override it to
        reject modifications of nodes that must not change between revisions.

        :param descriptor_id: internal id of the descriptor being updated
        :param descriptor_file_name: descriptor file name inside the package
        :param old_descriptor_directory: directory of the descriptor in use
        :param new_descriptor_directory: directory of the proposed new revision
        :return: None
        """
        # Example:
        #    raise EngineException(
        #           "Error in validating new descriptor: <NODE> cannot be modified",
        #           http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        #    )
        pass
722
723
724 1 class VnfdTopic(DescriptorTopic):
725 1     topic = "vnfds"
726 1     topic_msg = "vnfd"
727
728 1     def __init__(self, db, fs, msg, auth):
729 1         DescriptorTopic.__init__(self, db, fs, msg, auth)
730
731 1     def pyangbind_validation(self, item, data, force=False):
732 1         if self._descriptor_data_is_in_old_format(data):
733 0             raise EngineException(
734                 "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
735                 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
736             )
737 1         try:
738 1             myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
739 1             pybindJSONDecoder.load_ietf_json(
740                 {"etsi-nfv-vnfd:vnfd": data},
741                 None,
742                 None,
743                 obj=myvnfd,
744                 path_helper=True,
745                 skip_unknown=force,
746             )
747 1             out = pybindJSON.dumps(myvnfd, mode="ietf")
748 1             desc_out = self._remove_envelop(yaml.safe_load(out))
749 1             desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
750 1             return utils.deep_update_dict(data, desc_out)
751 1         except Exception as e:
752 1             raise EngineException(
753                 "Error in pyangbind validation: {}".format(str(e)),
754                 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
755             )
756
757 1     @staticmethod
758 1     def _descriptor_data_is_in_old_format(data):
759 1         return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
760
761 1     @staticmethod
762 1     def _remove_envelop(indata=None):
763 1         if not indata:
764 0             return {}
765 1         clean_indata = indata
766
767 1         if clean_indata.get("etsi-nfv-vnfd:vnfd"):
768 1             if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
769 0                 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
770 1             clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
771 1         elif clean_indata.get("vnfd"):
772 1             if not isinstance(clean_indata["vnfd"], dict):
773 1                 raise EngineException("'vnfd' must be dict")
774 0             clean_indata = clean_indata["vnfd"]
775
776 1         return clean_indata
777
778 1     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
779 1         final_content = super().check_conflict_on_edit(
780             session, final_content, edit_content, _id
781         )
782
783         # set type of vnfd
784 1         contains_pdu = False
785 1         contains_vdu = False
786 1         for vdu in get_iterable(final_content.get("vdu")):
787 1             if vdu.get("pdu-type"):
788 0                 contains_pdu = True
789             else:
790 1                 contains_vdu = True
791 1         if contains_pdu:
792 0             final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
793 1         elif contains_vdu:
794 1             final_content["_admin"]["type"] = "vnfd"
795         # if neither vud nor pdu do not fill type
796 1         return final_content
797
798 1     def check_conflict_on_del(self, session, _id, db_content):
799         """
800         Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
801         that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
802         that uses this vnfd
803         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
804         :param _id: vnfd internal id
805         :param db_content: The database content of the _id.
806         :return: None or raises EngineException with the conflict
807         """
808 1         if session["force"]:
809 0             return
810 1         descriptor = db_content
811 1         descriptor_id = descriptor.get("id")
812 1         if not descriptor_id:  # empty vnfd not uploaded
813 0             return
814
815 1         _filter = self._get_project_filter(session)
816
817         # check vnfrs using this vnfd
818 1         _filter["vnfd-id"] = _id
819 1         if self.db.get_list("vnfrs", _filter):
820 1             raise EngineException(
821                 "There is at least one VNF instance using this descriptor",
822                 http_code=HTTPStatus.CONFLICT,
823             )
824
825         # check NSD referencing this VNFD
826 1         del _filter["vnfd-id"]
827 1         _filter["vnfd-id"] = descriptor_id
828 1         if self.db.get_list("nsds", _filter):
829 1             raise EngineException(
830                 "There is at least one NS package referencing this descriptor",
831                 http_code=HTTPStatus.CONFLICT,
832             )
833
834 1     def _validate_input_new(self, indata, storage_params, force=False):
835 1         indata.pop("onboardingState", None)
836 1         indata.pop("operationalState", None)
837 1         indata.pop("usageState", None)
838 1         indata.pop("links", None)
839
840 1         indata = self.pyangbind_validation("vnfds", indata, force)
841         # Cross references validation in the descriptor
842
843 1         self.validate_mgmt_interface_connection_point(indata)
844
845 1         for vdu in get_iterable(indata.get("vdu")):
846 1             self.validate_vdu_internal_connection_points(vdu)
847 1             self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
848 1         self._validate_vdu_charms_in_package(storage_params, indata)
849
850 1         self._validate_vnf_charms_in_package(storage_params, indata)
851
852 1         self.validate_external_connection_points(indata)
853 1         self.validate_internal_virtual_links(indata)
854 1         self.validate_monitoring_params(indata)
855 1         self.validate_scaling_group_descriptor(indata)
856 1         self.validate_helm_chart(indata)
857
858 1         return indata
859
860 1     @staticmethod
861 1     def validate_helm_chart(indata):
862 1         def is_url(url):
863 1             result = urlparse(url)
864 1             return all([result.scheme, result.netloc])
865
866 1         kdus = indata.get("kdu", [])
867 1         for kdu in kdus:
868 1             helm_chart_value = kdu.get("helm-chart")
869 1             if not helm_chart_value:
870 0                 continue
871 1             if not (
872                 valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
873             ):
874 1                 raise EngineException(
875                     "helm-chart '{}' is not valid".format(helm_chart_value),
876                     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
877                 )
878
879 1     @staticmethod
880 1     def validate_mgmt_interface_connection_point(indata):
881 1         if not indata.get("vdu"):
882 1             return
883 1         if not indata.get("mgmt-cp"):
884 1             raise EngineException(
885                 "'mgmt-cp' is a mandatory field and it is not defined",
886                 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
887             )
888
889 1         for cp in get_iterable(indata.get("ext-cpd")):
890 1             if cp["id"] == indata["mgmt-cp"]:
891 1                 break
892         else:
893 1             raise EngineException(
894                 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
895                 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
896             )
897
898 1     @staticmethod
899 1     def validate_vdu_internal_connection_points(vdu):
900 1         int_cpds = set()
901 1         for cpd in get_iterable(vdu.get("int-cpd")):
902 1             cpd_id = cpd.get("id")
903 1             if cpd_id and cpd_id in int_cpds:
904 1                 raise EngineException(
905                     "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
906                         vdu["id"], cpd_id
907                     ),
908                     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
909                 )
910 1             int_cpds.add(cpd_id)
911
912 1     @staticmethod
913 1     def validate_external_connection_points(indata):
914 1         all_vdus_int_cpds = set()
915 1         for vdu in get_iterable(indata.get("vdu")):
916 1             for int_cpd in get_iterable(vdu.get("int-cpd")):
917 1                 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
918
919 1         ext_cpds = set()
920 1         for cpd in get_iterable(indata.get("ext-cpd")):
921 1             cpd_id = cpd.get("id")
922 1             if cpd_id and cpd_id in ext_cpds:
923 1                 raise EngineException(
924                     "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
925                     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
926                 )
927 1             ext_cpds.add(cpd_id)
928
929 1             int_cpd = cpd.get("int-cpd")
930 1             if int_cpd:
931 1                 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
932 1                     raise EngineException(
933                         "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
934                             cpd_id
935                         ),
936                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
937                     )
938             # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
939
940 1     def _validate_vdu_charms_in_package(self, storage_params, indata):
941 1         for df in indata["df"]:
942 1             if (
943                 "lcm-operations-configuration" in df
944                 and "operate-vnf-op-config" in df["lcm-operations-configuration"]
945             ):
946 1                 configs = df["lcm-operations-configuration"][
947                     "operate-vnf-op-config"
948                 ].get("day1-2", [])
949 1                 vdus = df.get("vdu-profile", [])
950 1                 for vdu in vdus:
951 1                     for config in configs:
952 1                         if config["id"] == vdu["id"] and utils.find_in_list(
953                             config.get("execution-environment-list", []),
954                             lambda ee: "juju" in ee,
955                         ):
956 0                             if not self._validate_package_folders(
957                                 storage_params, "charms"
958                             ) and not self._validate_package_folders(
959                                 storage_params, "Scripts/charms"
960                             ):
961 0                                 raise EngineException(
962                                     "Charm defined in vnf[id={}] but not present in "
963                                     "package".format(indata["id"])
964                                 )
965
966 1     def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
967 1         if not vdu.get("cloud-init-file"):
968 1             return
969 1         if not self._validate_package_folders(
970             storage_params, "cloud_init", vdu["cloud-init-file"]
971         ) and not self._validate_package_folders(
972             storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
973         ):
974 1             raise EngineException(
975                 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
976                 "package".format(indata["id"], vdu["id"])
977             )
978
979 1     def _validate_vnf_charms_in_package(self, storage_params, indata):
980         # Get VNF configuration through new container
981 1         for deployment_flavor in indata.get("df", []):
982 1             if "lcm-operations-configuration" not in deployment_flavor:
983 1                 return
984 1             if (
985                 "operate-vnf-op-config"
986                 not in deployment_flavor["lcm-operations-configuration"]
987             ):
988 0                 return
989 1             for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
990                 "operate-vnf-op-config"
991             ]["day1-2"]:
992 1                 if day_1_2_config["id"] == indata["id"]:
993 1                     if utils.find_in_list(
994                         day_1_2_config.get("execution-environment-list", []),
995                         lambda ee: "juju" in ee,
996                     ):
997 1                         if not self._validate_package_folders(
998                             storage_params, "charms"
999                         ) and not self._validate_package_folders(
1000                             storage_params, "Scripts/charms"
1001                         ):
1002 1                             raise EngineException(
1003                                 "Charm defined in vnf[id={}] but not present in "
1004                                 "package".format(indata["id"])
1005                             )
1006
1007 1     def _validate_package_folders(self, storage_params, folder, file=None):
1008 1         if not storage_params:
1009 0             return False
1010 1         elif not storage_params.get("pkg-dir"):
1011 0             if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1012 0                 f = "{}_/{}".format(storage_params["folder"], folder)
1013             else:
1014 0                 f = "{}/{}".format(storage_params["folder"], folder)
1015 0             if file:
1016 0                 return self.fs.file_exists("{}/{}".format(f, file), "file")
1017             else:
1018 0                 if self.fs.file_exists(f, "dir"):
1019 0                     if self.fs.dir_ls(f):
1020 0                         return True
1021 0             return False
1022         else:
1023 1             if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
1024 1                 f = "{}_/{}/{}".format(
1025                     storage_params["folder"], storage_params["pkg-dir"], folder
1026                 )
1027             else:
1028 1                 f = "{}/{}/{}".format(
1029                     storage_params["folder"], storage_params["pkg-dir"], folder
1030                 )
1031 1             if file:
1032 1                 return self.fs.file_exists("{}/{}".format(f, file), "file")
1033             else:
1034 1                 if self.fs.file_exists(f, "dir"):
1035 1                     if self.fs.dir_ls(f):
1036 1                         return True
1037 1             return False
1038
1039 1     @staticmethod
1040 1     def validate_internal_virtual_links(indata):
1041 1         all_ivld_ids = set()
1042 1         for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1043 1             ivld_id = ivld.get("id")
1044 1             if ivld_id and ivld_id in all_ivld_ids:
1045 1                 raise EngineException(
1046                     "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1047                     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1048                 )
1049             else:
1050 1                 all_ivld_ids.add(ivld_id)
1051
1052 1         for vdu in get_iterable(indata.get("vdu")):
1053 1             for int_cpd in get_iterable(vdu.get("int-cpd")):
1054 1                 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1055 1                 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1056 1                     raise EngineException(
1057                         "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1058                         "int-virtual-link-desc".format(
1059                             vdu["id"], int_cpd["id"], int_cpd_ivld_id
1060                         ),
1061                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1062                     )
1063
1064 1         for df in get_iterable(indata.get("df")):
1065 1             for vlp in get_iterable(df.get("virtual-link-profile")):
1066 1                 vlp_ivld_id = vlp.get("id")
1067 1                 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1068 1                     raise EngineException(
1069                         "df[id='{}']:virtual-link-profile='{}' must match an existing "
1070                         "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1071                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1072                     )
1073
1074 1     @staticmethod
1075 1     def validate_monitoring_params(indata):
1076 1         all_monitoring_params = set()
1077 1         for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1078 1             for mp in get_iterable(ivld.get("monitoring-parameters")):
1079 1                 mp_id = mp.get("id")
1080 1                 if mp_id and mp_id in all_monitoring_params:
1081 1                     raise EngineException(
1082                         "Duplicated monitoring-parameter id in "
1083                         "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1084                             ivld["id"], mp_id
1085                         ),
1086                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1087                     )
1088                 else:
1089 1                     all_monitoring_params.add(mp_id)
1090
1091 1         for vdu in get_iterable(indata.get("vdu")):
1092 1             for mp in get_iterable(vdu.get("monitoring-parameter")):
1093 1                 mp_id = mp.get("id")
1094 1                 if mp_id and mp_id in all_monitoring_params:
1095 1                     raise EngineException(
1096                         "Duplicated monitoring-parameter id in "
1097                         "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1098                             vdu["id"], mp_id
1099                         ),
1100                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1101                     )
1102                 else:
1103 1                     all_monitoring_params.add(mp_id)
1104
1105 1         for df in get_iterable(indata.get("df")):
1106 1             for mp in get_iterable(df.get("monitoring-parameter")):
1107 1                 mp_id = mp.get("id")
1108 1                 if mp_id and mp_id in all_monitoring_params:
1109 1                     raise EngineException(
1110                         "Duplicated monitoring-parameter id in "
1111                         "df[id='{}']:monitoring-parameter[id='{}']".format(
1112                             df["id"], mp_id
1113                         ),
1114                         http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1115                     )
1116                 else:
1117 1                     all_monitoring_params.add(mp_id)
1118
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling-aspect sections of every deployment flavour.

        Checks that:
        - each scaling-criteria "vnf-monitoring-param-ref" points to a
          monitoring parameter declared anywhere in the descriptor, and
        - each scaling-config-action has a day1-2 configuration for this VNF
          and references an existing config-primitive name.

        :param indata: VNFD as a dictionary
        :raises EngineException: UNPROCESSABLE_ENTITY on a dangling reference
        """
        # Collect every monitoring-parameter id declared in the descriptor
        # (virtual links, VDUs and deployment flavours).
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Every scaling criteria must reference a declared monitoring param.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration
                    # whose id matches the VNFD id.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # NOTE(review): this for/else runs over EVERY day1-2
                    # configuration (not only the one matching the VNF id) and
                    # raises when a configuration has no primitive matching
                    # the action's reference.
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1199
    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: optional list to accumulate messages instead of sending them (forwarded to super)
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove package operations linked to this VNF package.
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # Revision documents presumably embed this _id inside their own _id,
        # hence the regex match — TODO confirm the exact revision id format.
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1213
1214 1     def sol005_projection(self, data):
1215 0         data["onboardingState"] = data["_admin"]["onboardingState"]
1216 0         data["operationalState"] = data["_admin"]["operationalState"]
1217 0         data["usageState"] = data["_admin"]["usageState"]
1218
1219 0         links = {}
1220 0         links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1221 0         links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1222 0         links["packageContent"] = {
1223             "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1224         }
1225 0         data["_links"] = links
1226
1227 0         return super().sol005_projection(data)
1228
1229 1     @staticmethod
1230 1     def find_software_version(vnfd: dict) -> str:
1231         """Find the sotware version in the VNFD descriptors
1232
1233         Args:
1234             vnfd (dict): Descriptor as a dictionary
1235
1236         Returns:
1237             software-version (str)
1238         """
1239 1         default_sw_version = "1.0"
1240 1         if vnfd.get("vnfd"):
1241 0             vnfd = vnfd["vnfd"]
1242 1         if vnfd.get("software-version"):
1243 1             return vnfd["software-version"]
1244         else:
1245 1             return default_sw_version
1246
1247 1     @staticmethod
1248 1     def extract_policies(vnfd: dict) -> dict:
1249         """Removes the policies from the VNFD descriptors
1250
1251         Args:
1252             vnfd (dict):   Descriptor as a dictionary
1253
1254         Returns:
1255             vnfd (dict): VNFD which does not include policies
1256         """
1257 1         for df in vnfd.get("df", {}):
1258 1             for policy in ["scaling-aspect", "healing-aspect"]:
1259 1                 if df.get(policy, {}):
1260 1                     df.pop(policy)
1261 1         for vdu in vnfd.get("vdu", {}):
1262 1             for alarm_policy in ["alarm", "monitoring-parameter"]:
1263 1                 if vdu.get(alarm_policy, {}):
1264 1                     vdu.pop(alarm_policy)
1265 1         return vnfd
1266
1267 1     @staticmethod
1268 1     def extract_day12_primitives(vnfd: dict) -> dict:
1269         """Removes the day12 primitives from the VNFD descriptors
1270
1271         Args:
1272             vnfd (dict):   Descriptor as a dictionary
1273
1274         Returns:
1275             vnfd (dict)
1276         """
1277 1         for df_id, df in enumerate(vnfd.get("df", {})):
1278 1             if (
1279                 df.get("lcm-operations-configuration", {})
1280                 .get("operate-vnf-op-config", {})
1281                 .get("day1-2")
1282             ):
1283 1                 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
1284                     "day1-2"
1285                 )
1286 1                 for config_id, config in enumerate(day12):
1287 1                     for key in [
1288                         "initial-config-primitive",
1289                         "config-primitive",
1290                         "terminate-config-primitive",
1291                     ]:
1292 1                         config.pop(key, None)
1293 1                         day12[config_id] = config
1294 1                 df["lcm-operations-configuration"]["operate-vnf-op-config"][
1295                     "day1-2"
1296                 ] = day12
1297 1             vnfd["df"][df_id] = df
1298 1         return vnfd
1299
1300 1     def remove_modifiable_items(self, vnfd: dict) -> dict:
1301         """Removes the modifiable parts from the VNFD descriptors
1302
1303         It calls different extract functions according to different update types
1304         to clear all the modifiable items from VNFD
1305
1306         Args:
1307             vnfd (dict): Descriptor as a dictionary
1308
1309         Returns:
1310             vnfd (dict): Descriptor which does not include modifiable contents
1311         """
1312 1         if vnfd.get("vnfd"):
1313 0             vnfd = vnfd["vnfd"]
1314 1         vnfd.pop("_admin", None)
1315         # If the other extractions need to be done from VNFD,
1316         # the new extract methods could be appended to below list.
1317 1         for extract_function in [self.extract_day12_primitives, self.extract_policies]:
1318 1             vnfd_temp = extract_function(vnfd)
1319 1             vnfd = vnfd_temp
1320 1         return vnfd
1321
1322 1     def _validate_descriptor_changes(
1323         self,
1324         descriptor_id: str,
1325         descriptor_file_name: str,
1326         old_descriptor_directory: str,
1327         new_descriptor_directory: str,
1328     ):
1329         """Compares the old and new VNFD descriptors and validates the new descriptor.
1330
1331         Args:
1332             old_descriptor_directory (str):   Directory of descriptor which is in-use
1333             new_descriptor_directory (str):   Directory of descriptor which is proposed to update (new revision)
1334
1335         Returns:
1336             None
1337
1338         Raises:
1339             EngineException:    In case of error when there are unallowed changes
1340         """
1341 1         try:
1342             # If VNFD does not exist in DB or it is not in use by any NS,
1343             # validation is not required.
1344 1             vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
1345 1             if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
1346 1                 return
1347
1348             # Get the old and new descriptor contents in order to compare them.
1349 1             with self.fs.file_open(
1350                 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1351             ) as old_descriptor_file:
1352 1                 with self.fs.file_open(
1353                     (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
1354                 ) as new_descriptor_file:
1355 1                     old_content = yaml.safe_load(old_descriptor_file.read())
1356 1                     new_content = yaml.safe_load(new_descriptor_file.read())
1357
1358                     # If software version has changed, we do not need to validate
1359                     # the differences anymore.
1360 1                     if old_content and new_content:
1361 1                         if self.find_software_version(
1362                             old_content
1363                         ) != self.find_software_version(new_content):
1364 1                             return
1365
1366 1                         disallowed_change = DeepDiff(
1367                             self.remove_modifiable_items(old_content),
1368                             self.remove_modifiable_items(new_content),
1369                         )
1370
1371 1                         if disallowed_change:
1372 1                             changed_nodes = functools.reduce(
1373                                 lambda a, b: a + " , " + b,
1374                                 [
1375                                     node.lstrip("root")
1376                                     for node in disallowed_change.get(
1377                                         "values_changed"
1378                                     ).keys()
1379                                 ],
1380                             )
1381
1382 1                             raise EngineException(
1383                                 f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
1384                                 "there are disallowed changes in the vnf descriptor.",
1385                                 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1386                             )
1387 1         except (
1388             DbException,
1389             AttributeError,
1390             IndexError,
1391             KeyError,
1392             ValueError,
1393         ) as e:
1394 0             raise type(e)(
1395                 "VNF Descriptor could not be processed with error: {}.".format(e)
1396             )
1397
1398
class NsdTopic(DescriptorTopic):
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validates an NSD against the ETSI SOL006 model using pyangbind.

        Args:
            item: topic name ("nsds"); kept for interface compatibility
            data (dict): descriptor content without envelope
            force (bool): when True, unknown leafs are skipped instead of rejected

        Returns:
            dict: the descriptor as normalized by pyangbind serialization

        Raises:
            EngineException: if the descriptor uses the old (pre-SOL006) format
                or does not conform to the model
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            # Restore the original vnf-profile section so it is stored
            # verbatim (the pyangbind round-trip may alter it).
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        """Return True when the data carries the deprecated pre-SOL006 envelope."""
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strips the "nsd"/"etsi-nfv-nsd:nsd" envelope and returns the single NSD.

        Raises:
            EngineException: if the inner "nsd" entry is not a one-element list
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validates a new NSD: drops read-only fields, runs model and
        cross-reference validation, and returns the cleaned descriptor."""
        # These attributes are server-managed (read-only for the client).
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
        for fg in get_iterable(indata.get("vnffgd")):
            self.validate_vnffgd_data(fg, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Rejects virtual-link-profiles that define protocol data on a
        management network VLD (both at once is not allowed)."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnffgd_data(fg, indata):
        """Validates VNF Forwarding Graph cross references: every nfpd position
        must reference an existing nfp-position-element and every constituent
        profile element must reference a declared vnf-profile-id."""
        position_list = []
        all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id")))
        for fgposition in get_iterable(fg.get("nfp-position-element")):
            position_list.append(fgposition["id"])

        for nfpd in get_iterable(fg.get("nfpd")):
            nfp_position = []
            for position in get_iterable(nfpd.get("position-desc-id")):
                # NOTE(review): assumes "nfp-position-element-id" exists and is
                # a non-empty list — a missing key would raise here; confirm
                # against the SOL006 information model.
                nfp_position = position.get("nfp-position-element-id")
                if nfp_position[0] not in position_list:
                    raise EngineException(
                        "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
                        "does not match any nfp-position-element".format(
                            nfpd["id"], nfp_position[0]
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

                for cp in get_iterable(position.get("cp-profile-id")):
                    for cpe in get_iterable(cp.get("constituent-profile-elements")):
                        constituent_base_element_id = cpe.get(
                            "constituent-base-element-id"
                        )
                        if (
                            constituent_base_element_id
                            and constituent_base_element_id not in all_vnf_ids
                        ):
                            raise EngineException(
                                "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
                                "does not match any constituent-base-element-id".format(
                                    cpe["id"], constituent_base_element_id
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Checks that every df vnf-profile references a vnfd-id declared in
        the descriptor's top-level "vnfd-id" list."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Returns a dict mapping each referenced vnfd-id to its stored VNFD.

        Raises:
            EngineException: when a referenced vnfd-id does not exist in the
                project scope
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Checks that each constituent-cpd-id in the df's virtual-link
        connectivity references an existing ext-cpd of the constituent VNFD."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            # NOTE(review): relies on vnf_profile["vnfd-id"] being present in
            # vnfds_index (guaranteed by prior vnfd-id validation).
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Runs the generic edit-conflict checks, then validates the edited
        descriptor's VNFD references and connection points."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict):    Descriptor as a dictionary

        Returns:
            nsd (dict):    Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the NSD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict):  Descriptor as a dictionary

        Returns:
            nsd (dict):  Descriptor which does not include modifiable contents
        """
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            descriptor_id:              Internal database _id of the descriptor
            descriptor_file_name:       Descriptor file name inside both directories
            old_descriptor_directory:   Directory of descriptor which is in-use
            new_descriptor_directory:   Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException:    In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # Report every change category found by DeepDiff, not
                            # only "values_changed": a diff containing solely
                            # additions/removals would otherwise crash with
                            # AttributeError on .get("values_changed").keys().
                            changed_nodes = ", ".join(
                                str(node)[4:]
                                if str(node).startswith("root")
                                else str(node)
                                for category in disallowed_change.values()
                                for node in category
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Adds the SOL005 state attributes and hypermedia links to the record."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1839
1840
class NstTopic(DescriptorTopic):
    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validates an NST descriptor against the OSM NST information model.

        Args:
            item: topic name ("nsts"); kept for interface compatibility
            data: descriptor content as a dictionary
            force: when True, unknown leafs are skipped instead of rejected

        Returns:
            The normalized descriptor with the envelope removed.

        Raises:
            EngineException: when the descriptor does not conform to the model.
        """
        try:
            nst_obj = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=nst_obj,
                path_helper=True,
                skip_unknown=force,
            )
            serialized = pybindJSON.dumps(nst_obj, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strips the "nst"/"nst:nst" envelope, returning the single template.

        Raises:
            EngineException: if the envelope value is not a one-element list.
        """
        if not indata:
            return {}
        clean_indata = indata
        for envelope in ("nst", "nst:nst"):
            content = clean_indata.get(envelope)
            if content:
                if not isinstance(content, list) or len(content) != 1:
                    raise EngineException(
                        "'{}' must be a list only one element".format(envelope)
                    )
                return content[0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validates a new NST: drops read-only state fields and runs the
        pyangbind model validation."""
        # These attributes are server-managed (read-only for the client).
        for state_key in ("onboardingState", "operationalState", "usageState"):
            indata.pop(state_key, None)
        validated = self.pyangbind_validation("nsts", indata, force)
        return validated.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        subnets = descriptor.get("netslice-subnet")
        if not subnets:
            return
        for subnet in subnets:
            nsd_ref = subnet["nsd-ref"]
            query = self._get_project_filter(session)
            query["id"] = nsd_ref
            if not self.db.get_list("nsds", query):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_ref),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Runs the generic edit-conflict checks, then verifies the NSD refs."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Look for Network Slice Instances created from this template
        query = self._get_project_filter(session)
        query["_admin.nst-id"] = _id
        if self.db.get_list("nsis", query):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Adds SOL005 state attributes and hypermedia links to the record."""
        admin = data["_admin"]
        data["onboardingState"] = admin["onboardingState"]
        data["operationalState"] = admin["operationalState"]
        data["usageState"] = admin["usageState"]

        base = "/nst/v1/netslice_templates/{}".format(data["_id"])
        data["_links"] = {
            "self": {"href": base},
            "nst": {"href": base + "/nst"},
        }

        return super().sol005_projection(data)
1958
1959
class PduTopic(BaseTopic):
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fills in the server-managed _admin state for a newly created PDU."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"].update(
            {
                "onboardingState": "CREATED",
                "operationalState": "ENABLED",
                "usageState": "NOT_IN_USE",
            }
        )

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        query = self._get_project_filter(session)
        query["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", query):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1995
1996
1997 1 class VnfPkgOpTopic(BaseTopic):
1998 1     topic = "vnfpkgops"
1999 1     topic_msg = "vnfd"
2000 1     schema_new = vnfpkgop_new_schema
2001 1     schema_edit = None
2002
2003 1     def __init__(self, db, fs, msg, auth):
2004 0         BaseTopic.__init__(self, db, fs, msg, auth)
2005
2006 1     def edit(self, session, _id, indata=None, kwargs=None, content=None):
2007 0         raise EngineException(
2008             "Method 'edit' not allowed for topic '{}'".format(self.topic),
2009             HTTPStatus.METHOD_NOT_ALLOWED,
2010         )
2011
2012 1     def delete(self, session, _id, dry_run=False):
2013 0         raise EngineException(
2014             "Method 'delete' not allowed for topic '{}'".format(self.topic),
2015             HTTPStatus.METHOD_NOT_ALLOWED,
2016         )
2017
2018 1     def delete_list(self, session, filter_q=None):
2019 0         raise EngineException(
2020             "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
2021             HTTPStatus.METHOD_NOT_ALLOWED,
2022         )
2023
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new VNF package operation entry in the database and publishes
        it on the message bus.

        :param rollback: list to append created items at database in case a rollback needs to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        :raises EngineException: if the KDU is not found in the VNFD, or the KDU
            declares neither 'helm-chart' nor 'juju-bundle'
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        # Fetch the target VNF package, restricted to the session's projects.
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the referenced KDU in the descriptor; the for-else raises when
        # no KDU with that name exists.
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # Copy the artifact reference into the operation parameters and extract
        # the repository name, which is the part before the '/' when the value
        # has the exact form "repo/name"; otherwise there is no repository.
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # Resolve the repository record (reusing the project filter, now keyed
        # by name instead of _id) to record its id and url in the operation.
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        # Build the operation record in PROCESSING state with ETSI SOL005-style
        # "links" to itself and to the VNF package.
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # Reuse the creation timestamp written by format_on_new for the
        # operation-specific time fields.
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        # Register for rollback only after the record exists, then notify
        # subscribers over the message bus.
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None