Code Coverage

Cobertura Coverage Report > osm_nbi >

descriptor_topics.py

Trend

Classes: 100%

Lines: 66%

Conditionals: 100%
 

File Coverage summary

Name    Classes    Lines    Conditionals
descriptor_topics.py
100%
1/1
66%
637/964
100%
0/0

Coverage Breakdown by Class

Name    Lines    Conditionals
descriptor_topics.py
66%
637/964
N/A

Source

osm_nbi/descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 #    http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 1 import tarfile
17 1 import yaml
18 1 import json
19 1 import copy
20 1 import os
21 1 import shutil
22 1 import functools
23
24 # import logging
25 1 from deepdiff import DeepDiff
26 1 from hashlib import md5
27 1 from osm_common.dbbase import DbException, deep_update_rfc7396
28 1 from http import HTTPStatus
29 1 from time import time
30 1 from uuid import uuid4
31 1 from re import fullmatch
32 1 from zipfile import ZipFile
33 1 from osm_nbi.validation import (
34     ValidationError,
35     pdu_new_schema,
36     pdu_edit_schema,
37     validate_input,
38     vnfpkgop_new_schema,
39 )
40 1 from osm_nbi.base_topic import (
41     BaseTopic,
42     EngineException,
43     get_iterable,
44     detect_descriptor_usage,
45 )
46 1 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
47 1 from osm_im.nst import nst as nst_im
48 1 from pyangbind.lib.serialise import pybindJSONDecoder
49 1 import pyangbind.lib.pybindJSON as pybindJSON
50 1 from osm_nbi import utils
51
52 1 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
53
54
55 1 class DescriptorTopic(BaseTopic):
56 1     def __init__(self, db, fs, msg, auth):
57 1         BaseTopic.__init__(self, db, fs, msg, auth)
58
    def _validate_input_new(self, indata, storage_params, force=False):
        """
        Hook called from check_conflict_on_edit to validate/serialize a descriptor.
        This base implementation is a no-op returning indata unchanged; concrete
        topic subclasses presumably override it with model-specific validation —
        confirm against the subclasses (not visible here).
        """
        return indata
61
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate an edited descriptor before it is persisted.

        Performs, in order: the base-class conflict checks; a uniqueness check of
        'id'/'name' fields inside every list of the descriptor; re-validation of the
        descriptor body via _validate_input_new (with internal keys removed and then
        restored); and, unless the session is forced, a check that no other
        descriptor of this project already uses the same 'id'.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: the edit request body
        :param _id: internal id of the descriptor being edited
        :return: the validated (serialized) final content
        :raises EngineException: on duplicated identifiers or id conflicts
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk every list of dicts in the descriptor; within each
            # list the discriminator key ("id" if present on the first element,
            # else "name") must be unique across the list's items.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            # NOTE: desc_item_id is chosen from the FIRST element only;
                            # later elements are checked against that same key.
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    # rebind position only to build the error message;
                                    # the exception aborts the walk immediately after
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # assumes "_admin" was present in final_content (KeyError otherwise) — TODO confirm
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
133
134 1     @staticmethod
135 1     def format_on_new(content, project_id=None, make_public=False):
136 1         BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
137 1         content["_admin"]["onboardingState"] = "CREATED"
138 1         content["_admin"]["operationalState"] = "DISABLED"
139 1         content["_admin"]["usageState"] = "NOT_IN_USE"
140
141 1     def delete_extra(self, session, _id, db_content, not_send_msg=None):
142         """
143         Deletes file system storage associated with the descriptor
144         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
145         :param _id: server internal id
146         :param db_content: The database content of the descriptor
147         :param not_send_msg: To not send message (False) or store content (list) instead
148         :return: None if ok or raises EngineException with the problem
149         """
150 1         self.fs.file_delete(_id, ignore_non_exist=True)
151 1         self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
152         # Remove file revisions
153 1         if "revision" in db_content["_admin"]:
154 0             revision = db_content["_admin"]["revision"]
155 0             while revision > 0:
156 0                 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
157 0                 revision = revision - 1
158
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Return the single descriptor of 'topic' whose SOL005 'id' matches, first
        looking among those visible to the session's project, then retrying.
        :param db: database backend
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name (plural; [:-1] is used for messages)
        :param id: descriptor 'id' field (not the internal '_id')
        :return: the descriptor document
        :raises DbException: NOT_FOUND if none exists, CONFLICT if ambiguous
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this filter is built exactly like the project-scoped one
        # above, so this second query repeats the first; a public-visibility
        # condition appears to be missing — confirm against _get_project_filter.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
193
194 1     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
195         """
196         Creates a new almost empty DISABLED  entry into database. Due to SOL005, it does not follow normal procedure.
197         Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
198         (self.upload_content)
199         :param rollback: list to append created items at database in case a rollback may to be done
200         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
201         :param indata: data to be inserted
202         :param kwargs: used to override the indata descriptor
203         :param headers: http request headers
204         :return: _id, None: identity of the inserted data; and None as there is not any operation
205         """
206
207         # No needed to capture exceptions
208         # Check Quota
209 1         self.check_quota(session)
210
211         # _remove_envelop
212 1         if indata:
213 0             if "userDefinedData" in indata:
214 0                 indata = indata["userDefinedData"]
215
216         # Override descriptor with query string kwargs
217 1         self._update_input_with_kwargs(indata, kwargs)
218         # uncomment when this method is implemented.
219         # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
220         # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
221
222 1         content = {"_admin": {"userDefinedData": indata, "revision": 0}}
223
224 1         self.format_on_new(
225             content, session["project_id"], make_public=session["public"]
226         )
227 1         _id = self.db.create(self.topic, content)
228 1         rollback.append({"topic": self.topic, "_id": _id})
229 1         self._send_msg("created", {"_id": _id})
230 1         return _id, None
231
232 1     def upload_content(self, session, _id, indata, kwargs, headers):
233         """
234         Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
235         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
236         :param _id : the nsd,vnfd is already created, this is the id
237         :param indata: http body request
238         :param kwargs: user query string to override parameters. NOT USED
239         :param headers:  http request headers
240         :return: True if package is completely uploaded or False if partial content has been uploded
241             Raise exception on error
242         """
243         # Check that _id exists and it is valid
244 1         current_desc = self.show(session, _id)
245
246 1         content_range_text = headers.get("Content-Range")
247 1         expected_md5 = headers.get("Content-File-MD5")
248 1         compressed = None
249 1         content_type = headers.get("Content-Type")
250 1         if (
251             content_type
252             and "application/gzip" in content_type
253             or "application/x-gzip" in content_type
254         ):
255 0             compressed = "gzip"
256 1         if content_type and "application/zip" in content_type:
257 0             compressed = "zip"
258 1         filename = headers.get("Content-Filename")
259 1         if not filename and compressed:
260 0             filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
261 1         elif not filename:
262 1             filename = "package"
263
264 1         revision = 1
265 1         if "revision" in current_desc["_admin"]:
266 1             revision = current_desc["_admin"]["revision"] + 1
267
268         # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
269 1         file_pkg = None
270 1         error_text = ""
271 1         fs_rollback = []
272
273 1         try:
274 1             if content_range_text:
275 0                 content_range = (
276                     content_range_text.replace("-", " ").replace("/", " ").split()
277                 )
278 0                 if (
279                     content_range[0] != "bytes"
280                 ):  # TODO check x<y not negative < total....
281 0                     raise IndexError()
282 0                 start = int(content_range[1])
283 0                 end = int(content_range[2]) + 1
284 0                 total = int(content_range[3])
285             else:
286 1                 start = 0
287             # Rather than using a temp folder, we will store the package in a folder based on
288             # the current revision.
289 1             proposed_revision_path = (
290                 _id + ":" + str(revision)
291             )  # all the content is upload here and if ok, it is rename from id_ to is folder
292
293 1             if start:
294 0                 if not self.fs.file_exists(proposed_revision_path, "dir"):
295 0                     raise EngineException(
296                         "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
297                     )
298             else:
299 1                 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
300 1                 self.fs.mkdir(proposed_revision_path)
301 1                 fs_rollback.append(proposed_revision_path)
302
303 1             storage = self.fs.get_params()
304 1             storage["folder"] = proposed_revision_path
305
306 1             file_path = (proposed_revision_path, filename)
307 1             if self.fs.file_exists(file_path, "file"):
308 0                 file_size = self.fs.file_size(file_path)
309             else:
310 1                 file_size = 0
311 1             if file_size != start:
312 0                 raise EngineException(
313                     "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
314                         file_size, start
315                     ),
316                     HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
317                 )
318 1             file_pkg = self.fs.file_open(file_path, "a+b")
319 1             if isinstance(indata, dict):
320 1                 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
321 1                 file_pkg.write(indata_text.encode(encoding="utf-8"))
322             else:
323 0                 indata_len = 0
324 0                 while True:
325 0                     indata_text = indata.read(4096)
326 0                     indata_len += len(indata_text)
327 0                     if not indata_text:
328 0                         break
329 0                     file_pkg.write(indata_text)
330 1             if content_range_text:
331 0                 if indata_len != end - start:
332 0                     raise EngineException(
333                         "Mismatch between Content-Range header {}-{} and body length of {}".format(
334                             start, end - 1, indata_len
335                         ),
336                         HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
337                     )
338 0                 if end != total:
339                     # TODO update to UPLOADING
340 0                     return False
341
342             # PACKAGE UPLOADED
343 1             if expected_md5:
344 0                 file_pkg.seek(0, 0)
345 0                 file_md5 = md5()
346 0                 chunk_data = file_pkg.read(1024)
347 0                 while chunk_data:
348 0                     file_md5.update(chunk_data)
349 0                     chunk_data = file_pkg.read(1024)
350 0                 if expected_md5 != file_md5.hexdigest():
351 0                     raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
352 1             file_pkg.seek(0, 0)
353 1             if compressed == "gzip":
354 0                 tar = tarfile.open(mode="r", fileobj=file_pkg)
355 0                 descriptor_file_name = None
356 0                 for tarinfo in tar:
357 0                     tarname = tarinfo.name
358 0                     tarname_path = tarname.split("/")
359 0                     if (
360                         not tarname_path[0] or ".." in tarname_path
361                     ):  # if start with "/" means absolute path
362 0                         raise EngineException(
363                             "Absolute path or '..' are not allowed for package descriptor tar.gz"
364                         )
365 0                     if len(tarname_path) == 1 and not tarinfo.isdir():
366 0                         raise EngineException(
367                             "All files must be inside a dir for package descriptor tar.gz"
368                         )
369 0                     if (
370                         tarname.endswith(".yaml")
371                         or tarname.endswith(".json")
372                         or tarname.endswith(".yml")
373                     ):
374 0                         storage["pkg-dir"] = tarname_path[0]
375 0                         if len(tarname_path) == 2:
376 0                             if descriptor_file_name:
377 0                                 raise EngineException(
378                                     "Found more than one descriptor file at package descriptor tar.gz"
379                                 )
380 0                             descriptor_file_name = tarname
381 0                 if not descriptor_file_name:
382 0                     raise EngineException(
383                         "Not found any descriptor file at package descriptor tar.gz"
384                     )
385 0                 storage["descriptor"] = descriptor_file_name
386 0                 storage["zipfile"] = filename
387 0                 self.fs.file_extract(tar, proposed_revision_path)
388 0                 with self.fs.file_open(
389                     (proposed_revision_path, descriptor_file_name), "r"
390                 ) as descriptor_file:
391 0                     content = descriptor_file.read()
392 1             elif compressed == "zip":
393 0                 zipfile = ZipFile(file_pkg)
394 0                 descriptor_file_name = None
395 0                 for package_file in zipfile.infolist():
396 0                     zipfilename = package_file.filename
397 0                     file_path = zipfilename.split("/")
398 0                     if (
399                         not file_path[0] or ".." in zipfilename
400                     ):  # if start with "/" means absolute path
401 0                         raise EngineException(
402                             "Absolute path or '..' are not allowed for package descriptor zip"
403                         )
404
405 0                     if (
406                         zipfilename.endswith(".yaml")
407                         or zipfilename.endswith(".json")
408                         or zipfilename.endswith(".yml")
409                     ) and (
410                         zipfilename.find("/") < 0
411                         or zipfilename.find("Definitions") >= 0
412                     ):
413 0                         storage["pkg-dir"] = ""
414 0                         if descriptor_file_name:
415 0                             raise EngineException(
416                                 "Found more than one descriptor file at package descriptor zip"
417                             )
418 0                         descriptor_file_name = zipfilename
419 0                 if not descriptor_file_name:
420 0                     raise EngineException(
421                         "Not found any descriptor file at package descriptor zip"
422                     )
423 0                 storage["descriptor"] = descriptor_file_name
424 0                 storage["zipfile"] = filename
425 0                 self.fs.file_extract(zipfile, proposed_revision_path)
426
427 0                 with self.fs.file_open(
428                     (proposed_revision_path, descriptor_file_name), "r"
429                 ) as descriptor_file:
430 0                     content = descriptor_file.read()
431             else:
432 1                 content = file_pkg.read()
433 1                 storage["descriptor"] = descriptor_file_name = filename
434
435 1             if descriptor_file_name.endswith(".json"):
436 0                 error_text = "Invalid json format "
437 0                 indata = json.load(content)
438             else:
439 1                 error_text = "Invalid yaml format "
440 1                 indata = yaml.safe_load(content)
441
442             # Need to close the file package here so it can be copied from the
443             # revision to the current, unrevisioned record
444 1             if file_pkg:
445 1                 file_pkg.close()
446 1             file_pkg = None
447
448             # Fetch both the incoming, proposed revision and the original revision so we
449             # can call a validate method to compare them
450 1             current_revision_path = _id + "/"
451 1             self.fs.sync(from_path=current_revision_path)
452 1             self.fs.sync(from_path=proposed_revision_path)
453
454 1             if revision > 1:
455 1                 try:
456 1                     self._validate_descriptor_changes(
457                         _id,
458                         descriptor_file_name,
459                         current_revision_path,
460                         proposed_revision_path,
461                     )
462 0                 except Exception as e:
463 0                     shutil.rmtree(
464                         self.fs.path + current_revision_path, ignore_errors=True
465                     )
466 0                     shutil.rmtree(
467                         self.fs.path + proposed_revision_path, ignore_errors=True
468                     )
469                     # Only delete the new revision.  We need to keep the original version in place
470                     # as it has not been changed.
471 0                     self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
472 0                     raise e
473
474 1             indata = self._remove_envelop(indata)
475
476             # Override descriptor with query string kwargs
477 1             if kwargs:
478 0                 self._update_input_with_kwargs(indata, kwargs)
479
480 1             current_desc["_admin"]["storage"] = storage
481 1             current_desc["_admin"]["onboardingState"] = "ONBOARDED"
482 1             current_desc["_admin"]["operationalState"] = "ENABLED"
483 1             current_desc["_admin"]["modified"] = time()
484 1             current_desc["_admin"]["revision"] = revision
485
486 1             deep_update_rfc7396(current_desc, indata)
487 1             current_desc = self.check_conflict_on_edit(
488                 session, current_desc, indata, _id=_id
489             )
490
491             # Copy the revision to the active package name by its original id
492 1             shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
493 1             os.rename(
494                 self.fs.path + proposed_revision_path,
495                 self.fs.path + current_revision_path,
496             )
497 1             self.fs.file_delete(current_revision_path, ignore_non_exist=True)
498 1             self.fs.mkdir(current_revision_path)
499 1             self.fs.reverse_sync(from_path=current_revision_path)
500
501 1             shutil.rmtree(self.fs.path + _id)
502
503 1             self.db.replace(self.topic, _id, current_desc)
504
505             #  Store a copy of the package as a point in time revision
506 1             revision_desc = dict(current_desc)
507 1             revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
508 1             self.db.create(self.topic + "_revisions", revision_desc)
509 1             fs_rollback = []
510
511 1             indata["_id"] = _id
512 1             self._send_msg("edited", indata)
513
514             # TODO if descriptor has changed because kwargs update content and remove cached zip
515             # TODO if zip is not present creates one
516 1             return True
517
518 1         except EngineException:
519 1             raise
520 0         except IndexError:
521 0             raise EngineException(
522                 "invalid Content-Range header format. Expected 'bytes start-end/total'",
523                 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
524             )
525 0         except IOError as e:
526 0             raise EngineException(
527                 "invalid upload transaction sequence: '{}'".format(e),
528                 HTTPStatus.BAD_REQUEST,
529             )
530 0         except tarfile.ReadError as e:
531 0             raise EngineException(
532                 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
533             )
534 0         except (ValueError, yaml.YAMLError) as e:
535 0             raise EngineException(error_text + str(e))
536 0         except ValidationError as e:
537 0             raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
538         finally:
539 1             if file_pkg:
540 0                 file_pkg.close()
541 1             for file in fs_rollback:
542 1                 self.fs.file_delete(file, ignore_non_exist=True)
543
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None; when an artifact path,
            it is unpacked with *path below, so it is presumably a sequence of path
            components — confirm against callers
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # Decode the Accept header; "*/*" enables both text and zip
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # Content can only be fetched once the package upload completed
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # A directory path returns its listing; a file path returns the raw bytes
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # pkgtype   accept  ZIP  TEXT    -> result
        # manyfiles         yes  X       -> zip
        #                   no   yes     -> error
        # onefile           yes  no      -> zip
        #                   X    yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        # Single-file packages (or an explicit $DESCRIPTOR request) can be served as text
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
634
635 1     def _remove_yang_prefixes_from_descriptor(self, descriptor):
636 1         new_descriptor = {}
637 1         for k, v in descriptor.items():
638 1             new_v = v
639 1             if isinstance(v, dict):
640 1                 new_v = self._remove_yang_prefixes_from_descriptor(v)
641 1             elif isinstance(v, list):
642 1                 new_v = list()
643 1                 for x in v:
644 1                     if isinstance(x, dict):
645 1                         new_v.append(self._remove_yang_prefixes_from_descriptor(x))
646                     else:
647 1                         new_v.append(x)
648 1             new_descriptor[k.split(":")[-1]] = new_v
649 1         return new_descriptor
650
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate 'data' against the pyangbind information model of 'item'.
        This base implementation always raises INTERNAL_SERVER_ERROR; concrete
        topic subclasses presumably override it with model-specific validation —
        confirm against the subclasses (not visible here).
        """
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )
656
657 1     def _validate_input_edit(self, indata, content, force=False):
658         # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
659 1         if "_id" in indata:
660 0             indata.pop("_id")
661 1         if "_admin" not in indata:
662 1             indata["_admin"] = {}
663
664 1         if "operationalState" in indata:
665 0             if indata["operationalState"] in ("ENABLED", "DISABLED"):
666 0                 indata["_admin"]["operationalState"] = indata.pop("operationalState")
667             else:
668 0                 raise EngineException(
669                     "State '{}' is not a valid operational state".format(
670                         indata["operationalState"]
671                     ),
672                     http_code=HTTPStatus.BAD_REQUEST,
673                 )
674
675         # In the case of user defined data, we need to put the data in the root of the object
676         # to preserve current expected behaviour
677 1         if "userDefinedData" in indata:
678 0             data = indata.pop("userDefinedData")
679 0             if isinstance(data, dict):
680 0                 indata["_admin"]["userDefinedData"] = data
681             else:
682 0                 raise EngineException(
683                     "userDefinedData should be an object, but is '{}' instead".format(
684                         type(data)
685                     ),
686                     http_code=HTTPStatus.BAD_REQUEST,
687                 )
688
689 1         if (
690             "operationalState" in indata["_admin"]
691             and content["_admin"]["operationalState"]
692             == indata["_admin"]["operationalState"]
693         ):
694 0             raise EngineException(
695                 "operationalState already {}".format(
696                     content["_admin"]["operationalState"]
697                 ),
698                 http_code=HTTPStatus.CONFLICT,
699             )
700
701 1         return indata
702
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook to validate the differences between two descriptor revisions.

        This base implementation accepts any change. Subclasses override it
        to reject modifications that are not allowed (e.g. while the
        descriptor is in use by an instance).

        :param descriptor_id: internal id of the descriptor being updated
        :param descriptor_file_name: name of the descriptor file
        :param old_descriptor_directory: storage folder of the current revision
        :param new_descriptor_directory: storage folder of the proposed revision
        :return: None
        """
        # Example of what an override may raise:
        #    raise EngineException(
        #           "Error in validating new descriptor: <NODE> cannot be modified",
        #           http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        #    )
        pass
716
717
class VnfdTopic(DescriptorTopic):
    """Engine topic handling VNF descriptors (ETSI SOL006 VNFD packages).

    Extends DescriptorTopic with VNFD-specific behaviour: pyangbind model
    validation, cross-reference checks between vdus/cpds/dfs, checks that
    charms and cloud-init files referenced by the descriptor are present in
    the uploaded package, and validation of changes between revisions.
    """

    topic = "vnfds"  # database collection holding these descriptors
    topic_msg = "vnfd"  # message topic used for notifications

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate `data` against the ETSI SOL006 VNFD yang model.

        :param item: topic name, unused here (interface compatibility)
        :param data: descriptor content as a dict
        :param force: if True, unknown yang nodes are skipped instead of
            failing validation
        :return: `data` deep-updated with the normalized model output
        :raises EngineException: 422 if the descriptor is in the old
            (pre-SOL006) format or does not conform to the model
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            # Round-trip through pybind: load (validates), dump back to ietf
            # json, then strip envelope and yang prefixes before merging.
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            # Any failure during the round-trip is reported as a 422
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        # Old (pre-SOL006) descriptors wrapped the content in 'vnfd-catalog'
        return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Return the descriptor content without its outer envelope key.

        Accepts 'etsi-nfv-vnfd:vnfd' or 'vnfd' wrappers; anything else is
        returned unchanged.

        :raises EngineException: if the envelope value is not a dict
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("etsi-nfv-vnfd:vnfd"):
            if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
                raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
            clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
        elif clean_indata.get("vnfd"):
            if not isinstance(clean_indata["vnfd"], dict):
                raise EngineException("'vnfd' must be dict")
            clean_indata = clean_indata["vnfd"]

        return clean_indata

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run base conflict checks and derive '_admin.type' of the VNFD.

        Type is 'pnfd' when only PDUs are declared, 'vnfd' when only VDUs,
        'hnfd' (hybrid) when both.
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        # set type of vnfd
        contains_pdu = False
        contains_vdu = False
        for vdu in get_iterable(final_content.get("vdu")):
            if vdu.get("pdu-type"):
                contains_pdu = True
            else:
                contains_vdu = True
        if contains_pdu:
            final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
        elif contains_vdu:
            final_content["_admin"]["type"] = "vnfd"
        # if neither vdu nor pdu is present, the type is left unset
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
        that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
        that uses this vnfd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD
        # NOTE(review): the del is redundant — the next line overwrites the key
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a newly-onboarded VNFD: model check plus cross references.

        :param indata: descriptor content; SOL005 read-only fields are dropped
        :param storage_params: package storage info used for file checks
        :param force: passed through to pyangbind validation (skip unknown)
        :return: the validated (and normalized) indata
        """
        # These fields are server-generated; ignore them if the client sent them
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
        self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)

        return indata

    @staticmethod
    def validate_mgmt_interface_connection_point(indata):
        """Check 'mgmt-cp' is present and references an existing ext-cpd."""
        if not indata.get("vdu"):
            return
        if not indata.get("mgmt-cp"):
            raise EngineException(
                "'mgmt-cp' is a mandatory field and it is not defined",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

        for cp in get_iterable(indata.get("ext-cpd")):
            if cp["id"] == indata["mgmt-cp"]:
                break
        else:
            raise EngineException(
                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def validate_vdu_internal_connection_points(vdu):
        """Check int-cpd ids are unique within a single vdu."""
        int_cpds = set()
        for cpd in get_iterable(vdu.get("int-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in int_cpds:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
                        vdu["id"], cpd_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            int_cpds.add(cpd_id)

    @staticmethod
    def validate_external_connection_points(indata):
        """Check ext-cpd ids are unique and their int-cpd references exist."""
        all_vdus_int_cpds = set()
        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

        ext_cpds = set()
        for cpd in get_iterable(indata.get("ext-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in ext_cpds:
                raise EngineException(
                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            ext_cpds.add(cpd_id)

            int_cpd = cpd.get("int-cpd")
            if int_cpd:
                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                    raise EngineException(
                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                            cpd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
            # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?

    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """For every vdu with a juju execution environment, check the package
        contains a charms folder ('charms' or 'Scripts/charms')."""
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # a day1-2 config applies to a vdu when their ids match
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )

    def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
        """Check the cloud-init file referenced by a vdu exists in the package
        ('cloud_init' or 'Scripts/cloud_init' folder)."""
        if not vdu.get("cloud-init-file"):
            return
        if not self._validate_package_folders(
            storage_params, "cloud_init", vdu["cloud-init-file"]
        ) and not self._validate_package_folders(
            storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
        ):
            raise EngineException(
                "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
                "package".format(indata["id"], vdu["id"])
            )

    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """Check VNF-level juju charms declared in day1-2 config exist in the
        package ('charms' or 'Scripts/charms' folder)."""
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these two early 'return's abort the whole loop on the
            # first df without lcm configuration, skipping any remaining dfs —
            # confirm 'continue' was not intended.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # only the config whose id equals the vnfd id is VNF-level
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )

    def _validate_package_folders(self, storage_params, folder, file=None):
        """Check a folder (or a file inside it) exists in the stored package.

        Looks first under '<folder>_' (the staging copy used during updates),
        then under '<folder>'. With `file`, checks that file exists; without
        it, checks the folder exists and is non-empty.

        :return: True when the folder/file is present, False otherwise
        """
        if not storage_params:
            return False
        elif not storage_params.get("pkg-dir"):
            # package without an internal pkg-dir (flat layout)
            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
                f = "{}_/{}".format(storage_params["folder"], folder)
            else:
                f = "{}/{}".format(storage_params["folder"], folder)
            if file:
                return self.fs.file_exists("{}/{}".format(f, file), "file")
            else:
                if self.fs.file_exists(f, "dir"):
                    if self.fs.dir_ls(f):
                        return True
            return False
        else:
            # package with an internal pkg-dir level
            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
                f = "{}_/{}/{}".format(
                    storage_params["folder"], storage_params["pkg-dir"], folder
                )
            else:
                f = "{}/{}/{}".format(
                    storage_params["folder"], storage_params["pkg-dir"], folder
                )
            if file:
                return self.fs.file_exists("{}/{}".format(f, file), "file")
            else:
                if self.fs.file_exists(f, "dir"):
                    if self.fs.dir_ls(f):
                        return True
            return False

    @staticmethod
    def validate_internal_virtual_links(indata):
        """Check VLD ids are unique and every reference to one resolves."""
        all_ivld_ids = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            ivld_id = ivld.get("id")
            if ivld_id and ivld_id in all_ivld_ids:
                raise EngineException(
                    "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_ivld_ids.add(ivld_id)

        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
                if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                    raise EngineException(
                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                        "int-virtual-link-desc".format(
                            vdu["id"], int_cpd["id"], int_cpd_ivld_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                vlp_ivld_id = vlp.get("id")
                if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
                    raise EngineException(
                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
                        "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    @staticmethod
    def validate_monitoring_params(indata):
        """Check monitoring-parameter ids are unique across ivlds, vdus and dfs."""
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                            ivld["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                            vdu["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "df[id='{}']:monitoring-parameter[id='{}']".format(
                            df["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Check scaling criteria reference existing monitoring params and
        scaling-config-actions reference existing day1-2 config primitives."""
        # collect every monitoring-parameter id declared anywhere in the vnfd
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # a scaling-config-action requires a day1-2 config whose
                    # id matches the vnfd id
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # revisions are stored with ids derived from _id, hence the regex match
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    def sol005_projection(self, data):
        """Add the SOL005 projection fields and '_links' to a vnfd record."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
        links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
        links["packageContent"] = {
            "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)

    @staticmethod
    def find_software_version(vnfd: dict) -> str:
        """Find the sotware version in the VNFD descriptors

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            software-version (str)
        """
        # default used when the descriptor declares no software-version
        default_sw_version = "1.0"
        if vnfd.get("vnfd"):
            vnfd = vnfd["vnfd"]
        if vnfd.get("software-version"):
            return vnfd["software-version"]
        else:
            return default_sw_version

    @staticmethod
    def extract_policies(vnfd: dict) -> dict:
        """Removes the policies from the VNFD descriptors

        Args:
            vnfd (dict):   Descriptor as a dictionary

        Returns:
            vnfd (dict): VNFD which does not include policies
        """
        for df in vnfd.get("df", {}):
            for policy in ["scaling-aspect", "healing-aspect"]:
                if df.get(policy, {}):
                    df.pop(policy)
        for vdu in vnfd.get("vdu", {}):
            for alarm_policy in ["alarm", "monitoring-parameter"]:
                if vdu.get(alarm_policy, {}):
                    vdu.pop(alarm_policy)
        return vnfd

    @staticmethod
    def extract_day12_primitives(vnfd: dict) -> dict:
        """Removes the day12 primitives from the VNFD descriptors

        Args:
            vnfd (dict):   Descriptor as a dictionary

        Returns:
            vnfd (dict)
        """
        for df_id, df in enumerate(vnfd.get("df", {})):
            if (
                df.get("lcm-operations-configuration", {})
                .get("operate-vnf-op-config", {})
                .get("day1-2")
            ):
                day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                    "day1-2"
                )
                for config_id, config in enumerate(day12):
                    # day1-2 primitives are modifiable between revisions,
                    # so they are excluded from the comparison
                    for key in [
                        "initial-config-primitive",
                        "config-primitive",
                        "terminate-config-primitive",
                    ]:
                        config.pop(key, None)
                        day12[config_id] = config
                df["lcm-operations-configuration"]["operate-vnf-op-config"][
                    "day1-2"
                ] = day12
            vnfd["df"][df_id] = df
        return vnfd

    def remove_modifiable_items(self, vnfd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from VNFD

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict): Descriptor which does not include modifiable contents
        """
        if vnfd.get("vnfd"):
            vnfd = vnfd["vnfd"]
        vnfd.pop("_admin", None)
        # If the other extractions need to be done from VNFD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives, self.extract_policies]:
            vnfd_temp = extract_function(vnfd)
            vnfd = vnfd_temp
        return vnfd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new VNFD descriptors and validates the new descriptor.

        Args:
            old_descriptor_directory (str):   Directory of descriptor which is in-use
            new_descriptor_directory (str):   Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException:    In case of error when there are unallowed changes
        """
        try:
            # If VNFD does not exist in DB or it is not in use by any NS,
            # validation is not required.
            vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
            if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    # If software version has changed, we do not need to validate
                    # the differences anymore.
                    if old_content and new_content:
                        if self.find_software_version(
                            old_content
                        ) != self.find_software_version(new_content):
                            return

                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # NOTE(review): only 'values_changed' entries are
                            # reported; a diff containing solely added/removed
                            # items makes .get() return None and the resulting
                            # AttributeError is re-raised wrapped by the except
                            # below — confirm intended. Also note lstrip("root")
                            # strips the characters r/o/t as a set, not the
                            # literal prefix "root".
                            changed_nodes = functools.reduce(
                                lambda a, b: a + " , " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the vnf descriptor.",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            # re-raise the same exception type with added context
            raise type(e)(
                "VNF Descriptor could not be processed with error: {}.".format(e)
            )
1371
1372
class NsdTopic(DescriptorTopic):
    """Descriptor topic handling ETSI SOL006 Network Service Descriptors (NSD)."""

    # Database collection name and kafka topic used by the BaseTopic machinery
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        # All wiring (database, filesystem, message bus, auth) is done by the parent
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NSD content against the ETSI NFV NSD YANG model via pyangbind.

        :param item: topic name (kept for interface compatibility, not used here)
        :param data: NSD content as a dict (SOL006 format)
        :param force: if True, unknown YANG nodes are skipped instead of failing
        :return: the descriptor normalized by the YANG round-trip (dict)
        :raises EngineException: if the descriptor uses the pre-SOL006 format or
            fails YANG validation
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # vnf-profile is saved aside and restored after the round-trip, since
            # the pyangbind serialization may alter it
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        """Return True if the data uses the deprecated pre-SOL006 'nsd-catalog' envelope."""
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nsd'/'etsi-nfv-nsd:nsd' envelope and return the bare descriptor dict."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        # after the first unwrap there may still be an inner {"nsd": [ ... ]} level
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Drop read-only SOL005 state fields, run YANG validation and cross-reference checks.

        :param indata: candidate NSD content
        :param storage_params: storage information (unused here, kept for interface)
        :param force: passed through to pyangbind validation (skip unknown nodes)
        :return: validated and normalized descriptor
        """
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-profiles that set protocol data on a mgmt-network VLD.

        :param vld: one virtual-link-desc entry of the NSD
        :param indata: the whole NSD (searched for df/virtual-link-profile entries)
        :raises EngineException: if a profile of a mgmt VLD carries virtual-link-protocol-data
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df/vnf-profile vnfd-id is declared in the NSD top-level vnfd-id list.

        :param indata: the whole NSD
        :raises EngineException: on a vnf-profile referencing an undeclared vnfd-id
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            # SOL005 operational state is stored internally under _admin
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the same operational state again is treated as a conflict
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a map vnfd-id -> stored VNFD for every constituent VNFD of the NSD.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: the NSD whose "vnfd-id" list is resolved
        :return: dict mapping each referenced vnfd-id to its database document
        :raises EngineException: if a referenced vnfd-id does not exist in this project
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check every constituent-cpd-id of a df's vnf-profiles is a valid VNFD ext-cpd.

        :param df: one deployment flavour of the NSD
        :param vnfds_index: map vnfd-id -> VNFD document (from _get_descriptor_constituent_vnfds_index)
        :raises EngineException: on a constituent-cpd-id not present in the VNFD ext-cpd list
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            # NOTE(review): assumes vnf_profile["vnfd-id"] is present in vnfds_index
            # (guaranteed by the earlier vnfd-id validation) — a missing entry would
            # raise AttributeError below; confirm against callers.
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit conflict checks, then validate NSD->VNFD dependencies.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after applying the edit
        :param edit_content: the edit payload
        :param _id: internal id of the descriptor
        :return: the (possibly adjusted) final content
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated descriptor revisions ("nsds_revisions") from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # revision ids are derived from _id, hence the regex match on the prefix
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    @staticmethod
    def extract_day12_primitives(nsd: dict) -> dict:
        """Removes the day12 primitives from the NSD descriptors

        Args:
            nsd (dict):    Descriptor as a dictionary

        Returns:
            nsd (dict):    Cleared NSD
        """
        if nsd.get("ns-configuration"):
            for key in [
                "config-primitive",
                "initial-config-primitive",
                "terminate-config-primitive",
            ]:
                nsd["ns-configuration"].pop(key, None)
        return nsd

    def remove_modifiable_items(self, nsd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptors

        It calls different extract functions according to different update types
        to clear all the modifiable items from NSD

        Args:
            nsd (dict):  Descriptor as a dictionary

        Returns:
            nsd (dict):  Descriptor which does not include modifiable contents
        """
        # unwrap any number of {"nsd": ...} envelopes and a possible list wrapper
        while isinstance(nsd, dict) and nsd.get("nsd"):
            nsd = nsd["nsd"]
        if isinstance(nsd, list):
            nsd = nsd[0]
        nsd.pop("_admin", None)
        # If the more extractions need to be done from NSD,
        # the new extract methods could be appended to below list.
        for extract_function in [self.extract_day12_primitives]:
            nsd_temp = extract_function(nsd)
            nsd = nsd_temp
        return nsd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new NSD descriptors and validates the new descriptor

        Args:
            old_descriptor_directory:   Directory of descriptor which is in-use
            new_descriptor_directory:   Directory of descriptor which is proposed to update (new revision)

        Returns:
            None

        Raises:
            EngineException:    In case of error if the changes are not allowed
        """

        try:
            # If NSD does not exist in DB, or it is not in use by any NS,
            # validation is not required.
            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:
                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:
                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    if old_content and new_content:
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            # NOTE(review): if the diff has no "values_changed" key
                            # (only added/removed items), .get() returns None and the
                            # AttributeError below is re-raised with a generic message
                            # by the except clause — confirm whether this is intended.
                            changed_nodes = functools.reduce(
                                lambda a, b: a + ", " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the ns descriptor. ",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "NS Descriptor could not be processed with error: {}.".format(e)
            )

    def sol005_projection(self, data):
        """Expose _admin state fields and HATEOAS links in SOL005 format.

        :param data: descriptor document to project (mutated in place)
        :return: result of the parent projection
        """
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1772
1773
class NstTopic(DescriptorTopic):
    """Descriptor topic handling Network Slice Templates (NST)."""

    # Database collection, kafka topic and quota bucket used by the base class
    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate the NST content against the NST YANG model via pyangbind.

        :param item: topic name (kept for interface compatibility, not used here)
        :param data: NST content as a dict
        :param force: if True, unknown YANG nodes are skipped instead of failing
        :return: the descriptor normalized by the YANG round-trip
        :raises EngineException: on any validation failure
        """
        try:
            nst_model = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=nst_model,
                path_helper=True,
                skip_unknown=force,
            )
            serialized = pybindJSON.dumps(nst_model, mode="ietf")
            return self._remove_envelop(yaml.safe_load(serialized))
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nst'/'nst:nst' envelope and return the bare descriptor dict."""
        if not indata:
            return {}

        unwrapped = indata
        if unwrapped.get("nst"):
            nst_list = unwrapped["nst"]
            if not isinstance(nst_list, list) or len(nst_list) != 1:
                raise EngineException("'nst' must be a list only one element")
            unwrapped = nst_list[0]
        elif unwrapped.get("nst:nst"):
            nst_list = unwrapped["nst:nst"]
            if not isinstance(nst_list, list) or len(nst_list) != 1:
                raise EngineException("'nst:nst' must be a list only one element")
            unwrapped = nst_list[0]
        return unwrapped

    def _validate_input_new(self, indata, storage_params, force=False):
        """Drop read-only state fields, then validate the NST via pyangbind."""
        for state_field in ("onboardingState", "operationalState", "usageState"):
            indata.pop(state_field, None)
        validated = self.pyangbind_validation("nsts", indata, force)
        return validated.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for subnet in descriptor["netslice-subnet"]:
            referenced_nsd = subnet["nsd-ref"]
            query = self._get_project_filter(session)
            query["id"] = referenced_nsd
            if not self.db.get_list("nsds", query):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(referenced_nsd),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit conflict checks, then validate NST->NSD references."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )
        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        usage_query = self._get_project_filter(session)
        usage_query["_admin.nst-id"] = _id
        if self.db.get_list("nsis", usage_query):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Expose _admin state fields and HATEOAS links in SOL005 format."""
        admin = data["_admin"]
        data["onboardingState"] = admin["onboardingState"]
        data["operationalState"] = admin["operationalState"]
        data["usageState"] = admin["usageState"]

        data["_links"] = {
            "self": {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])},
            "nst": {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])},
        }

        return super().sol005_projection(data)
1891
1892
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Units (PDU)."""

    # Database collection, kafka topic, quota bucket and validation schemas
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the common _admin fields plus the PDU lifecycle state fields."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        usage_query = self._get_project_filter(session)
        usage_query["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_query):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1928
1929
class VnfPkgOpTopic(BaseTopic):
    """Topic handling VNF package operations (KDU operations on a VNF package).

    Only creation and read are supported; edit and delete are explicitly rejected.
    """

    # Database collection and kafka topic; note messages are published on "vnfd"
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    # editing package operations is not allowed, hence no edit schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Always rejects: package operations cannot be edited."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Always rejects: package operations cannot be deleted."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Always rejects: package operations cannot be bulk-deleted."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
             op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # for-else: the else branch runs only when no kdu matches kdu_name
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # a "repo/name" reference implies an external k8s repo; a bare name does not
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # resolve the repo name to its database record (reusing the project filter)
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # both SOL005 time fields start at the _admin creation timestamp
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None