Bug 1989 Do not use trailing / for directory
[osm/NBI.git] / osm_nbi / descriptor_topics.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import tarfile
17 import yaml
18 import json
19 import copy
20 import os
21 import shutil
22
23 # import logging
24 from hashlib import md5
25 from osm_common.dbbase import DbException, deep_update_rfc7396
26 from http import HTTPStatus
27 from time import time
28 from uuid import uuid4
29 from re import fullmatch
30 from zipfile import ZipFile
31 from osm_nbi.validation import (
32 ValidationError,
33 pdu_new_schema,
34 pdu_edit_schema,
35 validate_input,
36 vnfpkgop_new_schema,
37 )
38 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
39 from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
40 from osm_im.nst import nst as nst_im
41 from pyangbind.lib.serialise import pybindJSONDecoder
42 import pyangbind.lib.pybindJSON as pybindJSON
43 from osm_nbi import utils
44
45 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
46
47
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor topics (VNFD/NSD/NST): two-step
    SOL005 creation (empty entry + content upload), package storage on the
    file system, and revision bookkeeping in ``_admin``."""

    def __init__(self, db, fs, msg, auth):

        BaseTopic.__init__(self, db, fs, msg, auth)
52
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Validate the merged descriptor content before persisting an edit.

        Runs the base-topic checks, verifies that every list in the descriptor
        uses unique "id"/"name" values, re-serializes the descriptor through
        the model validator (skipped result when session["force"]), and checks
        the descriptor "id" is not already used by another entry of the
        project.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: the raw edit request content
        :param _id: internal id of the descriptor being edited
        :return: the serialized final content (with internal keys restored)
        :raises EngineException: on duplicated identifiers or id conflict
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk lists of dicts. Within one list the
            # discriminator key is taken from the first item ("id" if present,
            # otherwise "name"); later items must not repeat its value.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" is always present in final_content;
        # a KeyError would be raised here otherwise — confirm against callers.
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            # exclude the entry being edited from the duplicate search
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        self.topic[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content
124
125 @staticmethod
126 def format_on_new(content, project_id=None, make_public=False):
127 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
128 content["_admin"]["onboardingState"] = "CREATED"
129 content["_admin"]["operationalState"] = "DISABLED"
130 content["_admin"]["usageState"] = "NOT_IN_USE"
131
132 def delete_extra(self, session, _id, db_content, not_send_msg=None):
133 """
134 Deletes file system storage associated with the descriptor
135 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
136 :param _id: server internal id
137 :param db_content: The database content of the descriptor
138 :param not_send_msg: To not send message (False) or store content (list) instead
139 :return: None if ok or raises EngineException with the problem
140 """
141 self.fs.file_delete(_id, ignore_non_exist=True)
142 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder
143 # Remove file revisions
144 if "revision" in db_content["_admin"]:
145 revision = db_content["_admin"]["revision"]
146 while revision > 0:
147 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
148 revision = revision - 1
149
150
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """Return the single descriptor of this topic with the given SOL006 id.

        First looks for a descriptor owned by the session's project; if none
        matches, a second lookup is performed (intended for public entries).

        :param db: database driver
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: database collection ("vnfds", "nsds", ...)
        :param id: descriptor "id" field (not the internal _id)
        :return: the descriptor content
        :raises DbException: NOT_FOUND if none matches; CONFLICT if several do
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this filter is built exactly like the one above; a
        # public-specific filter seems intended here — confirm against
        # BaseTopic._get_project_filter.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )
185
    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # Only user-provided metadata is stored at creation time; the real
        # descriptor arrives later via upload_content(), starting at revision 0.
        content = {"_admin": {
            "userDefinedData": indata,
            "revision": 0
        }}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        # register for rollback in case a later step of the request fails
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None
226
227 def upload_content(self, session, _id, indata, kwargs, headers):
228 """
229 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
230 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
231 :param _id : the nsd,vnfd is already created, this is the id
232 :param indata: http body request
233 :param kwargs: user query string to override parameters. NOT USED
234 :param headers: http request headers
235 :return: True if package is completely uploaded or False if partial content has been uploded
236 Raise exception on error
237 """
238 # Check that _id exists and it is valid
239 current_desc = self.show(session, _id)
240
241 content_range_text = headers.get("Content-Range")
242 expected_md5 = headers.get("Content-File-MD5")
243 compressed = None
244 content_type = headers.get("Content-Type")
245 if (
246 content_type
247 and "application/gzip" in content_type
248 or "application/x-gzip" in content_type
249 ):
250 compressed = "gzip"
251 if (
252 content_type
253 and "application/zip" in content_type
254 ):
255 compressed = "zip"
256 filename = headers.get("Content-Filename")
257 if not filename and compressed:
258 filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
259 elif not filename:
260 filename = "package"
261
262 revision = 1
263 if "revision" in current_desc["_admin"]:
264 revision = current_desc["_admin"]["revision"] + 1
265
266 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
267 file_pkg = None
268 error_text = ""
269 try:
270 if content_range_text:
271 content_range = (
272 content_range_text.replace("-", " ").replace("/", " ").split()
273 )
274 if (
275 content_range[0] != "bytes"
276 ): # TODO check x<y not negative < total....
277 raise IndexError()
278 start = int(content_range[1])
279 end = int(content_range[2]) + 1
280 total = int(content_range[3])
281 else:
282 start = 0
283 # Rather than using a temp folder, we will store the package in a folder based on
284 # the current revision.
285 proposed_revision_path = (
286 _id + ":" + str(revision)
287 ) # all the content is upload here and if ok, it is rename from id_ to is folder
288
289 if start:
290 if not self.fs.file_exists(proposed_revision_path, "dir"):
291 raise EngineException(
292 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
293 )
294 else:
295 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
296 self.fs.mkdir(proposed_revision_path)
297
298 storage = self.fs.get_params()
299 storage["folder"] = _id
300
301 file_path = (proposed_revision_path, filename)
302 if self.fs.file_exists(file_path, "file"):
303 file_size = self.fs.file_size(file_path)
304 else:
305 file_size = 0
306 if file_size != start:
307 raise EngineException(
308 "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
309 file_size, start
310 ),
311 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
312 )
313 file_pkg = self.fs.file_open(file_path, "a+b")
314 if isinstance(indata, dict):
315 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
316 file_pkg.write(indata_text.encode(encoding="utf-8"))
317 else:
318 indata_len = 0
319 while True:
320 indata_text = indata.read(4096)
321 indata_len += len(indata_text)
322 if not indata_text:
323 break
324 file_pkg.write(indata_text)
325 if content_range_text:
326 if indata_len != end - start:
327 raise EngineException(
328 "Mismatch between Content-Range header {}-{} and body length of {}".format(
329 start, end - 1, indata_len
330 ),
331 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
332 )
333 if end != total:
334 # TODO update to UPLOADING
335 return False
336
337 # PACKAGE UPLOADED
338 if expected_md5:
339 file_pkg.seek(0, 0)
340 file_md5 = md5()
341 chunk_data = file_pkg.read(1024)
342 while chunk_data:
343 file_md5.update(chunk_data)
344 chunk_data = file_pkg.read(1024)
345 if expected_md5 != file_md5.hexdigest():
346 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
347 file_pkg.seek(0, 0)
348 if compressed == "gzip":
349 tar = tarfile.open(mode="r", fileobj=file_pkg)
350 descriptor_file_name = None
351 for tarinfo in tar:
352 tarname = tarinfo.name
353 tarname_path = tarname.split("/")
354 if (
355 not tarname_path[0] or ".." in tarname_path
356 ): # if start with "/" means absolute path
357 raise EngineException(
358 "Absolute path or '..' are not allowed for package descriptor tar.gz"
359 )
360 if len(tarname_path) == 1 and not tarinfo.isdir():
361 raise EngineException(
362 "All files must be inside a dir for package descriptor tar.gz"
363 )
364 if (
365 tarname.endswith(".yaml")
366 or tarname.endswith(".json")
367 or tarname.endswith(".yml")
368 ):
369 storage["pkg-dir"] = tarname_path[0]
370 if len(tarname_path) == 2:
371 if descriptor_file_name:
372 raise EngineException(
373 "Found more than one descriptor file at package descriptor tar.gz"
374 )
375 descriptor_file_name = tarname
376 if not descriptor_file_name:
377 raise EngineException(
378 "Not found any descriptor file at package descriptor tar.gz"
379 )
380 storage["descriptor"] = descriptor_file_name
381 storage["zipfile"] = filename
382 self.fs.file_extract(tar, proposed_revision_path)
383 with self.fs.file_open(
384 (proposed_revision_path, descriptor_file_name), "r"
385 ) as descriptor_file:
386 content = descriptor_file.read()
387 elif compressed == "zip":
388 zipfile = ZipFile(file_pkg)
389 descriptor_file_name = None
390 for package_file in zipfile.infolist():
391 zipfilename = package_file.filename
392 file_path = zipfilename.split("/")
393 if (
394 not file_path[0] or ".." in zipfilename
395 ): # if start with "/" means absolute path
396 raise EngineException(
397 "Absolute path or '..' are not allowed for package descriptor zip"
398 )
399
400 if (
401 (
402 zipfilename.endswith(".yaml")
403 or zipfilename.endswith(".json")
404 or zipfilename.endswith(".yml")
405 ) and (
406 zipfilename.find("/") < 0
407 or zipfilename.find("Definitions") >= 0
408 )
409 ):
410 storage["pkg-dir"] = ""
411 if descriptor_file_name:
412 raise EngineException(
413 "Found more than one descriptor file at package descriptor zip"
414 )
415 descriptor_file_name = zipfilename
416 if not descriptor_file_name:
417 raise EngineException(
418 "Not found any descriptor file at package descriptor zip"
419 )
420 storage["descriptor"] = descriptor_file_name
421 storage["zipfile"] = filename
422 self.fs.file_extract(zipfile, proposed_revision_path)
423
424 with self.fs.file_open(
425 (proposed_revision_path, descriptor_file_name), "r"
426 ) as descriptor_file:
427 content = descriptor_file.read()
428 else:
429 content = file_pkg.read()
430 storage["descriptor"] = descriptor_file_name = filename
431
432 if descriptor_file_name.endswith(".json"):
433 error_text = "Invalid json format "
434 indata = json.load(content)
435 else:
436 error_text = "Invalid yaml format "
437 indata = yaml.load(content, Loader=yaml.SafeLoader)
438
439 # Need to close the file package here so it can be copied from the
440 # revision to the current, unrevisioned record
441 if file_pkg:
442 file_pkg.close()
443 file_pkg = None
444
445 # Fetch both the incoming, proposed revision and the original revision so we
446 # can call a validate method to compare them
447 current_revision_path = _id + "/"
448 self.fs.sync(from_path=current_revision_path)
449 self.fs.sync(from_path=proposed_revision_path)
450
451 if revision > 1:
452 try:
453 self._validate_descriptor_changes(
454 descriptor_file_name,
455 current_revision_path,
456 proposed_revision_path)
457 except Exception as e:
458 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
459 shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
460 # Only delete the new revision. We need to keep the original version in place
461 # as it has not been changed.
462 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
463 raise e
464
465 # Copy the revision to the active package name by its original id
466 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
467 os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
468 self.fs.file_delete(current_revision_path, ignore_non_exist=True)
469 self.fs.mkdir(current_revision_path)
470 self.fs.reverse_sync(from_path=current_revision_path)
471 shutil.rmtree(self.fs.path + _id)
472
473 current_desc["_admin"]["storage"] = storage
474 current_desc["_admin"]["onboardingState"] = "ONBOARDED"
475 current_desc["_admin"]["operationalState"] = "ENABLED"
476
477 indata = self._remove_envelop(indata)
478
479 # Override descriptor with query string kwargs
480 if kwargs:
481 self._update_input_with_kwargs(indata, kwargs)
482
483 deep_update_rfc7396(current_desc, indata)
484 current_desc = self.check_conflict_on_edit(
485 session, current_desc, indata, _id=_id
486 )
487 current_desc["_admin"]["modified"] = time()
488 current_desc["_admin"]["revision"] = revision
489 self.db.replace(self.topic, _id, current_desc)
490
491 # Store a copy of the package as a point in time revision
492 revision_desc = dict(current_desc)
493 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
494 self.db.create(self.topic + "_revisions", revision_desc)
495
496 indata["_id"] = _id
497 self._send_msg("edited", indata)
498
499 # TODO if descriptor has changed because kwargs update content and remove cached zip
500 # TODO if zip is not present creates one
501 return True
502
503 except EngineException:
504 raise
505 except IndexError:
506 raise EngineException(
507 "invalid Content-Range header format. Expected 'bytes start-end/total'",
508 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
509 )
510 except IOError as e:
511 raise EngineException(
512 "invalid upload transaction sequence: '{}'".format(e),
513 HTTPStatus.BAD_REQUEST,
514 )
515 except tarfile.ReadError as e:
516 raise EngineException(
517 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
518 )
519 except (ValueError, yaml.YAMLError) as e:
520 raise EngineException(error_text + str(e))
521 except ValidationError as e:
522 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
523 finally:
524 if file_pkg:
525 file_pkg.close()
526
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # content is only retrievable once the package has been uploaded
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            # a directory path returns its listing; a file path is streamed
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decision table:
        #   pkg type    accepts ZIP  accepts TEXT  -> result
        #   many files  yes          X             -> zip
        #               no           yes           -> error
        #   one file    yes          no            -> zip
        #               X            yes           -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )
617
618 def _remove_yang_prefixes_from_descriptor(self, descriptor):
619 new_descriptor = {}
620 for k, v in descriptor.items():
621 new_v = v
622 if isinstance(v, dict):
623 new_v = self._remove_yang_prefixes_from_descriptor(v)
624 elif isinstance(v, list):
625 new_v = list()
626 for x in v:
627 if isinstance(x, dict):
628 new_v.append(self._remove_yang_prefixes_from_descriptor(x))
629 else:
630 new_v.append(x)
631 new_descriptor[k.split(":")[-1]] = new_v
632 return new_descriptor
633
634 def pyangbind_validation(self, item, data, force=False):
635 raise EngineException(
636 "Not possible to validate '{}' item".format(item),
637 http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
638 )
639
640 def _validate_input_edit(self, indata, content, force=False):
641 # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
642 if "_id" in indata:
643 indata.pop("_id")
644 if "_admin" not in indata:
645 indata["_admin"] = {}
646
647 if "operationalState" in indata:
648 if indata["operationalState"] in ("ENABLED", "DISABLED"):
649 indata["_admin"]["operationalState"] = indata.pop("operationalState")
650 else:
651 raise EngineException(
652 "State '{}' is not a valid operational state".format(
653 indata["operationalState"]
654 ),
655 http_code=HTTPStatus.BAD_REQUEST,
656 )
657
658 # In the case of user defined data, we need to put the data in the root of the object
659 # to preserve current expected behaviour
660 if "userDefinedData" in indata:
661 data = indata.pop("userDefinedData")
662 if type(data) == dict:
663 indata["_admin"]["userDefinedData"] = data
664 else:
665 raise EngineException(
666 "userDefinedData should be an object, but is '{}' instead".format(
667 type(data)
668 ),
669 http_code=HTTPStatus.BAD_REQUEST,
670 )
671
672 if (
673 "operationalState" in indata["_admin"]
674 and content["_admin"]["operationalState"]
675 == indata["_admin"]["operationalState"]
676 ):
677 raise EngineException(
678 "operationalState already {}".format(
679 content["_admin"]["operationalState"]
680 ),
681 http_code=HTTPStatus.CONFLICT,
682 )
683
684 return indata
685
686 def _validate_descriptor_changes(self,
687 descriptor_file_name,
688 old_descriptor_directory,
689 new_descriptor_directory):
690 # Todo: compare changes and throw a meaningful exception for the user to understand
691 # Example:
692 # raise EngineException(
693 # "Error in validating new descriptor: <NODE> cannot be modified",
694 # http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
695 # )
696 pass
697
class VnfdTopic(DescriptorTopic):
    """Descriptor topic for VNF packages (ETSI SOL006 VNFD)."""

    topic = "vnfds"  # database collection name
    topic_msg = "vnfd"  # message topic name used for notifications

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)
704
    def pyangbind_validation(self, item, data, force=False):
        """Validate a VNFD against the ETSI SOL006 YANG model using pyangbind.

        :param item: topic name ("vnfds"), used only for error reporting
        :param data: de-enveloped descriptor dict
        :param force: when True, unknown leaves are skipped instead of rejected
        :return: `data` deep-updated with the canonical serialized output
        :raises EngineException: UNPROCESSABLE_ENTITY for pre-SOL006 format or
            any model validation error
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            # round-trip through yaml to obtain plain dicts, then strip the
            # envelope and the YANG prefixes pybind adds to the keys
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
730
731 @staticmethod
732 def _descriptor_data_is_in_old_format(data):
733 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
734
735 @staticmethod
736 def _remove_envelop(indata=None):
737 if not indata:
738 return {}
739 clean_indata = indata
740
741 if clean_indata.get("etsi-nfv-vnfd:vnfd"):
742 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
743 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
744 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
745 elif clean_indata.get("vnfd"):
746 if not isinstance(clean_indata["vnfd"], dict):
747 raise EngineException("'vnfd' must be dict")
748 clean_indata = clean_indata["vnfd"]
749
750 return clean_indata
751
752 def check_conflict_on_edit(self, session, final_content, edit_content, _id):
753 final_content = super().check_conflict_on_edit(
754 session, final_content, edit_content, _id
755 )
756
757 # set type of vnfd
758 contains_pdu = False
759 contains_vdu = False
760 for vdu in get_iterable(final_content.get("vdu")):
761 if vdu.get("pdu-type"):
762 contains_pdu = True
763 else:
764 contains_vdu = True
765 if contains_pdu:
766 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
767 elif contains_vdu:
768 final_content["_admin"]["type"] = "vnfd"
769 # if neither vud nor pdu do not fill type
770 return final_content
771
772 def check_conflict_on_del(self, session, _id, db_content):
773 """
774 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
775 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
776 that uses this vnfd
777 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
778 :param _id: vnfd internal id
779 :param db_content: The database content of the _id.
780 :return: None or raises EngineException with the conflict
781 """
782 if session["force"]:
783 return
784 descriptor = db_content
785 descriptor_id = descriptor.get("id")
786 if not descriptor_id: # empty vnfd not uploaded
787 return
788
789 _filter = self._get_project_filter(session)
790
791 # check vnfrs using this vnfd
792 _filter["vnfd-id"] = _id
793 if self.db.get_list("vnfrs", _filter):
794 raise EngineException(
795 "There is at least one VNF instance using this descriptor",
796 http_code=HTTPStatus.CONFLICT,
797 )
798
799 # check NSD referencing this VNFD
800 del _filter["vnfd-id"]
801 _filter["vnfd-id"] = descriptor_id
802 if self.db.get_list("nsds", _filter):
803 raise EngineException(
804 "There is at least one NS package referencing this descriptor",
805 http_code=HTTPStatus.CONFLICT,
806 )
807
    def _validate_input_new(self, indata, storage_params, force=False):
        """Full validation of a new or re-uploaded VNFD.

        Removes output-only SOL005 attributes, validates the descriptor
        against the SOL006 model, then cross-checks internal references and
        packaged artifacts (cloud-init files, charms).

        :param indata: de-enveloped descriptor
        :param storage_params: _admin.storage of the descriptor (package location)
        :param force: propagated to pyangbind validation to skip unknown fields
        :return: the validated (canonicalized) descriptor
        """
        # these attributes are server-generated; drop them if the client sent them
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): called once per VDU although it does not take the
            # vdu as argument — looks redundant per iteration; confirm intent.
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)

        return indata
832
833 @staticmethod
834 def validate_mgmt_interface_connection_point(indata):
835 if not indata.get("vdu"):
836 return
837 if not indata.get("mgmt-cp"):
838 raise EngineException(
839 "'mgmt-cp' is a mandatory field and it is not defined",
840 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
841 )
842
843 for cp in get_iterable(indata.get("ext-cpd")):
844 if cp["id"] == indata["mgmt-cp"]:
845 break
846 else:
847 raise EngineException(
848 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
849 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
850 )
851
852 @staticmethod
853 def validate_vdu_internal_connection_points(vdu):
854 int_cpds = set()
855 for cpd in get_iterable(vdu.get("int-cpd")):
856 cpd_id = cpd.get("id")
857 if cpd_id and cpd_id in int_cpds:
858 raise EngineException(
859 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
860 vdu["id"], cpd_id
861 ),
862 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
863 )
864 int_cpds.add(cpd_id)
865
    @staticmethod
    def validate_external_connection_points(indata):
        """Check that ext-cpd ids are unique and that every ext-cpd int-cpd
        reference points to an existing (vdu id, int-cpd id) pair.

        :param indata: de-enveloped VNFD
        :raises EngineException: UNPROCESSABLE_ENTITY on duplicates or
            dangling references
        """
        # collect every (vdu id, int-cpd id) pair declared by the VDUs
        all_vdus_int_cpds = set()
        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

        ext_cpds = set()
        for cpd in get_iterable(indata.get("ext-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in ext_cpds:
                raise EngineException(
                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            ext_cpds.add(cpd_id)

            int_cpd = cpd.get("int-cpd")
            if int_cpd:
                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                    raise EngineException(
                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                            cpd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
        # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
893
    def _validate_vdu_charms_in_package(self, storage_params, indata):
        """For every VDU whose day1-2 configuration declares a juju execution
        environment, check the package actually ships a charms folder.

        :param storage_params: _admin.storage of the descriptor
        :param indata: de-enveloped VNFD (reads "df" and "id")
        :raises EngineException: when a charm is declared but missing
        """
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        # a day1-2 entry applies to a VDU when their ids match
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            # both package layouts are accepted: "charms" and
                            # "Scripts/charms"
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )
919
920 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
921 if not vdu.get("cloud-init-file"):
922 return
923 if not self._validate_package_folders(
924 storage_params, "cloud_init", vdu["cloud-init-file"]
925 ) and not self._validate_package_folders(
926 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
927 ):
928 raise EngineException(
929 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
930 "package".format(indata["id"], vdu["id"])
931 )
932
    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """For the VNF-level day1-2 configuration (the entry whose id matches
        the VNFD id), check a declared juju charm exists in the package.

        :param storage_params: _admin.storage of the descriptor
        :param indata: de-enveloped VNFD
        :raises EngineException: when a charm is declared but missing
        """
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE: returns (not continues) on the first df lacking these
            # containers, so later dfs are not inspected — behaviour kept as-is
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # the VNF-level entry is the one whose id equals the VNFD id
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )
960
961 def _validate_package_folders(self, storage_params, folder, file=None):
962 if not storage_params:
963 return False
964 elif not storage_params.get("pkg-dir"):
965 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
966 f = "{}_/{}".format(
967 storage_params["folder"], folder
968 )
969 else:
970 f = "{}/{}".format(
971 storage_params["folder"], folder
972 )
973 if file:
974 return self.fs.file_exists("{}/{}".format(f, file), "file")
975 else:
976 if self.fs.file_exists(f, "dir"):
977 if self.fs.dir_ls(f):
978 return True
979 return False
980 else:
981 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
982 f = "{}_/{}/{}".format(
983 storage_params["folder"], storage_params["pkg-dir"], folder
984 )
985 else:
986 f = "{}/{}/{}".format(
987 storage_params["folder"], storage_params["pkg-dir"], folder
988 )
989 if file:
990 return self.fs.file_exists("{}/{}".format(f, file), "file")
991 else:
992 if self.fs.file_exists(f, "dir"):
993 if self.fs.dir_ls(f):
994 return True
995 return False
996
997 @staticmethod
998 def validate_internal_virtual_links(indata):
999 all_ivld_ids = set()
1000 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1001 ivld_id = ivld.get("id")
1002 if ivld_id and ivld_id in all_ivld_ids:
1003 raise EngineException(
1004 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
1005 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1006 )
1007 else:
1008 all_ivld_ids.add(ivld_id)
1009
1010 for vdu in get_iterable(indata.get("vdu")):
1011 for int_cpd in get_iterable(vdu.get("int-cpd")):
1012 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
1013 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
1014 raise EngineException(
1015 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
1016 "int-virtual-link-desc".format(
1017 vdu["id"], int_cpd["id"], int_cpd_ivld_id
1018 ),
1019 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1020 )
1021
1022 for df in get_iterable(indata.get("df")):
1023 for vlp in get_iterable(df.get("virtual-link-profile")):
1024 vlp_ivld_id = vlp.get("id")
1025 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
1026 raise EngineException(
1027 "df[id='{}']:virtual-link-profile='{}' must match an existing "
1028 "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
1029 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1030 )
1031
1032 @staticmethod
1033 def validate_monitoring_params(indata):
1034 all_monitoring_params = set()
1035 for ivld in get_iterable(indata.get("int-virtual-link-desc")):
1036 for mp in get_iterable(ivld.get("monitoring-parameters")):
1037 mp_id = mp.get("id")
1038 if mp_id and mp_id in all_monitoring_params:
1039 raise EngineException(
1040 "Duplicated monitoring-parameter id in "
1041 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
1042 ivld["id"], mp_id
1043 ),
1044 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1045 )
1046 else:
1047 all_monitoring_params.add(mp_id)
1048
1049 for vdu in get_iterable(indata.get("vdu")):
1050 for mp in get_iterable(vdu.get("monitoring-parameter")):
1051 mp_id = mp.get("id")
1052 if mp_id and mp_id in all_monitoring_params:
1053 raise EngineException(
1054 "Duplicated monitoring-parameter id in "
1055 "vdu[id='{}']:monitoring-parameter[id='{}']".format(
1056 vdu["id"], mp_id
1057 ),
1058 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1059 )
1060 else:
1061 all_monitoring_params.add(mp_id)
1062
1063 for df in get_iterable(indata.get("df")):
1064 for mp in get_iterable(df.get("monitoring-parameter")):
1065 mp_id = mp.get("id")
1066 if mp_id and mp_id in all_monitoring_params:
1067 raise EngineException(
1068 "Duplicated monitoring-parameter id in "
1069 "df[id='{}']:monitoring-parameter[id='{}']".format(
1070 df["id"], mp_id
1071 ),
1072 http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
1073 )
1074 else:
1075 all_monitoring_params.add(mp_id)
1076
    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate the scaling configuration of a vnfd.

        Two checks per df:scaling-aspect:
        1. every scaling-criteria:vnf-monitoring-param-ref points to a
           monitoring parameter declared somewhere in the descriptor;
        2. every scaling-config-action has a matching day1-2 configuration
           whose config-primitive names include the referenced primitive.

        :param indata: vnfd descriptor content
        :raises: EngineException (422) on any dangling reference
        """
        # Collect every monitoring-parameter id declared anywhere in the
        # descriptor (ivld, vdu and df sections)
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Check 1: scaling-criteria must reference known params
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                # Check 2: scaling-config-action requires a day1-2
                # configuration matching the vnfd id
                for sca in get_iterable(sa.get("scaling-config-action")):
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # The referenced primitive must exist in some day1-2
                    # configuration; the for/else raises when no config's
                    # primitive list contains it
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
1157
1158 def delete_extra(self, session, _id, db_content, not_send_msg=None):
1159 """
1160 Deletes associate file system storage (via super)
1161 Deletes associated vnfpkgops from database.
1162 :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
1163 :param _id: server internal id
1164 :param db_content: The database content of the descriptor
1165 :return: None
1166 :raises: FsException in case of error while deleting associated storage
1167 """
1168 super().delete_extra(session, _id, db_content, not_send_msg)
1169 self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
1170 self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
1171
1172 def sol005_projection(self, data):
1173 data["onboardingState"] = data["_admin"]["onboardingState"]
1174 data["operationalState"] = data["_admin"]["operationalState"]
1175 data["usageState"] = data["_admin"]["usageState"]
1176
1177 links = {}
1178 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
1179 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
1180 links["packageContent"] = {
1181 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
1182 }
1183 data["_links"] = links
1184
1185 return super().sol005_projection(data)
1186
1187
class NsdTopic(DescriptorTopic):
    """Topic handling Network Service Descriptors (NSD), ETSI SOL006 format."""

    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 pyangbind model.

        :param item: topic name (kept for interface uniformity)
        :param data: descriptor content to validate
        :param force: when True, unknown fields are skipped instead of rejected
        :return: the validated and normalized descriptor content
        :raises: EngineException if the descriptor uses the deprecated
            pre-SOL006 format or does not conform to the model
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # df[0]:vnf-profile is saved before the model round-trip and
            # restored afterwards (see below)
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        """Return True when the descriptor uses the deprecated (pre-SOL006)
        'nsd-catalog' envelope."""
        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nsd'/'etsi-nfv-nsd:nsd' envelope and return the single
        inner NSD content.

        :raises: EngineException if the envelope does not contain exactly
            one nsd
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nsd"):
            clean_indata = clean_indata["nsd"]
        elif clean_indata.get("etsi-nfv-nsd:nsd"):
            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
        if clean_indata.get("nsd"):
            if (
                not isinstance(clean_indata["nsd"], list)
                or len(clean_indata["nsd"]) != 1
            ):
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata["nsd"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD: model validation plus internal cross-reference
        checks.

        :param indata: descriptor content
        :param storage_params: _admin.storage section of the descriptor
        :param force: when True, relax model validation
        :return: the validated descriptor content
        """
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """Reject virtual-link-protocol-data on profiles of a vld flagged as
        mgmt-network.

        :param vld: a virtual-link-desc entry
        :param indata: the whole nsd content
        :raises: EngineException (422) on conflict
        """
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException(
                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                            "protocol-data You cannot set a virtual-link-protocol-data "
                            "when mgmt-network is True".format(df["id"], vlp["id"]),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Check every df:vnf-profile:vnfd-id appears in the nsd's vnfd-id
        list.

        :param indata: nsd content
        :raises: EngineException (422) on a dangling vnfd-id
        """
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException(
                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                        "does not match any vnfd-id".format(
                            df["id"], vnf_profile["id"], vnfd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance instead of type(...) == dict: accepts dict subclasses
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Build a {vnfd-id: vnfd} index for the constituent vnfds of the
        descriptor, verifying each one exists in the project.

        :raises: EngineException (409) for a non existing vnfd reference
        """
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException(
                        "Descriptor error at 'vnfd-id'='{}' references a non "
                        "existing vnfd".format(vnfd_id),
                        http_code=HTTPStatus.CONFLICT,
                    )
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Check every constituent-cpd-id used by the df's vnf-profiles exists
        as an ext-cpd in the corresponding vnfd.

        :param df: deployment flavor of the nsd
        :param vnfds_index: {vnfd-id: vnfd} index as built by
            _get_descriptor_constituent_vnfds_index
        :raises: EngineException (422) on a dangling constituent-cpd-id
        """
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get("id"):
                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))

            for virtual_link in get_iterable(
                vnf_profile.get("virtual-link-connectivity")
            ):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException(
                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                            "non existing ext-cpd:id inside vnfd '{}'".format(
                                df["id"],
                                vnf_profile["id"],
                                virtual_link["virtual-link-profile-id"],
                                vl_cpd_id,
                                vnfd["id"],
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base checks, then verify descriptor dependencies.

        :return: the (possibly updated) final content
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)

        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException(
                "There is at least one NS instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException(
                "There is at least one NetSlice Template referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super).
        Deletes associated descriptor revisions from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Revision entries have the descriptor _id as a prefix, hence the regex
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    def sol005_projection(self, data):
        """Add SOL005-mandated state fields and _links to the nsd view."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {
            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)
1474
1475
class NstTopic(DescriptorTopic):
    """Topic handling Network Slice Templates (NST)."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST against the pyangbind information model.

        :param item: topic name (kept for interface uniformity)
        :param data: descriptor content to validate
        :param force: when True, unknown fields are skipped instead of rejected
        :return: the validated and normalized template content
        :raises: EngineException if the template does not conform to the model
        """
        try:
            mynst = nst_im()
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the 'nst'/'nst:nst' envelope and return the single inner
        template content.

        :raises: EngineException if the envelope does not contain exactly
            one nst
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                # message wording kept consistent with NsdTopic._remove_envelop
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NST through the pyangbind model.

        :return: a shallow copy of the validated template content
        """
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base checks, then verify referenced nsds exist.

        :return: the (possibly updated) final content
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Add SOL005-mandated state fields and _links to the nst view."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1593
1594
class PduTopic(BaseTopic):
    """Topic handling Physical Deployment Units (PDU)."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Run the base formatting and set the initial _admin states of a
        freshly created PDU."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        initial_states = {
            "onboardingState": "CREATED",
            "operationalState": "ENABLED",
            "usageState": "NOT_IN_USE",
        }
        content["_admin"].update(initial_states)

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        usage_filter = self._get_project_filter(session)
        usage_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", usage_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )
1630
1631
class VnfPkgOpTopic(BaseTopic):
    """Topic handling VNF package operation occurrences (KDU upgrades, etc.).

    Entries are create-only: edit and delete are explicitly disabled.
    """

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Operation occurrences are immutable; always rejects."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Operation occurrences cannot be deleted; always rejects."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Operation occurrences cannot be deleted; always rejects."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    @staticmethod
    def _get_repo_name(chart_or_bundle):
        """Return the repository part of a '<repo>/<name>' reference, or None
        when the value does not follow that pattern."""
        match = fullmatch(r"([^/]*)/([^/]*)", chart_or_bundle)
        return match.group(1) if match else None

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
             op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the target kdu; for/else raises when no kdu matches
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        if helm_chart:
            indata["helm-chart"] = helm_chart
            repo_name = self._get_repo_name(helm_chart)
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            repo_name = self._get_repo_name(juju_bundle)
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # Resolve the k8s repo (if the chart/bundle is repo-qualified)
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # Notify the LCM about the new package operation
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None