| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1 | # -*- coding: utf-8 -*- |
| 2 | |
| tierno | d125caf | 2018-11-22 16:05:54 +0000 | [diff] [blame] | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | # you may not use this file except in compliance with the License. |
| 5 | # You may obtain a copy of the License at |
| 6 | # |
| 7 | # http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | # |
| 9 | # Unless required by applicable law or agreed to in writing, software |
| 10 | # distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
| 12 | # implied. |
| 13 | # See the License for the specific language governing permissions and |
| 14 | # limitations under the License. |
| 15 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 16 | import tarfile |
| 17 | import yaml |
| 18 | import json |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 19 | import importlib |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 20 | # import logging |
| 21 | from hashlib import md5 |
| 22 | from osm_common.dbbase import DbException, deep_update_rfc7396 |
| 23 | from http import HTTPStatus |
| delacruzramo | 26301bb | 2019-11-15 14:45:32 +0100 | [diff] [blame] | 24 | from time import time |
| delacruzramo | 271d200 | 2019-12-02 21:00:37 +0100 | [diff] [blame] | 25 | from uuid import uuid4 |
| 26 | from re import fullmatch |
| 27 | from osm_nbi.validation import ValidationError, pdu_new_schema, pdu_edit_schema, \ |
| 28 | validate_input, vnfpkgop_new_schema |
| tierno | 23acf40 | 2019-08-28 13:36:34 +0000 | [diff] [blame] | 29 | from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 30 | etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd") |
| 31 | etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd") |
| gcalvino | 70434c1 | 2018-11-27 15:17:04 +0100 | [diff] [blame] | 32 | from osm_im.nst import nst as nst_im |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 33 | from pyangbind.lib.serialise import pybindJSONDecoder |
| 34 | import pyangbind.lib.pybindJSON as pybindJSON |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 35 | |
| 36 | __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>" |
| 37 | |
| 38 | |
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor topics (VNFD, NSD, ...): onboarding, storage, validation."""

    def __init__(self, db, fs, msg, auth):
        """Keep the database, file-system, message-bus and auth handlers via the base class."""
        super().__init__(db, fs, msg, auth)
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 43 | |
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate the merged descriptor and check it does not conflict with existing ones.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edition
        :param edit_content: the edition content itself
        :param _id: internal database id of the descriptor being edited
        :return: None if ok; raises EngineException on duplicated identifiers inside the
            descriptor, pyangbind validation errors, or an "id" already used in the project
        """
        super().check_conflict_on_edit(session, final_content, edit_content, _id)

        def _check_unique_id_name(descriptor, position=""):
            # Recursively ensure that inside every list of the descriptor the "id"
            # (or "name" when the first entry has no "id") of each item is unique.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(list_item, "{}.{}[{}]"
                                                  .format(position, desc_key, index))
                        # Base case
                        # NOTE(review): the first item decides whether "id" or "name" is the
                        # de-duplication key; list_item.get() here assumes list items are dicts —
                        # a list of scalars would raise AttributeError. TODO confirm descriptors
                        # never carry scalar lists through this path.
                        if index == 0 and (list_item.get("id") or list_item.get("name")):
                            desc_item_id = "id" if list_item.get("id") else "name"
                        if desc_item_id and list_item.get(desc_item_id):
                            if list_item[desc_item_id] in used_ids:
                                position = "{}.{}[{}]".format(position, desc_key, index)
                                raise EngineException("Error: identifier {} '{}' is not unique and repeats at '{}'"
                                                      .format(desc_item_id, list_item[desc_item_id],
                                                              position), HTTPStatus.UNPROCESSABLE_ENTITY)
                            used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys (they are not part of the YANG model)
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(final_content, storage_params, session["force"])
        # 1.2. modify final_content with a serialized version
        final_content.clear()
        final_content.update(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            # forced operations skip the project-uniqueness check below
            return
        # 2. check that this id is not present for another descriptor of this project
        if "id" in edit_content:
            _filter = self._get_project_filter(session)
            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id  # exclude the descriptor being edited itself
            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
                                                                                              final_content["id"]),
                                      HTTPStatus.CONFLICT)
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 93 | |
| 94 | @staticmethod |
| 95 | def format_on_new(content, project_id=None, make_public=False): |
| 96 | BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public) |
| 97 | content["_admin"]["onboardingState"] = "CREATED" |
| 98 | content["_admin"]["operationalState"] = "DISABLED" |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 99 | content["_admin"]["usageState"] = "NOT_IN_USE" |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 100 | |
| tierno | bee3bad | 2019-12-05 12:26:01 +0000 | [diff] [blame] | 101 | def delete_extra(self, session, _id, db_content, not_send_msg=None): |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 102 | """ |
| 103 | Deletes file system storage associated with the descriptor |
| 104 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| 105 | :param _id: server internal id |
| 106 | :param db_content: The database content of the descriptor |
| tierno | bee3bad | 2019-12-05 12:26:01 +0000 | [diff] [blame] | 107 | :param not_send_msg: To not send message (False) or store content (list) instead |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 108 | :return: None if ok or raises EngineException with the problem |
| 109 | """ |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 110 | self.fs.file_delete(_id, ignore_non_exist=True) |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 111 | self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 112 | |
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Obtain a single descriptor by its SOL "id", searching first among those owned by
        the session project and, if none found, performing a second search.

        :param db: database handler
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name, e.g. "vnfds" or "nsds"
        :param id: descriptor "id" (not the internal "_id")
        :return: the descriptor content; raises DbException with NOT_FOUND or CONFLICT
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
                              HTTPStatus.CONFLICT)

        # not found any: try to find public
        # NOTE(review): this second query rebuilds the same project filter as above instead of a
        # public-only one, so it repeats the identical search — confirm whether a public filter
        # (e.g. on _admin public visibility) was intended here.
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
                topic[:-1], id), HTTPStatus.CONFLICT)
| 136 | |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 137 | def new(self, rollback, session, indata=None, kwargs=None, headers=None): |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 138 | """ |
| 139 | Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure. |
| 140 | Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content |
| 141 | (self.upload_content) |
| 142 | :param rollback: list to append created items at database in case a rollback may to be done |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 143 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 144 | :param indata: data to be inserted |
| 145 | :param kwargs: used to override the indata descriptor |
| 146 | :param headers: http request headers |
| tierno | bdebce9 | 2019-07-01 15:36:49 +0000 | [diff] [blame] | 147 | :return: _id, None: identity of the inserted data; and None as there is not any operation |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 148 | """ |
| 149 | |
| tierno | d774958 | 2020-05-28 10:41:10 +0000 | [diff] [blame] | 150 | # No needed to capture exceptions |
| 151 | # Check Quota |
| 152 | self.check_quota(session) |
| delacruzramo | 32bab47 | 2019-09-13 12:24:22 +0200 | [diff] [blame] | 153 | |
| tierno | d774958 | 2020-05-28 10:41:10 +0000 | [diff] [blame] | 154 | # _remove_envelop |
| 155 | if indata: |
| 156 | if "userDefinedData" in indata: |
| 157 | indata = indata['userDefinedData'] |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 158 | |
| tierno | d774958 | 2020-05-28 10:41:10 +0000 | [diff] [blame] | 159 | # Override descriptor with query string kwargs |
| 160 | self._update_input_with_kwargs(indata, kwargs) |
| 161 | # uncomment when this method is implemented. |
| 162 | # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors |
| 163 | # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"]) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 164 | |
| tierno | d774958 | 2020-05-28 10:41:10 +0000 | [diff] [blame] | 165 | content = {"_admin": {"userDefinedData": indata}} |
| 166 | self.format_on_new(content, session["project_id"], make_public=session["public"]) |
| 167 | _id = self.db.create(self.topic, content) |
| 168 | rollback.append({"topic": self.topic, "_id": _id}) |
| 169 | self._send_msg("created", {"_id": _id}) |
| 170 | return _id, None |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 171 | |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 172 | def upload_content(self, session, _id, indata, kwargs, headers): |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 173 | """ |
| 174 | Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract) |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 175 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 176 | :param _id : the nsd,vnfd is already created, this is the id |
| 177 | :param indata: http body request |
| 178 | :param kwargs: user query string to override parameters. NOT USED |
| 179 | :param headers: http request headers |
| tierno | 5a5c218 | 2018-11-20 12:27:42 +0000 | [diff] [blame] | 180 | :return: True if package is completely uploaded or False if partial content has been uploded |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 181 | Raise exception on error |
| 182 | """ |
| 183 | # Check that _id exists and it is valid |
| 184 | current_desc = self.show(session, _id) |
| 185 | |
| 186 | content_range_text = headers.get("Content-Range") |
| 187 | expected_md5 = headers.get("Content-File-MD5") |
| 188 | compressed = None |
| 189 | content_type = headers.get("Content-Type") |
| 190 | if content_type and "application/gzip" in content_type or "application/x-gzip" in content_type or \ |
| 191 | "application/zip" in content_type: |
| 192 | compressed = "gzip" |
| 193 | filename = headers.get("Content-Filename") |
| 194 | if not filename: |
| 195 | filename = "package.tar.gz" if compressed else "package" |
| 196 | # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266 |
| 197 | file_pkg = None |
| 198 | error_text = "" |
| 199 | try: |
| 200 | if content_range_text: |
| 201 | content_range = content_range_text.replace("-", " ").replace("/", " ").split() |
| 202 | if content_range[0] != "bytes": # TODO check x<y not negative < total.... |
| 203 | raise IndexError() |
| 204 | start = int(content_range[1]) |
| 205 | end = int(content_range[2]) + 1 |
| 206 | total = int(content_range[3]) |
| 207 | else: |
| 208 | start = 0 |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 209 | temp_folder = _id + "_" # all the content is upload here and if ok, it is rename from id_ to is folder |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 210 | |
| 211 | if start: |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 212 | if not self.fs.file_exists(temp_folder, 'dir'): |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 213 | raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND) |
| 214 | else: |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 215 | self.fs.file_delete(temp_folder, ignore_non_exist=True) |
| 216 | self.fs.mkdir(temp_folder) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 217 | |
| 218 | storage = self.fs.get_params() |
| 219 | storage["folder"] = _id |
| 220 | |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 221 | file_path = (temp_folder, filename) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 222 | if self.fs.file_exists(file_path, 'file'): |
| 223 | file_size = self.fs.file_size(file_path) |
| 224 | else: |
| 225 | file_size = 0 |
| 226 | if file_size != start: |
| 227 | raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format( |
| 228 | file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE) |
| 229 | file_pkg = self.fs.file_open(file_path, 'a+b') |
| 230 | if isinstance(indata, dict): |
| 231 | indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False) |
| 232 | file_pkg.write(indata_text.encode(encoding="utf-8")) |
| 233 | else: |
| 234 | indata_len = 0 |
| 235 | while True: |
| 236 | indata_text = indata.read(4096) |
| 237 | indata_len += len(indata_text) |
| 238 | if not indata_text: |
| 239 | break |
| 240 | file_pkg.write(indata_text) |
| 241 | if content_range_text: |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 242 | if indata_len != end - start: |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 243 | raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format( |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 244 | start, end - 1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 245 | if end != total: |
| 246 | # TODO update to UPLOADING |
| 247 | return False |
| 248 | |
| 249 | # PACKAGE UPLOADED |
| 250 | if expected_md5: |
| 251 | file_pkg.seek(0, 0) |
| 252 | file_md5 = md5() |
| 253 | chunk_data = file_pkg.read(1024) |
| 254 | while chunk_data: |
| 255 | file_md5.update(chunk_data) |
| 256 | chunk_data = file_pkg.read(1024) |
| 257 | if expected_md5 != file_md5.hexdigest(): |
| 258 | raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT) |
| 259 | file_pkg.seek(0, 0) |
| 260 | if compressed == "gzip": |
| 261 | tar = tarfile.open(mode='r', fileobj=file_pkg) |
| 262 | descriptor_file_name = None |
| 263 | for tarinfo in tar: |
| 264 | tarname = tarinfo.name |
| 265 | tarname_path = tarname.split("/") |
| 266 | if not tarname_path[0] or ".." in tarname_path: # if start with "/" means absolute path |
| 267 | raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz") |
| 268 | if len(tarname_path) == 1 and not tarinfo.isdir(): |
| 269 | raise EngineException("All files must be inside a dir for package descriptor tar.gz") |
| 270 | if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"): |
| 271 | storage["pkg-dir"] = tarname_path[0] |
| 272 | if len(tarname_path) == 2: |
| 273 | if descriptor_file_name: |
| 274 | raise EngineException( |
| 275 | "Found more than one descriptor file at package descriptor tar.gz") |
| 276 | descriptor_file_name = tarname |
| 277 | if not descriptor_file_name: |
| 278 | raise EngineException("Not found any descriptor file at package descriptor tar.gz") |
| 279 | storage["descriptor"] = descriptor_file_name |
| 280 | storage["zipfile"] = filename |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 281 | self.fs.file_extract(tar, temp_folder) |
| 282 | with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file: |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 283 | content = descriptor_file.read() |
| 284 | else: |
| 285 | content = file_pkg.read() |
| 286 | storage["descriptor"] = descriptor_file_name = filename |
| 287 | |
| 288 | if descriptor_file_name.endswith(".json"): |
| 289 | error_text = "Invalid json format " |
| 290 | indata = json.load(content) |
| 291 | else: |
| 292 | error_text = "Invalid yaml format " |
| delacruzramo | b19cadc | 2019-10-08 10:18:02 +0200 | [diff] [blame] | 293 | indata = yaml.load(content, Loader=yaml.SafeLoader) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 294 | |
| 295 | current_desc["_admin"]["storage"] = storage |
| 296 | current_desc["_admin"]["onboardingState"] = "ONBOARDED" |
| 297 | current_desc["_admin"]["operationalState"] = "ENABLED" |
| 298 | |
| 299 | indata = self._remove_envelop(indata) |
| 300 | |
| 301 | # Override descriptor with query string kwargs |
| 302 | if kwargs: |
| 303 | self._update_input_with_kwargs(indata, kwargs) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 304 | |
| 305 | deep_update_rfc7396(current_desc, indata) |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 306 | self.check_conflict_on_edit(session, current_desc, indata, _id=_id) |
| delacruzramo | 26301bb | 2019-11-15 14:45:32 +0100 | [diff] [blame] | 307 | current_desc["_admin"]["modified"] = time() |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 308 | self.db.replace(self.topic, _id, current_desc) |
| tierno | f717cbe | 2018-12-03 16:35:42 +0000 | [diff] [blame] | 309 | self.fs.dir_rename(temp_folder, _id) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 310 | |
| 311 | indata["_id"] = _id |
| K Sai Kiran | c96fd69 | 2019-10-16 17:50:53 +0530 | [diff] [blame] | 312 | self._send_msg("edited", indata) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 313 | |
| 314 | # TODO if descriptor has changed because kwargs update content and remove cached zip |
| 315 | # TODO if zip is not present creates one |
| 316 | return True |
| 317 | |
| 318 | except EngineException: |
| 319 | raise |
| 320 | except IndexError: |
| 321 | raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'", |
| 322 | HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE) |
| 323 | except IOError as e: |
| 324 | raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST) |
| 325 | except tarfile.ReadError as e: |
| 326 | raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST) |
| 327 | except (ValueError, yaml.YAMLError) as e: |
| 328 | raise EngineException(error_text + str(e)) |
| 329 | except ValidationError as e: |
| 330 | raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY) |
| 331 | finally: |
| 332 | if file_pkg: |
| 333 | file_pkg.close() |
| 334 | |
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # resolve the negotiated response format from the Accept header
        accept_text = accept_zip = False
        if accept_header:
            if 'text/plain' in accept_header or '*/*' in accept_header:
                accept_text = True
            if 'application/zip' in accept_header or '*/*' in accept_header:
                accept_zip = 'application/zip'
            elif 'application/gzip' in accept_header:
                accept_zip = 'application/gzip'

        if not accept_text and not accept_zip:
            raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
                                  http_code=HTTPStatus.NOT_ACCEPTABLE)

        content = self.show(session, _id)
        # content can only be served once the package upload completed successfully
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
                                  "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                                  http_code=HTTPStatus.CONFLICT)
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            # NOTE(review): the *path unpacking assumes path is a sequence of path components,
            # not a plain string — confirm against the caller.
            if not storage.get('pkg-dir'):
                raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
            if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
                folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"), \
                    "application/octet-stream"

        # Decision table for whole-package retrieval:
        # pkgtype     accepts ZIP  accepts TEXT -> result
        # manyfiles   yes          X            -> zip
        #             no           yes          -> error
        # onefile     yes          no           -> zip
        #             X            yes          -> text
        contain_many_files = False
        if storage.get('pkg-dir'):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage['folder'], storage['pkg-dir']))
            if len(pkg_files) >= 3 or (len(pkg_files) == 2 and 'checksums.txt' not in pkg_files):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
        elif contain_many_files and not accept_zip:
            raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
                                  "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
        else:
            if not storage.get('zipfile'):
                # TODO generate zipfile if not present
                raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                                      "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
            return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 396 | |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 397 | def _remove_yang_prefixes_from_descriptor(self, descriptor): |
| 398 | new_descriptor = {} |
| 399 | for k, v in descriptor.items(): |
| 400 | new_v = v |
| 401 | if isinstance(v, dict): |
| 402 | new_v = self._remove_yang_prefixes_from_descriptor(v) |
| 403 | elif isinstance(v, list): |
| 404 | new_v = list() |
| 405 | for x in v: |
| 406 | if isinstance(x, dict): |
| 407 | new_v.append(self._remove_yang_prefixes_from_descriptor(x)) |
| 408 | else: |
| 409 | new_v.append(x) |
| 410 | new_descriptor[k.split(':')[-1]] = new_v |
| 411 | return new_descriptor |
| 412 | |
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate *data* against the pyangbind model of *item*.

        Base implementation: subclasses override it with a real model validation;
        reaching this default means the topic has no associated model.
        :raises EngineException: always, with INTERNAL_SERVER_ERROR
        """
        raise EngineException("Not possible to validate '{}' item".format(item),
                              http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 416 | |
| Frank Bryden | deba68e | 2020-07-27 13:55:11 +0000 | [diff] [blame] | 417 | def _validate_input_edit(self, indata, content, force=False): |
| 418 | # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit |
| 419 | if "_id" in indata: |
| 420 | indata.pop("_id") |
| 421 | if "_admin" not in indata: |
| 422 | indata["_admin"] = {} |
| 423 | |
| 424 | if "operationalState" in indata: |
| 425 | if indata["operationalState"] in ("ENABLED", "DISABLED"): |
| 426 | indata["_admin"]["operationalState"] = indata.pop("operationalState") |
| 427 | else: |
| 428 | raise EngineException("State '{}' is not a valid operational state" |
| 429 | .format(indata["operationalState"]), |
| 430 | http_code=HTTPStatus.BAD_REQUEST) |
| 431 | |
| 432 | # In the case of user defined data, we need to put the data in the root of the object |
| 433 | # to preserve current expected behaviour |
| 434 | if "userDefinedData" in indata: |
| 435 | data = indata.pop("userDefinedData") |
| 436 | if type(data) == dict: |
| 437 | indata["_admin"]["userDefinedData"] = data |
| 438 | else: |
| 439 | raise EngineException("userDefinedData should be an object, but is '{}' instead" |
| 440 | .format(type(data)), |
| 441 | http_code=HTTPStatus.BAD_REQUEST) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 442 | |
| Frank Bryden | deba68e | 2020-07-27 13:55:11 +0000 | [diff] [blame] | 443 | if ("operationalState" in indata["_admin"] and |
| 444 | content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]): |
| 445 | raise EngineException("operationalState already {}".format(content["_admin"]["operationalState"]), |
| 446 | http_code=HTTPStatus.CONFLICT) |
| 447 | |
| 448 | return indata |
| 449 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 450 | |
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptors."""

    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 457 | |
    def pyangbind_validation(self, item, data, force=False):
        """
        Validate a VNFD against the ETSI NFV SOL006 pyangbind model.

        :param item: topic item name ("vnfds"); used by callers for error reporting
        :param data: descriptor content (dict) to validate
        :param force: when True unknown leafs are skipped instead of rejected
        :return: the descriptor as serialized back by pyangbind, envelope and YANG
            prefixes removed
        :raises EngineException: UNPROCESSABLE_ENTITY on any validation failure
        """
        try:
            # virtual-compute-desc / virtual-storage-desc are saved aside and re-attached
            # after serialization — presumably the pyangbind round-trip does not preserve
            # them as-is; TODO confirm.
            virtual_compute_descriptors = data.get('virtual-compute-desc')
            virtual_storage_descriptors = data.get('virtual-storage-desc')
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json({'etsi-nfv-vnfd:vnfd': data}, None, None, obj=myvnfd,
                                             path_helper=True, skip_unknown=force)
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if virtual_compute_descriptors:
                desc_out['virtual-compute-desc'] = virtual_compute_descriptors
            if virtual_storage_descriptors:
                desc_out['virtual-storage-desc'] = virtual_storage_descriptors
            return desc_out
        except Exception as e:
            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
| 476 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 477 | @staticmethod |
| 478 | def _remove_envelop(indata=None): |
| 479 | if not indata: |
| 480 | return {} |
| 481 | clean_indata = indata |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 482 | |
| 483 | if clean_indata.get('etsi-nfv-vnfd:vnfd'): |
| 484 | if not isinstance(clean_indata['etsi-nfv-vnfd:vnfd'], dict): |
| 485 | raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict") |
| 486 | clean_indata = clean_indata['etsi-nfv-vnfd:vnfd'] |
| 487 | elif clean_indata.get('vnfd'): |
| 488 | if not isinstance(clean_indata['vnfd'], dict): |
| 489 | raise EngineException("'vnfd' must be dict") |
| 490 | clean_indata = clean_indata['vnfd'] |
| 491 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 492 | return clean_indata |
| 493 | |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 494 | def check_conflict_on_edit(self, session, final_content, edit_content, _id): |
| 495 | super().check_conflict_on_edit(session, final_content, edit_content, _id) |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 496 | |
| 497 | # set type of vnfd |
| 498 | contains_pdu = False |
| 499 | contains_vdu = False |
| 500 | for vdu in get_iterable(final_content.get("vdu")): |
| 501 | if vdu.get("pdu-type"): |
| 502 | contains_pdu = True |
| 503 | else: |
| 504 | contains_vdu = True |
| 505 | if contains_pdu: |
| 506 | final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd" |
| 507 | elif contains_vdu: |
| 508 | final_content["_admin"]["type"] = "vnfd" |
| 509 | # if neither vud nor pdu do not fill type |
| 510 | |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 511 | def check_conflict_on_del(self, session, _id, db_content): |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 512 | """ |
| 513 | Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note |
| 514 | that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr |
| 515 | that uses this vnfd |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 516 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 517 | :param _id: vnfd internal id |
| 518 | :param db_content: The database content of the _id. |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 519 | :return: None or raises EngineException with the conflict |
| 520 | """ |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 521 | if session["force"]: |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 522 | return |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 523 | descriptor = db_content |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 524 | descriptor_id = descriptor.get("id") |
| 525 | if not descriptor_id: # empty vnfd not uploaded |
| 526 | return |
| 527 | |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 528 | _filter = self._get_project_filter(session) |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 529 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 530 | # check vnfrs using this vnfd |
| 531 | _filter["vnfd-id"] = _id |
| 532 | if self.db.get_list("vnfrs", _filter): |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 533 | raise EngineException("There is at least one VNF using this descriptor", http_code=HTTPStatus.CONFLICT) |
| 534 | |
| 535 | # check NSD referencing this VNFD |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 536 | del _filter["vnfd-id"] |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 537 | _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id |
| 538 | if self.db.get_list("nsds", _filter): |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 539 | raise EngineException("There is at least one NSD referencing this descriptor", |
| 540 | http_code=HTTPStatus.CONFLICT) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 541 | |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 542 | def _validate_input_new(self, indata, storage_params, force=False): |
| Frank Bryden | 19b9752 | 2020-07-10 12:32:02 +0000 | [diff] [blame] | 543 | indata.pop("onboardingState", None) |
| 544 | indata.pop("operationalState", None) |
| 545 | indata.pop("usageState", None) |
| Frank Bryden | 19b9752 | 2020-07-10 12:32:02 +0000 | [diff] [blame] | 546 | indata.pop("links", None) |
| 547 | |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 548 | indata = self.pyangbind_validation("vnfds", indata, force) |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 549 | # Cross references validation in the descriptor |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 550 | |
| 551 | self.validate_mgmt_interface_connection_point(indata) |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 552 | |
| 553 | for vdu in get_iterable(indata.get("vdu")): |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 554 | self.validate_vdu_internal_connection_points(vdu) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 555 | self._validate_vdu_charms_in_package(storage_params, vdu, indata) |
| 556 | self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata) |
| 557 | |
| 558 | self._validate_vnf_charms_in_package(storage_params, indata) |
| 559 | |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 560 | self.validate_external_connection_points(indata) |
| 561 | self.validate_internal_virtual_links(indata) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 562 | self.validate_monitoring_params(indata) |
| 563 | self.validate_scaling_group_descriptor(indata) |
| 564 | |
| 565 | return indata |
| 566 | |
| 567 | @staticmethod |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 568 | def validate_mgmt_interface_connection_point(indata): |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 569 | if not indata.get("vdu"): |
| 570 | return |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 571 | if not indata.get("mgmt-cp"): |
| 572 | raise EngineException("'mgmt-cp' is a mandatory field and it is not defined", |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 573 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 574 | |
| 575 | for cp in get_iterable(indata.get("ext-cpd")): |
| 576 | if cp["id"] == indata["mgmt-cp"]: |
| 577 | break |
| 578 | else: |
| 579 | raise EngineException("mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]), |
| 580 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 581 | |
| 582 | @staticmethod |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 583 | def validate_vdu_internal_connection_points(vdu): |
| 584 | int_cpds = set() |
| 585 | for cpd in get_iterable(vdu.get("int-cpd")): |
| 586 | cpd_id = cpd.get("id") |
| 587 | if cpd_id and cpd_id in int_cpds: |
| 588 | raise EngineException("vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd" |
| 589 | .format(vdu["id"], cpd_id), |
| 590 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 591 | int_cpds.add(cpd_id) |
| 592 | |
| 593 | @staticmethod |
| 594 | def validate_external_connection_points(indata): |
| 595 | all_vdus_int_cpds = set() |
| 596 | for vdu in get_iterable(indata.get("vdu")): |
| 597 | for int_cpd in get_iterable(vdu.get("int-cpd")): |
| 598 | all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id"))) |
| 599 | |
| 600 | ext_cpds = set() |
| 601 | for cpd in get_iterable(indata.get("ext-cpd")): |
| 602 | cpd_id = cpd.get("id") |
| 603 | if cpd_id and cpd_id in ext_cpds: |
| 604 | raise EngineException("ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id), |
| 605 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 606 | ext_cpds.add(cpd_id) |
| 607 | |
| 608 | int_cpd = cpd.get("int-cpd") |
| 609 | if int_cpd: |
| 610 | if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds: |
| 611 | raise EngineException("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(cpd_id), |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 612 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 613 | # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ? |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 614 | |
    def _validate_vdu_charms_in_package(self, storage_params, vdu, indata):
        """If the VDU declares a juju charm, require the uploaded package to
        contain a non-empty 'charms' folder.

        NOTE(review): the guard checks vdu["vdu-configuration"] but the loop
        iterates indata["vdu-configuration"] (VNF level) — confirm this
        asymmetry is intended and not a vdu/indata mix-up. The error message
        also says "vnf[id=...]" although the trigger is a VDU setting.
        """
        if not vdu.get("vdu-configuration"):
            return
        for vdu_configuration in get_iterable(indata.get("vdu-configuration")):
            if vdu_configuration.get("juju"):
                if not self._validate_package_folders(storage_params, 'charms'):
                    raise EngineException("Charm defined in vnf[id={}] but not present in "
                                          "package".format(indata["id"]))
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 623 | |
| 624 | def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata): |
| 625 | if not vdu.get("cloud-init-file"): |
| 626 | return |
| 627 | if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]): |
| 628 | raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in " |
| 629 | "package".format(indata["id"], vdu["id"])) |
| 630 | |
| 631 | def _validate_vnf_charms_in_package(self, storage_params, indata): |
| 632 | if not indata.get("vnf-configuration"): |
| 633 | return |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 634 | for vnf_configuration in get_iterable(indata.get("vnf-configuration")): |
| 635 | if vnf_configuration.get("juju"): |
| 636 | if not self._validate_package_folders(storage_params, 'charms'): |
| 637 | raise EngineException("Charm defined in vnf[id={}] but not present in " |
| 638 | "package".format(indata["id"])) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 639 | |
| 640 | def _validate_package_folders(self, storage_params, folder, file=None): |
| 641 | if not storage_params or not storage_params.get("pkg-dir"): |
| 642 | return False |
| 643 | else: |
| 644 | if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'): |
| 645 | f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder) |
| 646 | else: |
| 647 | f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder) |
| 648 | if file: |
| 649 | return self.fs.file_exists("{}/{}".format(f, file), 'file') |
| 650 | else: |
| 651 | if self.fs.file_exists(f, 'dir'): |
| 652 | if self.fs.dir_ls(f): |
| 653 | return True |
| 654 | return False |
| 655 | |
| 656 | @staticmethod |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 657 | def validate_internal_virtual_links(indata): |
| 658 | all_ivld_ids = set() |
| 659 | for ivld in get_iterable(indata.get("int-virtual-link-desc")): |
| 660 | ivld_id = ivld.get("id") |
| 661 | if ivld_id and ivld_id in all_ivld_ids: |
| 662 | raise EngineException("Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id), |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 663 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 664 | else: |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 665 | all_ivld_ids.add(ivld_id) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 666 | |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 667 | for vdu in get_iterable(indata.get("vdu")): |
| 668 | for int_cpd in get_iterable(vdu.get("int-cpd")): |
| 669 | int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc") |
| 670 | if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids: |
| 671 | raise EngineException( |
| 672 | "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing " |
| 673 | "int-virtual-link-desc".format(vdu["id"], int_cpd["id"], int_cpd_ivld_id), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 674 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 675 | |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 676 | for df in get_iterable(indata.get("df")): |
| 677 | for vlp in get_iterable(df.get("virtual-link-profile")): |
| 678 | vlp_ivld_id = vlp.get("id") |
| 679 | if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids: |
| 680 | raise EngineException("df[id='{}']:virtual-link-profile='{}' must match an existing " |
| 681 | "int-virtual-link-desc".format(df["id"], vlp_ivld_id), |
| 682 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 683 | |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 684 | @staticmethod |
| 685 | def validate_monitoring_params(indata): |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 686 | all_monitoring_params = set() |
| 687 | for ivld in get_iterable(indata.get("int-virtual-link-desc")): |
| 688 | for mp in get_iterable(ivld.get("monitoring-parameters")): |
| 689 | mp_id = mp.get("id") |
| 690 | if mp_id and mp_id in all_monitoring_params: |
| 691 | raise EngineException("Duplicated monitoring-parameter id in " |
| 692 | "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']" |
| 693 | .format(ivld["id"], mp_id), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 694 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 695 | else: |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 696 | all_monitoring_params.add(mp_id) |
| 697 | |
| 698 | for vdu in get_iterable(indata.get("vdu")): |
| 699 | for mp in get_iterable(vdu.get("monitoring-parameter")): |
| 700 | mp_id = mp.get("id") |
| 701 | if mp_id and mp_id in all_monitoring_params: |
| 702 | raise EngineException("Duplicated monitoring-parameter id in " |
| 703 | "vdu[id='{}']:monitoring-parameter[id='{}']" |
| 704 | .format(vdu["id"], mp_id), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 705 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 706 | else: |
| 707 | all_monitoring_params.add(mp_id) |
| 708 | |
| 709 | for df in get_iterable(indata.get("df")): |
| 710 | for mp in get_iterable(df.get("monitoring-parameter")): |
| 711 | mp_id = mp.get("id") |
| 712 | if mp_id and mp_id in all_monitoring_params: |
| 713 | raise EngineException("Duplicated monitoring-parameter id in " |
| 714 | "df[id='{}']:monitoring-parameter[id='{}']" |
| 715 | .format(df["id"], mp_id), |
| 716 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 717 | else: |
| 718 | all_monitoring_params.add(mp_id) |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 719 | |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 720 | @staticmethod |
| 721 | def validate_scaling_group_descriptor(indata): |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 722 | all_monitoring_params = set() |
| 723 | for ivld in get_iterable(indata.get("int-virtual-link-desc")): |
| 724 | for mp in get_iterable(ivld.get("monitoring-parameters")): |
| 725 | all_monitoring_params.add(mp.get("id")) |
| 726 | |
| 727 | for vdu in get_iterable(indata.get("vdu")): |
| 728 | for mp in get_iterable(vdu.get("monitoring-parameter")): |
| 729 | all_monitoring_params.add(mp.get("id")) |
| 730 | |
| 731 | for df in get_iterable(indata.get("df")): |
| 732 | for mp in get_iterable(df.get("monitoring-parameter")): |
| 733 | all_monitoring_params.add(mp.get("id")) |
| 734 | |
| 735 | for df in get_iterable(indata.get("df")): |
| 736 | for sa in get_iterable(df.get("scaling-aspect")): |
| 737 | for sp in get_iterable(sa.get("scaling-policy")): |
| 738 | for sc in get_iterable(sp.get("scaling-criteria")): |
| 739 | sc_monitoring_param = sc.get("vnf-monitoring-param-ref") |
| 740 | if sc_monitoring_param and sc_monitoring_param not in all_monitoring_params: |
| 741 | raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-policy" |
| 742 | "[name='{}']:scaling-criteria[name='{}']: " |
| 743 | "vnf-monitoring-param-ref='{}' not defined in any monitoring-param" |
| 744 | .format(df["id"], sa["id"], sp["name"], sc["name"], |
| 745 | sc_monitoring_param), |
| 746 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 747 | |
| 748 | for sca in get_iterable(sa.get("scaling-config-action")): |
| 749 | if not indata.get("vnf-configuration"): |
| 750 | raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced " |
| 751 | "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action" |
| 752 | .format(df["id"], sa["id"]), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 753 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 754 | for configuration in get_iterable(indata["vnf-configuration"]): |
| 755 | for primitive in get_iterable(configuration.get("config-primitive")): |
| 756 | if primitive["name"] == sca["vnf-config-primitive-name-ref"]: |
| 757 | break |
| 758 | else: |
| 759 | raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-" |
| 760 | "config-primitive-name-ref='{}' does not match any " |
| 761 | "vnf-configuration:config-primitive:name" |
| 762 | .format(df["id"], sa["id"], sca["vnf-config-primitive-name-ref"]), |
| 763 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 764 | |
| delacruzramo | 271d200 | 2019-12-02 21:00:37 +0100 | [diff] [blame] | 765 | def delete_extra(self, session, _id, db_content, not_send_msg=None): |
| 766 | """ |
| 767 | Deletes associate file system storage (via super) |
| 768 | Deletes associated vnfpkgops from database. |
| 769 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| 770 | :param _id: server internal id |
| 771 | :param db_content: The database content of the descriptor |
| 772 | :return: None |
| 773 | :raises: FsException in case of error while deleting associated storage |
| 774 | """ |
| 775 | super().delete_extra(session, _id, db_content, not_send_msg) |
| 776 | self.db.del_list("vnfpkgops", {"vnfPkgId": _id}) |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 777 | |
| Frank Bryden | 19b9752 | 2020-07-10 12:32:02 +0000 | [diff] [blame] | 778 | def sol005_projection(self, data): |
| 779 | data["onboardingState"] = data["_admin"]["onboardingState"] |
| 780 | data["operationalState"] = data["_admin"]["operationalState"] |
| 781 | data["usageState"] = data["_admin"]["usageState"] |
| 782 | |
| 783 | links = {} |
| 784 | links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])} |
| 785 | links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])} |
| 786 | links["packageContent"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])} |
| 787 | data["_links"] = links |
| garciaale | 960531a | 2020-10-20 18:29:45 -0300 | [diff] [blame] | 788 | |
| Frank Bryden | 19b9752 | 2020-07-10 12:32:02 +0000 | [diff] [blame] | 789 | return super().sol005_projection(data) |
| delacruzramo | 271d200 | 2019-12-02 21:00:37 +0100 | [diff] [blame] | 790 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 791 | |
class NsdTopic(DescriptorTopic):
    """Engine topic handling Network Service Descriptors (SOL006 NSD)."""
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate 'data' against the etsi-nfv-nsd yang model using pyangbind.

        :param item: topic name (unused here; kept for interface uniformity)
        :param data: descriptor content to validate
        :param force: when True, unknown yang leaves are skipped instead of failing
        :return: the descriptor as re-serialized by pyangbind, without envelope
            nor yang module prefixes
        :raises: EngineException (UNPROCESSABLE_ENTITY) wrapping any validation error
        """
        try:
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json({'nsd': {'nsd': [data]}}, None, None, obj=mynsd,
                                             path_helper=True, skip_unknown=force)
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return desc_out
        except Exception as e:
            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the yang envelope ('nsd' / 'etsi-nfv-nsd:nsd', then the inner
        one-element 'nsd' list) and return the bare descriptor dict."""
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get('nsd'):
            clean_indata = clean_indata['nsd']
        elif clean_indata.get('etsi-nfv-nsd:nsd'):
            clean_indata = clean_indata['etsi-nfv-nsd:nsd']
        if clean_indata.get('nsd'):
            if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata['nsd'][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a freshly uploaded NSD: drop SOL005 read-only attributes,
        run the yang-model validation, then cross-check internal references.

        :param indata: descriptor content (modified in place)
        :param storage_params: _admin.storage data of the uploaded package (not used yet)
        :param force: passed to pyangbind validation to skip unknown leaves
        :return: the validated descriptor
        """
        # SOL005 read-only attributes must not be stored as descriptor content
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("virtual-link-desc")):
            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)

        self.validate_vnf_profiles_vnfd_id(indata)

        return indata

    @staticmethod
    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
        """A management VLD (mgmt-network true) must not carry
        virtual-link-protocol-data in any df virtual-link-profile pointing to it."""
        if not vld.get("mgmt-network"):
            return
        vld_id = vld.get("id")
        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                    if vlp.get("virtual-link-protocol-data"):
                        raise EngineException("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
                                              "protocol-data You cannot set a virtual-link-protocol-data "
                                              "when mgmt-network is True"
                                              .format(df["id"], vlp["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    @staticmethod
    def validate_vnf_profiles_vnfd_id(indata):
        """Every df:vnf-profile:vnfd-id must appear in the NSD 'vnfd-id' list."""
        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
        for df in get_iterable(indata.get("df")):
            for vnf_profile in get_iterable(df.get("vnf-profile")):
                vnfd_id = vnf_profile.get("vnfd-id")
                if vnfd_id and vnfd_id not in all_vnfd_ids:
                    raise EngineException("Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
                                          "does not match any vnfd-id".format(df["id"], vnf_profile["id"], vnfd_id),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        Normalize a SOL005 edit payload, moving state/user-defined attributes under "_admin".

        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        :param indata: payload to normalize (modified in place)
        :param content: current database content of the descriptor
        :param force: unused; kept for interface compatibility
        :return: the normalized indata
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException("State '{}' is not a valid operational state"
                                      .format(indata["nsdOperationalState"]),
                                      http_code=HTTPStatus.BAD_REQUEST)

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # fixed: 'type(data) == dict' rejected dict subclasses; isinstance is the idiomatic check
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException("userDefinedData should be an object, but is '{}' instead"
                                      .format(type(data)),
                                      http_code=HTTPStatus.BAD_REQUEST)
        if ("operationalState" in indata["_admin"] and
                content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
            raise EngineException("nsdOperationalState already {}".format(content["_admin"]["operationalState"]),
                                  http_code=HTTPStatus.CONFLICT)
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
        """Return {vnfd-id: vnfd-content} for every constituent VNFD listed in the
        descriptor's 'vnfd-id', raising EngineException (CONFLICT) when one does
        not exist within the project scope."""
        vnfds_index = {}
        if descriptor.get("vnfd-id") and not session["force"]:
            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
                query_filter = self._get_project_filter(session)
                query_filter["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", query_filter)
                if not vnf_list:
                    raise EngineException("Descriptor error at 'vnfd-id'='{}' references a non "
                                          "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
                vnfds_index[vnfd_id] = vnf_list[0]
        return vnfds_index

    @staticmethod
    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
        """Each constituent-cpd-id used in the df's virtual-link-connectivity must
        be an existing ext-cpd of the referenced VNFD."""
        for vnf_profile in get_iterable(df.get("vnf-profile")):
            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
            if not vnfd:
                # fixed: a profile pointing to a vnfd-id absent from the index used to
                # crash with AttributeError; report it as a proper validation error
                raise EngineException("Error at df[id='{}']:vnf-profile[id='{}']: vnfd-id='{}' references a "
                                      "non existing vnfd"
                                      .format(df["id"], vnf_profile["id"], vnf_profile["vnfd-id"]),
                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
            all_vnfd_ext_cpds = set()
            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
                if ext_cpd.get('id'):
                    all_vnfd_ext_cpds.add(ext_cpd.get('id'))

            for virtual_link in get_iterable(vnf_profile.get("virtual-link-connectivity")):
                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
                    vl_cpd_id = vl_cpd.get('constituent-cpd-id')
                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
                        raise EngineException("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
                                              "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
                                              "non existing ext-cpd:id inside vnfd '{}'"
                                              .format(df["id"], vnf_profile["id"],
                                                      virtual_link["virtual-link-profile-id"], vl_cpd_id, vnfd["id"]),
                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit-conflict checks, then verify all constituent VNFD
        references of the final content."""
        super().check_conflict_on_edit(session, final_content, edit_content, _id)

        self._check_descriptor_dependencies(session, final_content)

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException("There is at least one NS using this descriptor", http_code=HTTPStatus.CONFLICT)

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException("There is at least one NetSlice Template referencing this descriptor",
                                  http_code=HTTPStatus.CONFLICT)

    def sol005_projection(self, data):
        """Build the SOL005 view of an NSD: surface the _admin states at the top
        level and attach the HATEOAS '_links' section."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        data["_links"] = {
            "self": {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])},
            "nsd_content": {"href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])},
        }

        return super().sol005_projection(data)
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1001 | |
| 1002 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1003 | class NstTopic(DescriptorTopic): |
| 1004 | topic = "nsts" |
| 1005 | topic_msg = "nst" |
| tierno | 6b02b05 | 2020-06-02 10:07:41 +0000 | [diff] [blame] | 1006 | quota_name = "slice_templates" |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1007 | |
| delacruzramo | 32bab47 | 2019-09-13 12:24:22 +0200 | [diff] [blame] | 1008 | def __init__(self, db, fs, msg, auth): |
| 1009 | DescriptorTopic.__init__(self, db, fs, msg, auth) |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1010 | |
| garciaale | 7cbd03c | 2020-11-27 10:38:35 -0300 | [diff] [blame] | 1011 | def pyangbind_validation(self, item, data, force=False): |
| 1012 | try: |
| 1013 | mynst = nst_im() |
| 1014 | pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst, |
| 1015 | path_helper=True, skip_unknown=force) |
| 1016 | out = pybindJSON.dumps(mynst, mode="ietf") |
| 1017 | desc_out = self._remove_envelop(yaml.safe_load(out)) |
| 1018 | return desc_out |
| 1019 | except Exception as e: |
| 1020 | raise EngineException("Error in pyangbind validation: {}".format(str(e)), |
| 1021 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 1022 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1023 | @staticmethod |
| 1024 | def _remove_envelop(indata=None): |
| 1025 | if not indata: |
| 1026 | return {} |
| 1027 | clean_indata = indata |
| 1028 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1029 | if clean_indata.get('nst'): |
| 1030 | if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1: |
| 1031 | raise EngineException("'nst' must be a list only one element") |
| 1032 | clean_indata = clean_indata['nst'][0] |
| gcalvino | 70434c1 | 2018-11-27 15:17:04 +0100 | [diff] [blame] | 1033 | elif clean_indata.get('nst:nst'): |
| 1034 | if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1: |
| 1035 | raise EngineException("'nst:nst' must be a list only one element") |
| 1036 | clean_indata = clean_indata['nst:nst'][0] |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1037 | return clean_indata |
| 1038 | |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 1039 | def _validate_input_new(self, indata, storage_params, force=False): |
| Frank Bryden | 19b9752 | 2020-07-10 12:32:02 +0000 | [diff] [blame] | 1040 | indata.pop("onboardingState", None) |
| 1041 | indata.pop("operationalState", None) |
| 1042 | indata.pop("usageState", None) |
| gcalvino | 70434c1 | 2018-11-27 15:17:04 +0100 | [diff] [blame] | 1043 | indata = self.pyangbind_validation("nsts", indata, force) |
| Felipe Vicens | e36ab85 | 2018-11-23 14:12:09 +0100 | [diff] [blame] | 1044 | return indata.copy() |
| 1045 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1046 | def _check_descriptor_dependencies(self, session, descriptor): |
| 1047 | """ |
| 1048 | Check that the dependent descriptors exist on a new descriptor or edition |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1049 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1050 | :param descriptor: descriptor to be inserted or edit |
| 1051 | :return: None or raises exception |
| 1052 | """ |
| 1053 | if not descriptor.get("netslice-subnet"): |
| 1054 | return |
| 1055 | for nsd in descriptor["netslice-subnet"]: |
| 1056 | nsd_id = nsd["nsd-ref"] |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1057 | filter_q = self._get_project_filter(session) |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1058 | filter_q["id"] = nsd_id |
| 1059 | if not self.db.get_list("nsds", filter_q): |
| 1060 | raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non " |
| 1061 | "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT) |
| 1062 | |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1063 | def check_conflict_on_edit(self, session, final_content, edit_content, _id): |
| 1064 | super().check_conflict_on_edit(session, final_content, edit_content, _id) |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1065 | |
| 1066 | self._check_descriptor_dependencies(session, final_content) |
| 1067 | |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 1068 | def check_conflict_on_del(self, session, _id, db_content): |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1069 | """ |
| 1070 | Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note |
| 1071 | that NST can be public and be used by other projects. |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1072 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| Felipe Vicens | 07f3172 | 2018-10-29 15:16:44 +0100 | [diff] [blame] | 1073 | :param _id: nst internal id |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 1074 | :param db_content: The database content of the _id. |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1075 | :return: None or raises EngineException with the conflict |
| 1076 | """ |
| 1077 | # TODO: Check this method |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1078 | if session["force"]: |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1079 | return |
| Felipe Vicens | 07f3172 | 2018-10-29 15:16:44 +0100 | [diff] [blame] | 1080 | # Get Network Slice Template from Database |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1081 | _filter = self._get_project_filter(session) |
| tierno | ea97c04 | 2019-09-13 09:44:42 +0000 | [diff] [blame] | 1082 | _filter["_admin.nst-id"] = _id |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 1083 | if self.db.get_list("nsis", _filter): |
| 1084 | raise EngineException("there is at least one Netslice Instance using this descriptor", |
| 1085 | http_code=HTTPStatus.CONFLICT) |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1086 | |
| Frank Bryden | 19b9752 | 2020-07-10 12:32:02 +0000 | [diff] [blame] | 1087 | def sol005_projection(self, data): |
| 1088 | data["onboardingState"] = data["_admin"]["onboardingState"] |
| 1089 | data["operationalState"] = data["_admin"]["operationalState"] |
| 1090 | data["usageState"] = data["_admin"]["usageState"] |
| 1091 | |
| 1092 | links = {} |
| 1093 | links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])} |
| 1094 | links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])} |
| 1095 | data["_links"] = links |
| 1096 | |
| 1097 | return super().sol005_projection(data) |
| 1098 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 1099 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1100 | class PduTopic(BaseTopic): |
| 1101 | topic = "pdus" |
| 1102 | topic_msg = "pdu" |
| tierno | 6b02b05 | 2020-06-02 10:07:41 +0000 | [diff] [blame] | 1103 | quota_name = "pduds" |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1104 | schema_new = pdu_new_schema |
| 1105 | schema_edit = pdu_edit_schema |
| 1106 | |
| delacruzramo | 32bab47 | 2019-09-13 12:24:22 +0200 | [diff] [blame] | 1107 | def __init__(self, db, fs, msg, auth): |
| 1108 | BaseTopic.__init__(self, db, fs, msg, auth) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1109 | |
| 1110 | @staticmethod |
| 1111 | def format_on_new(content, project_id=None, make_public=False): |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 1112 | BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1113 | content["_admin"]["onboardingState"] = "CREATED" |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 1114 | content["_admin"]["operationalState"] = "ENABLED" |
| 1115 | content["_admin"]["usageState"] = "NOT_IN_USE" |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1116 | |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 1117 | def check_conflict_on_del(self, session, _id, db_content): |
| 1118 | """ |
| 1119 | Check that there is not any vnfr that uses this PDU |
| 1120 | :param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
| 1121 | :param _id: pdu internal id |
| 1122 | :param db_content: The database content of the _id. |
| 1123 | :return: None or raises EngineException with the conflict |
| 1124 | """ |
| tierno | 65ca36d | 2019-02-12 19:27:52 +0100 | [diff] [blame] | 1125 | if session["force"]: |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1126 | return |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 1127 | |
| 1128 | _filter = self._get_project_filter(session) |
| 1129 | _filter["vdur.pdu-id"] = _id |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1130 | if self.db.get_list("vnfrs", _filter): |
| tierno | b4844ab | 2019-05-23 08:42:12 +0000 | [diff] [blame] | 1131 | raise EngineException("There is at least one VNF using this PDU", http_code=HTTPStatus.CONFLICT) |
| delacruzramo | 271d200 | 2019-12-02 21:00:37 +0100 | [diff] [blame] | 1132 | |
| 1133 | |
class VnfPkgOpTopic(BaseTopic):
    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        # VNF package operation occurrences are immutable once created
        raise EngineException("Method 'edit' not allowed for topic '{}'".format(self.topic),
                              HTTPStatus.METHOD_NOT_ALLOWED)

    def delete(self, session, _id, dry_run=False):
        # Operation occurrences are kept as history; deletion is not supported
        raise EngineException("Method 'delete' not allowed for topic '{}'".format(self.topic),
                              HTTPStatus.METHOD_NOT_ALLOWED)

    def delete_list(self, session, filter_q=None):
        raise EngineException("Method 'delete_list' not allowed for topic '{}'".format(self.topic),
                              HTTPStatus.METHOD_NOT_ALLOWED)

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        query = BaseTopic._get_project_filter(session)
        query["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", query)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the requested KDU inside the VNF descriptor
        kdu = next((k for k in vnfd.get("kdu", ()) if k["name"] == kdu_name), None)
        if kdu is None:
            raise EngineException("Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name))
        helm_chart = kdu.get("helm-chart")
        juju_bundle = kdu.get("juju-bundle")
        if helm_chart:
            indata["helm-chart"] = helm_chart
            artifact = helm_chart
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            artifact = juju_bundle
        else:
            raise EngineException("Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']"
                                  .format(vnfpkg_id, kdu_name))
        # An artifact of the form "<repo>/<name>" references a registered K8s repo
        match = fullmatch(r"([^/]*)/([^/]*)", artifact)
        repo_name = match.group(1) if match else None
        if repo_name:
            del query["_id"]
            query["name"] = repo_name
            repo = self.db.get_one("k8srepos", query)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            }
        }
        self.format_on_new(vnfpkgop_desc, session["project_id"], make_public=session["public"])
        creation_time = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = creation_time
        vnfpkgop_desc["startTime"] = creation_time
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None