| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 1 | # -*- coding: utf-8 -*- |
| 2 | |
| tierno | d125caf | 2018-11-22 16:05:54 +0000 | [diff] [blame] | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | # you may not use this file except in compliance with the License. |
| 5 | # You may obtain a copy of the License at |
| 6 | # |
| 7 | # http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | # |
| 9 | # Unless required by applicable law or agreed to in writing, software |
| 10 | # distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
| 12 | # implied. |
| 13 | # See the License for the specific language governing permissions and |
| 14 | # limitations under the License. |
| 15 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 16 | import tarfile |
| 17 | import yaml |
| 18 | import json |
| 19 | # import logging |
| 20 | from hashlib import md5 |
| 21 | from osm_common.dbbase import DbException, deep_update_rfc7396 |
| 22 | from http import HTTPStatus |
| 23 | from validation import ValidationError, pdu_new_schema, pdu_edit_schema |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 24 | from base_topic import BaseTopic, EngineException, get_iterable |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 25 | from osm_im.vnfd import vnfd as vnfd_im |
| 26 | from osm_im.nsd import nsd as nsd_im |
| gcalvino | 70434c1 | 2018-11-27 15:17:04 +0100 | [diff] [blame] | 27 | from osm_im.nst import nst as nst_im |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 28 | from pyangbind.lib.serialise import pybindJSONDecoder |
| 29 | import pyangbind.lib.pybindJSON as pybindJSON |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 30 | |
| 31 | __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>" |
| 32 | |
| 33 | |
class DescriptorTopic(BaseTopic):
    """
    Common behaviour for descriptor topics (vnfds, nsds, nsts).

    Following SOL005, a descriptor is created in two steps: first an almost empty
    DISABLED entry is inserted at database (self.new) and then the package content
    is uploaded, extracted and validated (self.upload_content). Concrete subclasses
    (VnfdTopic, NsdTopic, ...) define 'topic'/'topic_msg' and the validation hooks.
    """

    def __init__(self, db, fs, msg):
        BaseTopic.__init__(self, db, fs, msg)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
        """
        Re-validate the edited descriptor with pyangbind and check that its yang 'id'
        is not already used by another descriptor of this project.
        :param session: contains the used login username, working project, and admin rights
        :param final_content: descriptor after applying the edition. Modified in place with
            the pyangbind-serialized version
        :param edit_content: content sent by the client for this edition
        :param _id: internal _id of the descriptor being edited
        :param force: if True skip the duplicated-id check and be tolerant at validation
        :return: None. Raises EngineException on conflict
        """
        # 1. validate again with pyangbind
        # 1.1. remove internal keys: pyangbind only accepts pure descriptor content
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # tolerate a missing "_admin" instead of raising KeyError
        storage_params = internal_keys.get("_admin", {}).get("storage")
        serialized = self._validate_input_new(final_content, storage_params, force)
        # 1.2. modify final_content with a serialized version
        final_content.clear()
        final_content.update(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v

        if force:
            return
        # 2. check that this yang id is not present in another descriptor of the project
        if "id" in edit_content:
            _filter = self._get_project_filter(session, write=False, show_all=False)
            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id
            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
                                                                                               final_content["id"]),
                                      HTTPStatus.CONFLICT)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill common '_admin' fields plus the SOL005 onboarding/operational/usage states."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def delete(self, session, _id, force=False, dry_run=False):
        """
        Delete item by its internal _id
        :param session: contains the used login username, working project, and admin rights
        :param _id: server internal id
        :param force: indicates if deletion must be forced in case of conflict
        :param dry_run: make checking but do not delete
        :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
        """
        # TODO add admin to filter, validate rights
        # run the common checks (existence, conflicts) without deleting yet
        BaseTopic.delete(self, session, _id, force, dry_run=True)
        if dry_run:
            return
        v = self.db.del_one(self.topic, {"_id": _id})
        self.fs.file_delete(_id, ignore_non_exist=True)
        self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
        self._send_msg("delete", {"_id": _id})
        return v

    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Return the single descriptor of 'topic' whose yang 'id' matches, giving precedence
        to descriptors owned by the session project over public ones.
        :param db: database object
        :param session: contains the used login username and working project
        :param topic: collection name ("vnfds", "nsds", ...)
        :param id: yang id of the descriptor
        :return: the descriptor content. Raises DbException if not found or ambiguous
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session, write=False, show_all=False)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
                              HTTPStatus.CONFLICT)

        # not found any: try to find public
        _filter = BaseTopic._get_project_filter(session, write=False, show_all=True)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
                topic[:-1], id), HTTPStatus.CONFLICT)

    def new(self, rollback, session, indata=None, kwargs=None, headers=None, force=False, make_public=False):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains the used login username and working project
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :param force: If True avoid some dependence checks
        :param make_public: Make the created descriptor public to all projects
        :return: _id: identity of the inserted data.
        """

        try:
            # _remove_envelop
            if indata:
                if "userDefinedData" in indata:
                    indata = indata['userDefinedData']

            # Override descriptor with query string kwargs
            self._update_input_with_kwargs(indata, kwargs)
            # uncomment when this method is implemented.
            # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
            # indata = DescriptorTopic._validate_input_new(self, indata, force=force)

            content = {"_admin": {"userDefinedData": indata}}
            self.format_on_new(content, session["project_id"], make_public=make_public)
            _id = self.db.create(self.topic, content)
            rollback.append({"topic": self.topic, "_id": _id})
            return _id
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)

    def upload_content(self, session, _id, indata, kwargs, headers, force=False):
        """
        Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
        :param session: session
        :param _id : the nsd,vnfd is already created, this is the id
        :param indata: http body request
        :param kwargs: user query string to override parameters. NOT USED
        :param headers: http request headers
        :param force: to be more tolerant with validation
        :return: True if package is completely uploaded or False if partial content has been uploaded
            Raise exception on error
        """
        # Check that _id exists and it is valid
        current_desc = self.show(session, _id)

        content_range_text = headers.get("Content-Range")
        expected_md5 = headers.get("Content-File-MD5")
        compressed = None
        content_type = headers.get("Content-Type")
        # parentheses are needed: without them a missing Content-Type header (None) would
        # reach the "in content_type" tests and raise TypeError
        if content_type and ("application/gzip" in content_type or "application/x-gzip" in content_type or
                             "application/zip" in content_type):
            compressed = "gzip"
        filename = headers.get("Content-Filename")
        if not filename:
            filename = "package.tar.gz" if compressed else "package"
        # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
        file_pkg = None
        error_text = ""
        try:
            if content_range_text:
                content_range = content_range_text.replace("-", " ").replace("/", " ").split()
                if content_range[0] != "bytes":  # TODO check x<y not negative < total....
                    raise IndexError()
                start = int(content_range[1])
                end = int(content_range[2]) + 1
                total = int(content_range[3])
            else:
                start = 0
            temp_folder = _id + "_"  # all the content is upload here and if ok, it is rename from id_ to is folder

            if start:
                if not self.fs.file_exists(temp_folder, 'dir'):
                    raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
            else:
                self.fs.file_delete(temp_folder, ignore_non_exist=True)
                self.fs.mkdir(temp_folder)

            storage = self.fs.get_params()
            storage["folder"] = _id

            file_path = (temp_folder, filename)
            if self.fs.file_exists(file_path, 'file'):
                file_size = self.fs.file_size(file_path)
            else:
                file_size = 0
            if file_size != start:
                raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                    file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
            file_pkg = self.fs.file_open(file_path, 'a+b')
            if isinstance(indata, dict):
                indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                file_pkg.write(indata_text.encode(encoding="utf-8"))
            else:
                indata_len = 0
                while True:
                    indata_text = indata.read(4096)
                    indata_len += len(indata_text)
                    if not indata_text:
                        break
                    file_pkg.write(indata_text)
            if content_range_text:
                if indata_len != end - start:
                    raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
                        start, end - 1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
                if end != total:
                    # TODO update to UPLOADING
                    return False

            # PACKAGE UPLOADED
            if expected_md5:
                file_pkg.seek(0, 0)
                file_md5 = md5()
                chunk_data = file_pkg.read(1024)
                while chunk_data:
                    file_md5.update(chunk_data)
                    chunk_data = file_pkg.read(1024)
                if expected_md5 != file_md5.hexdigest():
                    raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
            file_pkg.seek(0, 0)
            if compressed == "gzip":
                tar = tarfile.open(mode='r', fileobj=file_pkg)
                descriptor_file_name = None
                for tarinfo in tar:
                    tarname = tarinfo.name
                    tarname_path = tarname.split("/")
                    if not tarname_path[0] or ".." in tarname_path:  # if start with "/" means absolute path
                        raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
                    if len(tarname_path) == 1 and not tarinfo.isdir():
                        raise EngineException("All files must be inside a dir for package descriptor tar.gz")
                    if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
                        storage["pkg-dir"] = tarname_path[0]
                        if len(tarname_path) == 2:
                            if descriptor_file_name:
                                raise EngineException(
                                    "Found more than one descriptor file at package descriptor tar.gz")
                            descriptor_file_name = tarname
                if not descriptor_file_name:
                    raise EngineException("Not found any descriptor file at package descriptor tar.gz")
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(tar, temp_folder)
                with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
                    content = descriptor_file.read()
            else:
                content = file_pkg.read()
                storage["descriptor"] = descriptor_file_name = filename

            if descriptor_file_name.endswith(".json"):
                error_text = "Invalid json format "
                # json.loads: 'content' is already-read text/bytes, not a file object
                indata = json.loads(content)
            else:
                error_text = "Invalid yaml format "
                # safe_load: never instantiate arbitrary python objects from uploaded packages
                indata = yaml.safe_load(content)

            current_desc["_admin"]["storage"] = storage
            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
            current_desc["_admin"]["operationalState"] = "ENABLED"

            indata = self._remove_envelop(indata)

            # Override descriptor with query string kwargs
            if kwargs:
                self._update_input_with_kwargs(indata, kwargs)
            # it will call overrides method at VnfdTopic or NsdTopic
            # indata = self._validate_input_edit(indata, force=force)

            deep_update_rfc7396(current_desc, indata)
            self.check_conflict_on_edit(session, current_desc, indata, _id=_id, force=force)
            self.db.replace(self.topic, _id, current_desc)
            self.fs.dir_rename(temp_folder, _id)

            indata["_id"] = _id
            self._send_msg("created", indata)

            # TODO if descriptor has changed because kwargs update content and remove cached zip
            # TODO if zip is not present creates one
            return True

        except EngineException:
            raise
        except IndexError:
            raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
                                  HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
        except IOError as e:
            raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
        except tarfile.ReadError as e:
            raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
        except (ValueError, yaml.YAMLError) as e:
            raise EngineException(error_text + str(e))
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
        finally:
            if file_pkg:
                file_pkg.close()

    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains the used login username and working project
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if 'text/plain' in accept_header or '*/*' in accept_header:
                accept_text = True
            if 'application/zip' in accept_header or '*/*' in accept_header:
                accept_zip = 'application/zip'
            elif 'application/gzip' in accept_header:
                accept_zip = 'application/gzip'

        if not accept_text and not accept_zip:
            raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
                                  http_code=HTTPStatus.NOT_ACCEPTABLE)

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
                                  "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                                  http_code=HTTPStatus.CONFLICT)
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get('pkg-dir'):
                raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
            if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
                folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
                return folder_content, "text/plain"
            # TODO manage folders in http
            else:
                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
                    "application/octet-stream"

        # pkgtype   accept  ZIP  TEXT    -> result
        # manyfiles         yes  X       -> zip
        #                   no   yes     -> error
        # onefile           yes  no      -> zip
        #                   X    yes     -> text

        if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
            return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
        elif storage.get('pkg-dir') and not accept_zip:
            raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
                                  "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
        else:
            if not storage.get('zipfile'):
                # TODO generate zipfile if not present
                raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                                      "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
            return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip

    def pyangbind_validation(self, item, data, force=False):
        """
        Validate 'data' against the yang information model of 'item' using pyangbind.
        :param item: topic that selects the model: "vnfds", "nsds" or "nsts"
        :param data: descriptor content, without envelop
        :param force: skip unknown fields instead of failing
        :return: the descriptor serialized back from the model, with envelop removed.
            Raises EngineException (UNPROCESSABLE_ENTITY) if validation fails
        """
        try:
            if item == "vnfds":
                myvnfd = vnfd_im()
                pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
                                                 path_helper=True, skip_unknown=force)
                out = pybindJSON.dumps(myvnfd, mode="ietf")
            elif item == "nsds":
                mynsd = nsd_im()
                pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
                                                 path_helper=True, skip_unknown=force)
                out = pybindJSON.dumps(mynsd, mode="ietf")
            elif item == "nsts":
                mynst = nst_im()
                pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
                                                 path_helper=True, skip_unknown=force)
                out = pybindJSON.dumps(mynst, mode="ietf")
            else:
                raise EngineException("Not possible to validate '{}' item".format(item),
                                      http_code=HTTPStatus.INTERNAL_SERVER_ERROR)

            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out

        except Exception as e:
            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
| 401 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 402 | |
class VnfdTopic(DescriptorTopic):
    """Engine topic that manages VNF descriptors ('vnfds' database collection)."""
    # database collection and kafka message topic for this descriptor kind
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg):
        DescriptorTopic.__init__(self, db, fs, msg)
| 409 | |
| 410 | @staticmethod |
| 411 | def _remove_envelop(indata=None): |
| 412 | if not indata: |
| 413 | return {} |
| 414 | clean_indata = indata |
| 415 | if clean_indata.get('vnfd:vnfd-catalog'): |
| 416 | clean_indata = clean_indata['vnfd:vnfd-catalog'] |
| 417 | elif clean_indata.get('vnfd-catalog'): |
| 418 | clean_indata = clean_indata['vnfd-catalog'] |
| 419 | if clean_indata.get('vnfd'): |
| 420 | if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1: |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 421 | raise EngineException("'vnfd' must be a list of only one element") |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 422 | clean_indata = clean_indata['vnfd'][0] |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 423 | elif clean_indata.get('vnfd:vnfd'): |
| 424 | if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1: |
| 425 | raise EngineException("'vnfd:vnfd' must be a list of only one element") |
| 426 | clean_indata = clean_indata['vnfd:vnfd'][0] |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 427 | return clean_indata |
| 428 | |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 429 | def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False): |
| 430 | super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force) |
| 431 | |
| 432 | # set type of vnfd |
| 433 | contains_pdu = False |
| 434 | contains_vdu = False |
| 435 | for vdu in get_iterable(final_content.get("vdu")): |
| 436 | if vdu.get("pdu-type"): |
| 437 | contains_pdu = True |
| 438 | else: |
| 439 | contains_vdu = True |
| 440 | if contains_pdu: |
| 441 | final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd" |
| 442 | elif contains_vdu: |
| 443 | final_content["_admin"]["type"] = "vnfd" |
| 444 | # if neither vud nor pdu do not fill type |
| 445 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 446 | def check_conflict_on_del(self, session, _id, force=False): |
| 447 | """ |
| 448 | Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note |
| 449 | that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr |
| 450 | that uses this vnfd |
| 451 | :param session: |
| 452 | :param _id: vnfd inernal id |
| 453 | :param force: Avoid this checking |
| 454 | :return: None or raises EngineException with the conflict |
| 455 | """ |
| 456 | if force: |
| 457 | return |
| 458 | descriptor = self.db.get_one("vnfds", {"_id": _id}) |
| 459 | descriptor_id = descriptor.get("id") |
| 460 | if not descriptor_id: # empty vnfd not uploaded |
| 461 | return |
| 462 | |
| 463 | _filter = self._get_project_filter(session, write=False, show_all=False) |
| 464 | # check vnfrs using this vnfd |
| 465 | _filter["vnfd-id"] = _id |
| 466 | if self.db.get_list("vnfrs", _filter): |
| 467 | raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT) |
| 468 | del _filter["vnfd-id"] |
| 469 | # check NSD using this VNFD |
| 470 | _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id |
| 471 | if self.db.get_list("nsds", _filter): |
| 472 | raise EngineException("There is soame NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT) |
| 473 | |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 474 | def _validate_input_new(self, indata, storage_params, force=False): |
| gcalvino | 46e4cb8 | 2018-10-26 13:10:22 +0200 | [diff] [blame] | 475 | indata = self.pyangbind_validation("vnfds", indata, force) |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 476 | # Cross references validation in the descriptor |
| gcalvino | e45aded | 2018-11-13 17:17:28 +0100 | [diff] [blame] | 477 | if indata.get("vdu"): |
| 478 | if not indata.get("mgmt-interface"): |
| 479 | raise EngineException("'mgmt-interface' is a mandatory field and it is not defined", |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 480 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| gcalvino | e45aded | 2018-11-13 17:17:28 +0100 | [diff] [blame] | 481 | if indata["mgmt-interface"].get("cp"): |
| 482 | for cp in get_iterable(indata.get("connection-point")): |
| 483 | if cp["name"] == indata["mgmt-interface"]["cp"]: |
| 484 | break |
| 485 | else: |
| 486 | raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point" |
| 487 | .format(indata["mgmt-interface"]["cp"]), |
| 488 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 489 | |
| 490 | for vdu in get_iterable(indata.get("vdu")): |
| 491 | for interface in get_iterable(vdu.get("interface")): |
| 492 | if interface.get("external-connection-point-ref"): |
| 493 | for cp in get_iterable(indata.get("connection-point")): |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 494 | if cp["name"] == interface["external-connection-point-ref"]: |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 495 | break |
| 496 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 497 | raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' " |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 498 | "must match an existing connection-point" |
| 499 | .format(vdu["id"], interface["name"], |
| 500 | interface["external-connection-point-ref"]), |
| 501 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 502 | |
| 503 | elif interface.get("internal-connection-point-ref"): |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 504 | for internal_cp in get_iterable(vdu.get("internal-connection-point")): |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 505 | if interface["internal-connection-point-ref"] == internal_cp.get("id"): |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 506 | break |
| 507 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 508 | raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' " |
| 509 | "must match an existing vdu:internal-connection-point" |
| 510 | .format(vdu["id"], interface["name"], |
| 511 | interface["internal-connection-point-ref"]), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 512 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 513 | # Validate that if descriptor contains charms, artifacts _admin.storage."pkg-dir" is not none |
| 514 | if vdu.get("vdu-configuration"): |
| 515 | if vdu["vdu-configuration"].get("juju"): |
| 516 | if not self._validate_package_folders(storage_params, 'charms'): |
| 517 | raise EngineException("Charm defined in vnf[id={}]:vdu[id={}] but not present in " |
| 518 | "package".format(indata["id"], vdu["id"])) |
| 519 | # Validate that if descriptor contains cloud-init, artifacts _admin.storage."pkg-dir" is not none |
| 520 | if vdu.get("cloud-init-file"): |
| 521 | if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]): |
| 522 | raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in " |
| 523 | "package".format(indata["id"], vdu["id"])) |
| 524 | # Validate that if descriptor contains charms, artifacts _admin.storage."pkg-dir" is not none |
| 525 | if indata.get("vnf-configuration"): |
| 526 | if indata["vnf-configuration"].get("juju"): |
| 527 | if not self._validate_package_folders(storage_params, 'charms'): |
| 528 | raise EngineException("Charm defined in vnf[id={}] but not present in " |
| 529 | "package".format(indata["id"])) |
| delacruzramo | 5727a37 | 2019-03-28 12:29:04 +0100 | [diff] [blame^] | 530 | vld_names = [] # For detection of duplicated VLD names |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 531 | for ivld in get_iterable(indata.get("internal-vld")): |
| delacruzramo | 5727a37 | 2019-03-28 12:29:04 +0100 | [diff] [blame^] | 532 | # BEGIN Detection of duplicated VLD names |
| 533 | ivld_name = ivld["name"] |
| 534 | if ivld_name in vld_names: |
| 535 | raise EngineException("Duplicated VLD name '{}' in vnfd[id={}]:internal-vld[id={}]" |
| 536 | .format(ivld["name"], indata["id"], ivld["id"]), |
| 537 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 538 | else: |
| 539 | vld_names.append(ivld_name) |
| 540 | # END Detection of duplicated VLD names |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 541 | for icp in get_iterable(ivld.get("internal-connection-point")): |
| 542 | icp_mark = False |
| 543 | for vdu in get_iterable(indata.get("vdu")): |
| 544 | for internal_cp in get_iterable(vdu.get("internal-connection-point")): |
| 545 | if icp["id-ref"] == internal_cp["id"]: |
| 546 | icp_mark = True |
| 547 | break |
| 548 | if icp_mark: |
| 549 | break |
| 550 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 551 | raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing " |
| 552 | "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 553 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 554 | if ivld.get("ip-profile-ref"): |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 555 | for ip_prof in get_iterable(indata.get("ip-profiles")): |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 556 | if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")): |
| 557 | break |
| 558 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 559 | raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format( |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 560 | ivld["id"], ivld["ip-profile-ref"]), |
| 561 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 562 | for mp in get_iterable(indata.get("monitoring-param")): |
| 563 | if mp.get("vdu-monitoring-param"): |
| 564 | mp_vmp_mark = False |
| 565 | for vdu in get_iterable(indata.get("vdu")): |
| 566 | for vmp in get_iterable(vdu.get("monitoring-param")): |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 567 | if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\ |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 568 | mp["vdu-monitoring-param"]["vdu-ref"]: |
| 569 | mp_vmp_mark = True |
| 570 | break |
| 571 | if mp_vmp_mark: |
| 572 | break |
| 573 | else: |
| 574 | raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not " |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 575 | "defined at vdu[id='{}'] or vdu does not exist" |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 576 | .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"], |
| 577 | mp["vdu-monitoring-param"]["vdu-ref"]), |
| 578 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 579 | elif mp.get("vdu-metric"): |
| 580 | mp_vm_mark = False |
| 581 | for vdu in get_iterable(indata.get("vdu")): |
| 582 | if vdu.get("vdu-configuration"): |
| 583 | for metric in get_iterable(vdu["vdu-configuration"].get("metrics")): |
| 584 | if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \ |
| 585 | mp["vdu-metric"]["vdu-ref"]: |
| 586 | mp_vm_mark = True |
| 587 | break |
| 588 | if mp_vm_mark: |
| 589 | break |
| 590 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 591 | raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at " |
| 592 | "vdu[id='{}'] or vdu does not exist" |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 593 | .format(mp["vdu-metric"]["vdu-metric-name-ref"], |
| 594 | mp["vdu-metric"]["vdu-ref"]), |
| 595 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 596 | |
| 597 | for sgd in get_iterable(indata.get("scaling-group-descriptor")): |
| 598 | for sp in get_iterable(sgd.get("scaling-policy")): |
| 599 | for sc in get_iterable(sp.get("scaling-criteria")): |
| 600 | for mp in get_iterable(indata.get("monitoring-param")): |
| 601 | if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")): |
| 602 | break |
| 603 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 604 | raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:" |
| 605 | "vnf-monitoring-param-ref='{}' not defined in any monitoring-param" |
| 606 | .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 607 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 608 | for sgd_vdu in get_iterable(sgd.get("vdu")): |
| 609 | sgd_vdu_mark = False |
| 610 | for vdu in get_iterable(indata.get("vdu")): |
| 611 | if vdu["id"] == sgd_vdu["vdu-id-ref"]: |
| 612 | sgd_vdu_mark = True |
| 613 | break |
| 614 | if sgd_vdu_mark: |
| 615 | break |
| 616 | else: |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 617 | raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu" |
| 618 | .format(sgd["name"], sgd_vdu["vdu-id-ref"]), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 619 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| 620 | for sca in get_iterable(sgd.get("scaling-config-action")): |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 621 | if not indata.get("vnf-configuration"): |
| 622 | raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by " |
| 623 | "scaling-group-descriptor[name='{}']:scaling-config-action" |
| 624 | .format(sgd["name"]), |
| gcalvino | 5e72d15 | 2018-10-23 11:46:57 +0200 | [diff] [blame] | 625 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| tierno | 40fbcad | 2018-10-26 10:58:15 +0200 | [diff] [blame] | 626 | for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")): |
| 627 | if primitive["name"] == sca["vnf-config-primitive-name-ref"]: |
| 628 | break |
| 629 | else: |
| 630 | raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-" |
| 631 | "primitive-name-ref='{}' does not match any " |
| 632 | "vnf-configuration:config-primitive:name" |
| 633 | .format(sgd["name"], sca["vnf-config-primitive-name-ref"]), |
| 634 | http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 635 | return indata |
| 636 | |
    def _validate_input_edit(self, indata, force=False):
        """
        Validate the content of an edit operation.
        :param indata: content to be edited
        :param force: unused here; kept for interface consistency with _validate_input_new
        :return: indata unchanged
        """
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        return indata
| 640 | |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 641 | def _validate_package_folders(self, storage_params, folder, file=None): |
| 642 | if not storage_params or not storage_params.get("pkg-dir"): |
| 643 | return False |
| 644 | else: |
| 645 | if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'): |
| 646 | f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder) |
| 647 | else: |
| 648 | f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder) |
| 649 | if file: |
| 650 | return self.fs.file_exists("{}/{}".format(f, file), 'file') |
| 651 | else: |
| 652 | if self.fs.file_exists(f, 'dir'): |
| 653 | if self.fs.dir_ls(f): |
| 654 | return True |
| 655 | return False |
| 656 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 657 | |
class NsdTopic(DescriptorTopic):
    """Topic handler for Network Service Descriptors (nsds collection)."""
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg):
        DescriptorTopic.__init__(self, db, fs, msg)

    @staticmethod
    def _remove_envelop(indata=None):
        """
        Obtain the useful descriptor content, removing any yang envelope
        ('nsd:nsd-catalog', 'nsd-catalog', 'nsd', 'nsd:nsd').
        :param indata: uploaded content, possibly enveloped
        :return: the inner nsd dict; {} when indata is empty
        :raises EngineException: if the envelope list does not contain exactly one nsd
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get('nsd:nsd-catalog'):
            clean_indata = clean_indata['nsd:nsd-catalog']
        elif clean_indata.get('nsd-catalog'):
            clean_indata = clean_indata['nsd-catalog']
        if clean_indata.get('nsd'):
            if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata['nsd'][0]
        elif clean_indata.get('nsd:nsd'):
            if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
                raise EngineException("'nsd:nsd' must be a list of only one element")
            clean_indata = clean_indata['nsd:nsd'][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """
        Validate a new nsd: yang-model validation plus internal cross references.
        :param indata: nsd content
        :param storage_params: _admin.storage information of the package (not used here yet)
        :param force: if True, pyangbind validation errors are tolerated
        :return: the validated indata
        :raises EngineException: with HTTP 422 on cross-reference mismatches
        """
        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validate that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("vld")):
            for vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
                for constituent_vnfd in get_iterable(indata.get("constituent-vnfd")):
                    if vnfd_cp["member-vnf-index-ref"] == constituent_vnfd["member-vnf-index"]:
                        # when present, the vnfd-id-ref at the cp must agree with the constituent-vnfd entry
                        if vnfd_cp.get("vnfd-id-ref") and vnfd_cp["vnfd-id-ref"] != constituent_vnfd["vnfd-id-ref"]:
                            raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[vnfd-id-ref='{}'] "
                                                  "does not match constituent-vnfd[member-vnf-index='{}']:vnfd-id-ref"
                                                  " '{}'".format(vld["id"], vnfd_cp["vnfd-id-ref"],
                                                                 constituent_vnfd["member-vnf-index"],
                                                                 constituent_vnfd["vnfd-id-ref"]),
                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
                        break
                else:
                    # no constituent-vnfd entry declares this member-vnf-index
                    raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
                                          "does not match any constituent-vnfd:member-vnf-index"
                                          .format(vld["id"], vnfd_cp["member-vnf-index-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        return indata

    def _validate_input_edit(self, indata, force=False):
        """
        Validate the content of an edit operation.
        :param indata: content to be edited
        :param force: unused here; kept for interface consistency with _validate_input_new
        :return: indata unchanged
        """
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        return indata

    def _check_descriptor_dependencies(self, session, descriptor, force=False):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: client session information
        :param descriptor: descriptor to be inserted or edit
        :param force: if true skip dependencies checking
        :return: None or raises exception
        """
        if force:
            return
        member_vnfd_index = {}
        # note: the original extra "and not force" here was redundant after the early return above
        if descriptor.get("constituent-vnfd"):
            for vnf in descriptor["constituent-vnfd"]:
                vnfd_id = vnf["vnfd-id-ref"]
                filter_q = self._get_project_filter(session, write=False, show_all=True)
                filter_q["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", filter_q)
                if not vnf_list:
                    raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
                                          "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
                # elif len(vnf_list) > 1:
                #     raise EngineException("More than one vnfd found for id='{}'".format(vnfd_id),
                #                           http_code=HTTPStatus.CONFLICT)
                member_vnfd_index[vnf["member-vnf-index"]] = vnf_list[0]

        # Cross references validation in the descriptor and vnfd connection point validation
        for vld in get_iterable(descriptor.get("vld")):
            for referenced_vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
                # look if this vnfd contains this connection point
                vnfd = member_vnfd_index.get(referenced_vnfd_cp["member-vnf-index-ref"])
                if not vnfd:
                    raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
                                          "does not match any constituent-vnfd:member-vnf-index"
                                          .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
                for vnfd_cp in get_iterable(vnfd.get("connection-point")):
                    if referenced_vnfd_cp.get("vnfd-connection-point-ref") == vnfd_cp["name"]:
                        break
                else:
                    raise EngineException(
                        "Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}']:vnfd-"
                        "connection-point-ref='{}' references a non existing conection-point:name inside vnfd '{}'"
                        .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"],
                                referenced_vnfd_cp["vnfd-connection-point-ref"], vnfd["id"]),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False):
        super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force)

        self._check_descriptor_dependencies(session, final_content, force)

    def check_conflict_on_del(self, session, _id, force=False):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: client session information
        :param _id: nsd internal id
        :param force: Avoid this checking
        :return: None or raises EngineException with the conflict
        """
        if force:
            return
        _filter = self._get_project_filter(session, write=False, show_all=False)
        _filter["nsdId"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
| 779 | |
| 780 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 781 | class NstTopic(DescriptorTopic): |
| 782 | topic = "nsts" |
| 783 | topic_msg = "nst" |
| 784 | |
| 785 | def __init__(self, db, fs, msg): |
| 786 | DescriptorTopic.__init__(self, db, fs, msg) |
| 787 | |
| 788 | @staticmethod |
| 789 | def _remove_envelop(indata=None): |
| 790 | if not indata: |
| 791 | return {} |
| 792 | clean_indata = indata |
| 793 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 794 | if clean_indata.get('nst'): |
| 795 | if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1: |
| 796 | raise EngineException("'nst' must be a list only one element") |
| 797 | clean_indata = clean_indata['nst'][0] |
| gcalvino | 70434c1 | 2018-11-27 15:17:04 +0100 | [diff] [blame] | 798 | elif clean_indata.get('nst:nst'): |
| 799 | if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1: |
| 800 | raise EngineException("'nst:nst' must be a list only one element") |
| 801 | clean_indata = clean_indata['nst:nst'][0] |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 802 | return clean_indata |
| 803 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 804 | def _validate_input_edit(self, indata, force=False): |
| 805 | # TODO validate with pyangbind, serialize |
| 806 | return indata |
| 807 | |
| gcalvino | a6fe000 | 2019-01-09 13:27:11 +0100 | [diff] [blame] | 808 | def _validate_input_new(self, indata, storage_params, force=False): |
| gcalvino | 70434c1 | 2018-11-27 15:17:04 +0100 | [diff] [blame] | 809 | indata = self.pyangbind_validation("nsts", indata, force) |
| Felipe Vicens | e36ab85 | 2018-11-23 14:12:09 +0100 | [diff] [blame] | 810 | return indata.copy() |
| 811 | |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 812 | def _check_descriptor_dependencies(self, session, descriptor): |
| 813 | """ |
| 814 | Check that the dependent descriptors exist on a new descriptor or edition |
| 815 | :param session: client session information |
| 816 | :param descriptor: descriptor to be inserted or edit |
| 817 | :return: None or raises exception |
| 818 | """ |
| 819 | if not descriptor.get("netslice-subnet"): |
| 820 | return |
| 821 | for nsd in descriptor["netslice-subnet"]: |
| 822 | nsd_id = nsd["nsd-ref"] |
| 823 | filter_q = self._get_project_filter(session, write=False, show_all=True) |
| 824 | filter_q["id"] = nsd_id |
| 825 | if not self.db.get_list("nsds", filter_q): |
| 826 | raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non " |
| 827 | "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT) |
| 828 | |
| 829 | def check_conflict_on_edit(self, session, final_content, edit_content, _id, force=False): |
| 830 | super().check_conflict_on_edit(session, final_content, edit_content, _id, force=force) |
| 831 | |
| 832 | self._check_descriptor_dependencies(session, final_content) |
| 833 | |
| 834 | def check_conflict_on_del(self, session, _id, force=False): |
| 835 | """ |
| 836 | Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note |
| 837 | that NST can be public and be used by other projects. |
| 838 | :param session: |
| Felipe Vicens | 07f3172 | 2018-10-29 15:16:44 +0100 | [diff] [blame] | 839 | :param _id: nst internal id |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 840 | :param force: Avoid this checking |
| 841 | :return: None or raises EngineException with the conflict |
| 842 | """ |
| 843 | # TODO: Check this method |
| 844 | if force: |
| 845 | return |
| Felipe Vicens | 07f3172 | 2018-10-29 15:16:44 +0100 | [diff] [blame] | 846 | # Get Network Slice Template from Database |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 847 | _filter = self._get_project_filter(session, write=False, show_all=False) |
| Felipe Vicens | 07f3172 | 2018-10-29 15:16:44 +0100 | [diff] [blame] | 848 | _filter["_id"] = _id |
| Felipe Vicens | e36ab85 | 2018-11-23 14:12:09 +0100 | [diff] [blame] | 849 | nst = self.db.get_one("nsts", _filter) |
| Felipe Vicens | 07f3172 | 2018-10-29 15:16:44 +0100 | [diff] [blame] | 850 | |
| 851 | # Search NSIs using NST via nst-ref |
| 852 | _filter = self._get_project_filter(session, write=False, show_all=False) |
| 853 | _filter["nst-ref"] = nst["id"] |
| Felipe Vicens | e36ab85 | 2018-11-23 14:12:09 +0100 | [diff] [blame] | 854 | nsis_list = self.db.get_list("nsis", _filter) |
| 855 | for nsi_item in nsis_list: |
| 856 | if nsi_item["_admin"].get("nsiState") != "TERMINATED": |
| 857 | raise EngineException("There is some NSIS that depends on this NST", http_code=HTTPStatus.CONFLICT) |
| Felipe Vicens | b57758d | 2018-10-16 16:00:20 +0200 | [diff] [blame] | 858 | |
| 859 | |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 860 | class PduTopic(BaseTopic): |
| 861 | topic = "pdus" |
| 862 | topic_msg = "pdu" |
| 863 | schema_new = pdu_new_schema |
| 864 | schema_edit = pdu_edit_schema |
| 865 | |
| 866 | def __init__(self, db, fs, msg): |
| 867 | BaseTopic.__init__(self, db, fs, msg) |
| 868 | |
| 869 | @staticmethod |
| 870 | def format_on_new(content, project_id=None, make_public=False): |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 871 | BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public) |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 872 | content["_admin"]["onboardingState"] = "CREATED" |
| tierno | 36ec860 | 2018-11-02 17:27:11 +0100 | [diff] [blame] | 873 | content["_admin"]["operationalState"] = "ENABLED" |
| 874 | content["_admin"]["usageState"] = "NOT_IN_USE" |
| tierno | b24258a | 2018-10-04 18:39:49 +0200 | [diff] [blame] | 875 | |
| 876 | def check_conflict_on_del(self, session, _id, force=False): |
| 877 | if force: |
| 878 | return |
| 879 | # TODO Is it needed to check descriptors _admin.project_read/project_write?? |
| 880 | _filter = {"vdur.pdu-id": _id} |
| 881 | if self.db.get_list("vnfrs", _filter): |
| 882 | raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT) |