Coverage for osm_nbi/descriptor_topics.py: 67%

1001 statements  

« prev     ^ index     » next       coverage.py v7.3.1, created at 2024-06-30 10:14 +0000

1# -*- coding: utf-8 -*- 

2 

3# Licensed under the Apache License, Version 2.0 (the "License"); 

4# you may not use this file except in compliance with the License. 

5# You may obtain a copy of the License at 

6# 

7# http://www.apache.org/licenses/LICENSE-2.0 

8# 

9# Unless required by applicable law or agreed to in writing, software 

10# distributed under the License is distributed on an "AS IS" BASIS, 

11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 

12# implied. 

13# See the License for the specific language governing permissions and 

14# limitations under the License. 

15 

16import tarfile 

17import yaml 

18import json 

19import copy 

20import os 

21import shutil 

22import functools 

23import re 

24 

25# import logging 

26from deepdiff import DeepDiff 

27from hashlib import md5 

28from osm_common.dbbase import DbException, deep_update_rfc7396 

29from http import HTTPStatus 

30from time import time 

31from uuid import uuid4 

32from re import fullmatch 

33from zipfile import ZipFile 

34from urllib.parse import urlparse 

35from osm_nbi.validation import ( 

36 ValidationError, 

37 pdu_new_schema, 

38 pdu_edit_schema, 

39 validate_input, 

40 vnfpkgop_new_schema, 

41) 

42from osm_nbi.base_topic import ( 

43 BaseTopic, 

44 EngineException, 

45 get_iterable, 

46 detect_descriptor_usage, 

47) 

48from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd 

49from osm_im.nst import nst as nst_im 

50from pyangbind.lib.serialise import pybindJSONDecoder 

51import pyangbind.lib.pybindJSON as pybindJSON 

52from osm_nbi import utils 

53 

54__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>" 

55 

# Accepted helm-chart references (see VnfdTopic.validate_helm_chart): lowercase
# alphanumerics and inner hyphens, with an optional "<repo>/" prefix,
# e.g. "mychart" or "stable/mysql".  Full URLs are validated separately.
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)

59 

60 

61class DescriptorTopic(BaseTopic): 

    def __init__(self, db, fs, msg, auth):
        """Wire database, file-system, message-bus and auth handlers via BaseTopic."""
        super().__init__(db, fs, msg, auth)

    def _validate_input_new(self, indata, storage_params, force=False):
        # Default hook: no validation here; concrete topics (vnfds, nsds, ...) override it.
        return indata

67 

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate the merged descriptor before it is stored.

        Runs the base checks, enforces per-list id/name uniqueness, re-validates the
        content with pyangbind (via _validate_input_new) and checks that the SOL006
        "id" does not collide with another descriptor of the same project.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: partial content sent by the client
        :param _id: internal database id of the descriptor
        :return: the serialized (validated) final content
        :raises EngineException: duplicated identifiers (422) or id conflict (409)
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        def _check_unique_id_name(descriptor, position=""):
            # Recursively enforce that inside every list of dicts the "id" (or, if the
            # first item has no "id", the "name") values are unique.  The key to check
            # is decided from the first list item only.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" was present in final_content; a missing key
        # would raise KeyError here — confirm upstream guarantee
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            # forced edits skip the id-collision check below
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        (str(self.topic))[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content

139 

140 @staticmethod 

141 def format_on_new(content, project_id=None, make_public=False): 

142 BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public) 

143 content["_admin"]["onboardingState"] = "CREATED" 

144 content["_admin"]["operationalState"] = "DISABLED" 

145 content["_admin"]["usageState"] = "NOT_IN_USE" 

146 

147 def delete_extra(self, session, _id, db_content, not_send_msg=None): 

148 """ 

149 Deletes file system storage associated with the descriptor 

150 :param session: contains "username", "admin", "force", "public", "project_id", "set_project" 

151 :param _id: server internal id 

152 :param db_content: The database content of the descriptor 

153 :param not_send_msg: To not send message (False) or store content (list) instead 

154 :return: None if ok or raises EngineException with the problem 

155 """ 

156 self.fs.file_delete(_id, ignore_non_exist=True) 

157 self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder 

158 # Remove file revisions 

159 if "revision" in db_content["_admin"]: 

160 revision = db_content["_admin"]["revision"] 

161 while revision > 0: 

162 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True) 

163 revision = revision - 1 

164 

    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Resolve a descriptor by its SOL006 "id" field (not the database "_id").

        :param db: database handler
        :param session: used to build the project filter
        :param topic: collection name, e.g. "vnfds"
        :param id: descriptor "id" value to look up
        :return: the single matching descriptor
        :raises DbException: NOT_FOUND if none, CONFLICT if ambiguous
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        # NOTE(review): this filter is built exactly like the one above (no public flag is
        # added), so this repeats the same query — confirm whether a public-only filter
        # was intended here
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

199 

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
        Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
        (self.upload_content)
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No needed to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop: at creation time only the userDefinedData envelope is relevant
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        # revision 0 means "no content uploaded yet"
        content = {"_admin": {"userDefinedData": indata, "revision": 0}}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None

237 

238 def upload_content(self, session, _id, indata, kwargs, headers): 

239 """ 

240 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract) 

241 :param session: contains "username", "admin", "force", "public", "project_id", "set_project" 

242 :param _id : the nsd,vnfd is already created, this is the id 

243 :param indata: http body request 

244 :param kwargs: user query string to override parameters. NOT USED 

245 :param headers: http request headers 

246 :return: True if package is completely uploaded or False if partial content has been uploded 

247 Raise exception on error 

248 """ 

249 # Check that _id exists and it is valid 

250 current_desc = self.show(session, _id) 

251 

252 content_range_text = headers.get("Content-Range") 

253 expected_md5 = headers.get("Content-File-MD5") 

254 compressed = None 

255 content_type = headers.get("Content-Type") 

256 if ( 

257 content_type 

258 and "application/gzip" in content_type 

259 or "application/x-gzip" in content_type 

260 ): 

261 compressed = "gzip" 

262 if content_type and "application/zip" in content_type: 

263 compressed = "zip" 

264 filename = headers.get("Content-Filename") 

265 if not filename and compressed: 

266 filename = "package.tar.gz" if compressed == "gzip" else "package.zip" 

267 elif not filename: 

268 filename = "package" 

269 

270 revision = 1 

271 if "revision" in current_desc["_admin"]: 

272 revision = current_desc["_admin"]["revision"] + 1 

273 

274 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266 

275 file_pkg = None 

276 error_text = "" 

277 fs_rollback = [] 

278 

279 try: 

280 if content_range_text: 

281 content_range = ( 

282 content_range_text.replace("-", " ").replace("/", " ").split() 

283 ) 

284 if ( 

285 content_range[0] != "bytes" 

286 ): # TODO check x<y not negative < total.... 

287 raise IndexError() 

288 start = int(content_range[1]) 

289 end = int(content_range[2]) + 1 

290 total = int(content_range[3]) 

291 else: 

292 start = 0 

293 # Rather than using a temp folder, we will store the package in a folder based on 

294 # the current revision. 

295 proposed_revision_path = ( 

296 _id + ":" + str(revision) 

297 ) # all the content is upload here and if ok, it is rename from id_ to is folder 

298 

299 if start: 

300 if not self.fs.file_exists(proposed_revision_path, "dir"): 

301 raise EngineException( 

302 "invalid Transaction-Id header", HTTPStatus.NOT_FOUND 

303 ) 

304 else: 

305 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True) 

306 self.fs.mkdir(proposed_revision_path) 

307 fs_rollback.append(proposed_revision_path) 

308 

309 storage = self.fs.get_params() 

310 storage["folder"] = proposed_revision_path 

311 

312 file_path = (proposed_revision_path, filename) 

313 if self.fs.file_exists(file_path, "file"): 

314 file_size = self.fs.file_size(file_path) 

315 else: 

316 file_size = 0 

317 if file_size != start: 

318 raise EngineException( 

319 "invalid Content-Range start sequence, expected '{}' but received '{}'".format( 

320 file_size, start 

321 ), 

322 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE, 

323 ) 

324 file_pkg = self.fs.file_open(file_path, "a+b") 

325 if isinstance(indata, dict): 

326 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False) 

327 file_pkg.write(indata_text.encode(encoding="utf-8")) 

328 else: 

329 indata_len = 0 

330 while True: 

331 indata_text = indata.read(4096) 

332 indata_len += len(indata_text) 

333 if not indata_text: 

334 break 

335 file_pkg.write(indata_text) 

336 if content_range_text: 

337 if indata_len != end - start: 

338 raise EngineException( 

339 "Mismatch between Content-Range header {}-{} and body length of {}".format( 

340 start, end - 1, indata_len 

341 ), 

342 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE, 

343 ) 

344 if end != total: 

345 # TODO update to UPLOADING 

346 return False 

347 

348 # PACKAGE UPLOADED 

349 if expected_md5: 

350 file_pkg.seek(0, 0) 

351 file_md5 = md5() 

352 chunk_data = file_pkg.read(1024) 

353 while chunk_data: 

354 file_md5.update(chunk_data) 

355 chunk_data = file_pkg.read(1024) 

356 if expected_md5 != file_md5.hexdigest(): 

357 raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT) 

358 file_pkg.seek(0, 0) 

359 if compressed == "gzip": 

360 tar = tarfile.open(mode="r", fileobj=file_pkg) 

361 descriptor_file_name = None 

362 for tarinfo in tar: 

363 tarname = tarinfo.name 

364 tarname_path = tarname.split("/") 

365 if ( 

366 not tarname_path[0] or ".." in tarname_path 

367 ): # if start with "/" means absolute path 

368 raise EngineException( 

369 "Absolute path or '..' are not allowed for package descriptor tar.gz" 

370 ) 

371 if len(tarname_path) == 1 and not tarinfo.isdir(): 

372 raise EngineException( 

373 "All files must be inside a dir for package descriptor tar.gz" 

374 ) 

375 if ( 

376 tarname.endswith(".yaml") 

377 or tarname.endswith(".json") 

378 or tarname.endswith(".yml") 

379 ): 

380 storage["pkg-dir"] = tarname_path[0] 

381 if len(tarname_path) == 2: 

382 if descriptor_file_name: 

383 raise EngineException( 

384 "Found more than one descriptor file at package descriptor tar.gz" 

385 ) 

386 descriptor_file_name = tarname 

387 if not descriptor_file_name: 

388 raise EngineException( 

389 "Not found any descriptor file at package descriptor tar.gz" 

390 ) 

391 storage["descriptor"] = descriptor_file_name 

392 storage["zipfile"] = filename 

393 self.fs.file_extract(tar, proposed_revision_path) 

394 with self.fs.file_open( 

395 (proposed_revision_path, descriptor_file_name), "r" 

396 ) as descriptor_file: 

397 content = descriptor_file.read() 

398 elif compressed == "zip": 

399 zipfile = ZipFile(file_pkg) 

400 descriptor_file_name = None 

401 for package_file in zipfile.infolist(): 

402 zipfilename = package_file.filename 

403 file_path = zipfilename.split("/") 

404 if ( 

405 not file_path[0] or ".." in zipfilename 

406 ): # if start with "/" means absolute path 

407 raise EngineException( 

408 "Absolute path or '..' are not allowed for package descriptor zip" 

409 ) 

410 

411 if ( 

412 zipfilename.endswith(".yaml") 

413 or zipfilename.endswith(".json") 

414 or zipfilename.endswith(".yml") 

415 ) and ( 

416 zipfilename.find("/") < 0 

417 or zipfilename.find("Definitions") >= 0 

418 ): 

419 storage["pkg-dir"] = "" 

420 if descriptor_file_name: 

421 raise EngineException( 

422 "Found more than one descriptor file at package descriptor zip" 

423 ) 

424 descriptor_file_name = zipfilename 

425 if not descriptor_file_name: 

426 raise EngineException( 

427 "Not found any descriptor file at package descriptor zip" 

428 ) 

429 storage["descriptor"] = descriptor_file_name 

430 storage["zipfile"] = filename 

431 self.fs.file_extract(zipfile, proposed_revision_path) 

432 

433 with self.fs.file_open( 

434 (proposed_revision_path, descriptor_file_name), "r" 

435 ) as descriptor_file: 

436 content = descriptor_file.read() 

437 else: 

438 content = file_pkg.read() 

439 storage["descriptor"] = descriptor_file_name = filename 

440 

441 if descriptor_file_name.endswith(".json"): 

442 error_text = "Invalid json format " 

443 indata = json.load(content) 

444 else: 

445 error_text = "Invalid yaml format " 

446 indata = yaml.safe_load(content) 

447 

448 # Need to close the file package here so it can be copied from the 

449 # revision to the current, unrevisioned record 

450 if file_pkg: 

451 file_pkg.close() 

452 file_pkg = None 

453 

454 # Fetch both the incoming, proposed revision and the original revision so we 

455 # can call a validate method to compare them 

456 current_revision_path = _id + "/" 

457 self.fs.sync(from_path=current_revision_path) 

458 self.fs.sync(from_path=proposed_revision_path) 

459 

460 if revision > 1: 

461 try: 

462 self._validate_descriptor_changes( 

463 _id, 

464 descriptor_file_name, 

465 current_revision_path, 

466 proposed_revision_path, 

467 ) 

468 except Exception as e: 

469 shutil.rmtree( 

470 self.fs.path + current_revision_path, ignore_errors=True 

471 ) 

472 shutil.rmtree( 

473 self.fs.path + proposed_revision_path, ignore_errors=True 

474 ) 

475 # Only delete the new revision. We need to keep the original version in place 

476 # as it has not been changed. 

477 self.fs.file_delete(proposed_revision_path, ignore_non_exist=True) 

478 raise e 

479 

480 indata = self._remove_envelop(indata) 

481 

482 # Override descriptor with query string kwargs 

483 if kwargs: 

484 self._update_input_with_kwargs(indata, kwargs) 

485 

486 current_desc["_admin"]["storage"] = storage 

487 current_desc["_admin"]["onboardingState"] = "ONBOARDED" 

488 current_desc["_admin"]["operationalState"] = "ENABLED" 

489 current_desc["_admin"]["modified"] = time() 

490 current_desc["_admin"]["revision"] = revision 

491 

492 deep_update_rfc7396(current_desc, indata) 

493 current_desc = self.check_conflict_on_edit( 

494 session, current_desc, indata, _id=_id 

495 ) 

496 

497 # Copy the revision to the active package name by its original id 

498 shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True) 

499 os.rename( 

500 self.fs.path + proposed_revision_path, 

501 self.fs.path + current_revision_path, 

502 ) 

503 self.fs.file_delete(current_revision_path, ignore_non_exist=True) 

504 self.fs.mkdir(current_revision_path) 

505 self.fs.reverse_sync(from_path=current_revision_path) 

506 

507 shutil.rmtree(self.fs.path + _id) 

508 

509 self.db.replace(self.topic, _id, current_desc) 

510 

511 # Store a copy of the package as a point in time revision 

512 revision_desc = dict(current_desc) 

513 revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"]) 

514 self.db.create(self.topic + "_revisions", revision_desc) 

515 fs_rollback = [] 

516 

517 indata["_id"] = _id 

518 self._send_msg("edited", indata) 

519 

520 # TODO if descriptor has changed because kwargs update content and remove cached zip 

521 # TODO if zip is not present creates one 

522 return True 

523 

524 except EngineException: 

525 raise 

526 except IndexError: 

527 raise EngineException( 

528 "invalid Content-Range header format. Expected 'bytes start-end/total'", 

529 HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE, 

530 ) 

531 except IOError as e: 

532 raise EngineException( 

533 "invalid upload transaction sequence: '{}'".format(e), 

534 HTTPStatus.BAD_REQUEST, 

535 ) 

536 except tarfile.ReadError as e: 

537 raise EngineException( 

538 "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST 

539 ) 

540 except (ValueError, yaml.YAMLError) as e: 

541 raise EngineException(error_text + str(e)) 

542 except ValidationError as e: 

543 raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY) 

544 finally: 

545 if file_pkg: 

546 file_pkg.close() 

547 for file in fs_rollback: 

548 self.fs.file_delete(file, ignore_non_exist=True) 

549 

    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain application/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        # content can only be served once the package upload has completed
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decision table for the whole-package request:
        # pkgtype          accept ZIP      TEXT    -> result
        # manyfiles        yes             X       -> zip
        #                  no              yes     -> error
        # onefile          yes             no      -> zip
        #                  X               yes     -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )

640 

641 def _remove_yang_prefixes_from_descriptor(self, descriptor): 

642 new_descriptor = {} 

643 for k, v in descriptor.items(): 

644 new_v = v 

645 if isinstance(v, dict): 

646 new_v = self._remove_yang_prefixes_from_descriptor(v) 

647 elif isinstance(v, list): 

648 new_v = list() 

649 for x in v: 

650 if isinstance(x, dict): 

651 new_v.append(self._remove_yang_prefixes_from_descriptor(x)) 

652 else: 

653 new_v.append(x) 

654 new_descriptor[k.split(":")[-1]] = new_v 

655 return new_descriptor 

656 

    def pyangbind_validation(self, item, data, force=False):
        """Default hook: model validation is only implemented in concrete subclasses."""
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )

662 

    def _validate_input_edit(self, indata, content, force=False):
        """
        Sanitize an edit payload before it is merged into the descriptor.

        :param indata: edit content provided by the client
        :param content: current database content of the descriptor
        :param force: kept for interface compatibility
        :return: sanitized indata, with operationalState/userDefinedData moved under "_admin"
        :raises EngineException: invalid state value, wrong userDefinedData type, or no-op state change
        """
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # asking for the state the descriptor already has is reported as a conflict
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata

708 

    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """
        Hook invoked on re-upload (revision > 1) to compare the active descriptor with the
        proposed one. This default implementation accepts any change; subclasses override
        it to reject modifications of immutable nodes.

        :param descriptor_id: internal id of the descriptor being updated
        :param descriptor_file_name: descriptor file name inside the package
        :param old_descriptor_directory: fs path of the currently active revision
        :param new_descriptor_directory: fs path of the proposed revision
        :return: None; implementations raise EngineException to reject the change
        """
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        #     http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        # )
        pass

722 

723 

724class VnfdTopic(DescriptorTopic): 

725 topic = "vnfds" 

726 topic_msg = "vnfd" 

727 

    def __init__(self, db, fs, msg, auth):
        """Delegate handler wiring to DescriptorTopic."""
        DescriptorTopic.__init__(self, db, fs, msg, auth)

730 

    def pyangbind_validation(self, item, data, force=False):
        """
        Validate a VNFD against the ETSI SOL006 information model using pyangbind.

        :param item: topic name (used only for the caller's context)
        :param data: descriptor content (dict) without envelope
        :param force: if True, unknown fields are skipped instead of rejected
        :return: input data deep-updated with the serialized (normalized) descriptor
        :raises EngineException: pre-SOL006 descriptors or any validation failure (HTTP 422)
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            # pyangbind emits keys with yang module prefixes; strip them back off
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            # wrap any pyangbind/parsing error as a 422 for the API client
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

756 

757 @staticmethod 

758 def _descriptor_data_is_in_old_format(data): 

759 return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data) 

760 

761 @staticmethod 

762 def _remove_envelop(indata=None): 

763 if not indata: 

764 return {} 

765 clean_indata = indata 

766 

767 if clean_indata.get("etsi-nfv-vnfd:vnfd"): 

768 if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict): 

769 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict") 

770 clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"] 

771 elif clean_indata.get("vnfd"): 

772 if not isinstance(clean_indata["vnfd"], dict): 

773 raise EngineException("'vnfd' must be dict") 

774 clean_indata = clean_indata["vnfd"] 

775 

776 return clean_indata 

777 

778 def check_conflict_on_edit(self, session, final_content, edit_content, _id): 

779 final_content = super().check_conflict_on_edit( 

780 session, final_content, edit_content, _id 

781 ) 

782 

783 # set type of vnfd 

784 contains_pdu = False 

785 contains_vdu = False 

786 for vdu in get_iterable(final_content.get("vdu")): 

787 if vdu.get("pdu-type"): 

788 contains_pdu = True 

789 else: 

790 contains_vdu = True 

791 if contains_pdu: 

792 final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd" 

793 elif contains_vdu: 

794 final_content["_admin"]["type"] = "vnfd" 

795 # if neither vud nor pdu do not fill type 

796 return final_content 

797 

798 def check_conflict_on_del(self, session, _id, db_content): 

799 """ 

800 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note 

801 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr 

802 that uses this vnfd 

803 :param session: contains "username", "admin", "force", "public", "project_id", "set_project" 

804 :param _id: vnfd internal id 

805 :param db_content: The database content of the _id. 

806 :return: None or raises EngineException with the conflict 

807 """ 

808 if session["force"]: 

809 return 

810 descriptor = db_content 

811 descriptor_id = descriptor.get("id") 

812 if not descriptor_id: # empty vnfd not uploaded 

813 return 

814 

815 _filter = self._get_project_filter(session) 

816 

817 # check vnfrs using this vnfd 

818 _filter["vnfd-id"] = _id 

819 if self.db.get_list("vnfrs", _filter): 

820 raise EngineException( 

821 "There is at least one VNF instance using this descriptor", 

822 http_code=HTTPStatus.CONFLICT, 

823 ) 

824 

825 # check NSD referencing this VNFD 

826 del _filter["vnfd-id"] 

827 _filter["vnfd-id"] = descriptor_id 

828 if self.db.get_list("nsds", _filter): 

829 raise EngineException( 

830 "There is at least one NS package referencing this descriptor", 

831 http_code=HTTPStatus.CONFLICT, 

832 ) 

833 

    def _validate_input_new(self, indata, storage_params, force=False):
        """
        Validate a new VNFD: pyangbind (SOL006) model validation plus cross-reference checks.

        :param indata: descriptor content without envelope
        :param storage_params: _admin storage data, used to locate packaged artifacts
        :param force: if True, unknown descriptor fields are tolerated
        :return: the validated (normalized) descriptor
        :raises EngineException: on any validation failure
        """
        # SOL005 read-only attributes must not be stored as descriptor content
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            # NOTE(review): called once per vdu with identical arguments — looks
            # redundant; confirm whether it should take vdu or be hoisted out of the loop
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)
        self.validate_helm_chart(indata)

        return indata

859 

860 @staticmethod 

861 def validate_helm_chart(indata): 

862 def is_url(url): 

863 result = urlparse(url) 

864 return all([result.scheme, result.netloc]) 

865 

866 kdus = indata.get("kdu", []) 

867 for kdu in kdus: 

868 helm_chart_value = kdu.get("helm-chart") 

869 if not helm_chart_value: 

870 continue 

871 if not ( 

872 valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value) 

873 ): 

874 raise EngineException( 

875 "helm-chart '{}' is not valid".format(helm_chart_value), 

876 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

877 ) 

878 

879 @staticmethod 

880 def validate_mgmt_interface_connection_point(indata): 

881 if not indata.get("vdu"): 

882 return 

883 if not indata.get("mgmt-cp"): 

884 raise EngineException( 

885 "'mgmt-cp' is a mandatory field and it is not defined", 

886 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

887 ) 

888 

889 for cp in get_iterable(indata.get("ext-cpd")): 

890 if cp["id"] == indata["mgmt-cp"]: 

891 break 

892 else: 

893 raise EngineException( 

894 "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]), 

895 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

896 ) 

897 

898 @staticmethod 

899 def validate_vdu_internal_connection_points(vdu): 

900 int_cpds = set() 

901 for cpd in get_iterable(vdu.get("int-cpd")): 

902 cpd_id = cpd.get("id") 

903 if cpd_id and cpd_id in int_cpds: 

904 raise EngineException( 

905 "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format( 

906 vdu["id"], cpd_id 

907 ), 

908 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

909 ) 

910 int_cpds.add(cpd_id) 

911 

912 @staticmethod 

913 def validate_external_connection_points(indata): 

914 all_vdus_int_cpds = set() 

915 for vdu in get_iterable(indata.get("vdu")): 

916 for int_cpd in get_iterable(vdu.get("int-cpd")): 

917 all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id"))) 

918 

919 ext_cpds = set() 

920 for cpd in get_iterable(indata.get("ext-cpd")): 

921 cpd_id = cpd.get("id") 

922 if cpd_id and cpd_id in ext_cpds: 

923 raise EngineException( 

924 "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id), 

925 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

926 ) 

927 ext_cpds.add(cpd_id) 

928 

929 int_cpd = cpd.get("int-cpd") 

930 if int_cpd: 

931 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds: 

932 raise EngineException( 

933 "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format( 

934 cpd_id 

935 ), 

936 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

937 ) 

938 # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ? 

939 

940 def _validate_vdu_charms_in_package(self, storage_params, indata): 

941 for df in indata["df"]: 

942 if ( 

943 "lcm-operations-configuration" in df 

944 and "operate-vnf-op-config" in df["lcm-operations-configuration"] 

945 ): 

946 configs = df["lcm-operations-configuration"][ 

947 "operate-vnf-op-config" 

948 ].get("day1-2", []) 

949 vdus = df.get("vdu-profile", []) 

950 for vdu in vdus: 

951 for config in configs: 

952 if config["id"] == vdu["id"] and utils.find_in_list( 

953 config.get("execution-environment-list", []), 

954 lambda ee: "juju" in ee, 

955 ): 

956 if not self._validate_package_folders( 

957 storage_params, "charms" 

958 ) and not self._validate_package_folders( 

959 storage_params, "Scripts/charms" 

960 ): 

961 raise EngineException( 

962 "Charm defined in vnf[id={}] but not present in " 

963 "package".format(indata["id"]) 

964 ) 

965 

966 def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata): 

967 if not vdu.get("cloud-init-file"): 

968 return 

969 if not self._validate_package_folders( 

970 storage_params, "cloud_init", vdu["cloud-init-file"] 

971 ) and not self._validate_package_folders( 

972 storage_params, "Scripts/cloud_init", vdu["cloud-init-file"] 

973 ): 

974 raise EngineException( 

975 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in " 

976 "package".format(indata["id"], vdu["id"]) 

977 ) 

978 

    def _validate_vnf_charms_in_package(self, storage_params, indata):
        """Check that the VNF-level juju charm declared in day1-2 config is
        shipped inside the package.

        :param storage_params: _admin.storage of the descriptor
        :param indata: VNFD as a dictionary
        :raises EngineException: when a charm is referenced but neither
            'charms' nor 'Scripts/charms' exists in the package
        """
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            # NOTE(review): these early 'return's abort the whole check at the
            # first df that lacks the configuration containers, skipping any
            # later df; the sibling VDU-level check iterates all dfs with
            # 'continue' semantics — confirm 'return' is intended here.
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                # The VNF-level config entry is the one whose id equals the VNFD id.
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        # Charms may live under 'charms' or 'Scripts/charms'.
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )

1006 

1007 def _validate_package_folders(self, storage_params, folder, file=None): 

1008 if not storage_params: 

1009 return False 

1010 elif not storage_params.get("pkg-dir"): 

1011 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"): 

1012 f = "{}_/{}".format(storage_params["folder"], folder) 

1013 else: 

1014 f = "{}/{}".format(storage_params["folder"], folder) 

1015 if file: 

1016 return self.fs.file_exists("{}/{}".format(f, file), "file") 

1017 else: 

1018 if self.fs.file_exists(f, "dir"): 

1019 if self.fs.dir_ls(f): 

1020 return True 

1021 return False 

1022 else: 

1023 if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"): 

1024 f = "{}_/{}/{}".format( 

1025 storage_params["folder"], storage_params["pkg-dir"], folder 

1026 ) 

1027 else: 

1028 f = "{}/{}/{}".format( 

1029 storage_params["folder"], storage_params["pkg-dir"], folder 

1030 ) 

1031 if file: 

1032 return self.fs.file_exists("{}/{}".format(f, file), "file") 

1033 else: 

1034 if self.fs.file_exists(f, "dir"): 

1035 if self.fs.dir_ls(f): 

1036 return True 

1037 return False 

1038 

1039 @staticmethod 

1040 def validate_internal_virtual_links(indata): 

1041 all_ivld_ids = set() 

1042 for ivld in get_iterable(indata.get("int-virtual-link-desc")): 

1043 ivld_id = ivld.get("id") 

1044 if ivld_id and ivld_id in all_ivld_ids: 

1045 raise EngineException( 

1046 "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id), 

1047 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1048 ) 

1049 else: 

1050 all_ivld_ids.add(ivld_id) 

1051 

1052 for vdu in get_iterable(indata.get("vdu")): 

1053 for int_cpd in get_iterable(vdu.get("int-cpd")): 

1054 int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc") 

1055 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids: 

1056 raise EngineException( 

1057 "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing " 

1058 "int-virtual-link-desc".format( 

1059 vdu["id"], int_cpd["id"], int_cpd_ivld_id 

1060 ), 

1061 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1062 ) 

1063 

1064 for df in get_iterable(indata.get("df")): 

1065 for vlp in get_iterable(df.get("virtual-link-profile")): 

1066 vlp_ivld_id = vlp.get("id") 

1067 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids: 

1068 raise EngineException( 

1069 "df[id='{}']:virtual-link-profile='{}' must match an existing " 

1070 "int-virtual-link-desc".format(df["id"], vlp_ivld_id), 

1071 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1072 ) 

1073 

1074 @staticmethod 

1075 def validate_monitoring_params(indata): 

1076 all_monitoring_params = set() 

1077 for ivld in get_iterable(indata.get("int-virtual-link-desc")): 

1078 for mp in get_iterable(ivld.get("monitoring-parameters")): 

1079 mp_id = mp.get("id") 

1080 if mp_id and mp_id in all_monitoring_params: 

1081 raise EngineException( 

1082 "Duplicated monitoring-parameter id in " 

1083 "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format( 

1084 ivld["id"], mp_id 

1085 ), 

1086 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1087 ) 

1088 else: 

1089 all_monitoring_params.add(mp_id) 

1090 

1091 for vdu in get_iterable(indata.get("vdu")): 

1092 for mp in get_iterable(vdu.get("monitoring-parameter")): 

1093 mp_id = mp.get("id") 

1094 if mp_id and mp_id in all_monitoring_params: 

1095 raise EngineException( 

1096 "Duplicated monitoring-parameter id in " 

1097 "vdu[id='{}']:monitoring-parameter[id='{}']".format( 

1098 vdu["id"], mp_id 

1099 ), 

1100 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1101 ) 

1102 else: 

1103 all_monitoring_params.add(mp_id) 

1104 

1105 for df in get_iterable(indata.get("df")): 

1106 for mp in get_iterable(df.get("monitoring-parameter")): 

1107 mp_id = mp.get("id") 

1108 if mp_id and mp_id in all_monitoring_params: 

1109 raise EngineException( 

1110 "Duplicated monitoring-parameter id in " 

1111 "df[id='{}']:monitoring-parameter[id='{}']".format( 

1112 df["id"], mp_id 

1113 ), 

1114 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1115 ) 

1116 else: 

1117 all_monitoring_params.add(mp_id) 

1118 

    @staticmethod
    def validate_scaling_group_descriptor(indata):
        """Validate df scaling-aspect cross references.

        Checks that every scaling-criteria 'vnf-monitoring-param-ref' points
        to a monitoring parameter declared anywhere in the VNFD, and that
        every scaling-config-action references an existing day1-2
        config-primitive.

        :param indata: VNFD as a dictionary
        :raises EngineException: on any dangling reference
        """
        # Collect every monitoring-parameter id declared in the descriptor.
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                # Each scaling criteria must reference a declared monitoring param.
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    # A scaling-config-action requires a day1-2 configuration
                    # entry whose id matches the VNFD id.
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    # Every day1-2 configuration must contain a config-primitive
                    # matching the referenced name (for/else: raise when the
                    # inner loop finds no match).
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

1199 

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated vnfpkgops and revision entries from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Remove package operations bound to this package.
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        # Revision documents embed the package id in their _id, hence the
        # regex match. NOTE(review): _id is not regex-escaped; assumed to be a
        # plain uuid with no regex metacharacters — confirm.
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

1213 

1214 def sol005_projection(self, data): 

1215 data["onboardingState"] = data["_admin"]["onboardingState"] 

1216 data["operationalState"] = data["_admin"]["operationalState"] 

1217 data["usageState"] = data["_admin"]["usageState"] 

1218 

1219 links = {} 

1220 links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])} 

1221 links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])} 

1222 links["packageContent"] = { 

1223 "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"]) 

1224 } 

1225 data["_links"] = links 

1226 

1227 return super().sol005_projection(data) 

1228 

1229 @staticmethod 

1230 def find_software_version(vnfd: dict) -> str: 

1231 """Find the sotware version in the VNFD descriptors 

1232 

1233 Args: 

1234 vnfd (dict): Descriptor as a dictionary 

1235 

1236 Returns: 

1237 software-version (str) 

1238 """ 

1239 default_sw_version = "1.0" 

1240 if vnfd.get("vnfd"): 

1241 vnfd = vnfd["vnfd"] 

1242 if vnfd.get("software-version"): 

1243 return vnfd["software-version"] 

1244 else: 

1245 return default_sw_version 

1246 

1247 @staticmethod 

1248 def extract_policies(vnfd: dict) -> dict: 

1249 """Removes the policies from the VNFD descriptors 

1250 

1251 Args: 

1252 vnfd (dict): Descriptor as a dictionary 

1253 

1254 Returns: 

1255 vnfd (dict): VNFD which does not include policies 

1256 """ 

1257 for df in vnfd.get("df", {}): 

1258 for policy in ["scaling-aspect", "healing-aspect"]: 

1259 if df.get(policy, {}): 

1260 df.pop(policy) 

1261 for vdu in vnfd.get("vdu", {}): 

1262 for alarm_policy in ["alarm", "monitoring-parameter"]: 

1263 if vdu.get(alarm_policy, {}): 

1264 vdu.pop(alarm_policy) 

1265 return vnfd 

1266 

1267 @staticmethod 

1268 def extract_day12_primitives(vnfd: dict) -> dict: 

1269 """Removes the day12 primitives from the VNFD descriptors 

1270 

1271 Args: 

1272 vnfd (dict): Descriptor as a dictionary 

1273 

1274 Returns: 

1275 vnfd (dict) 

1276 """ 

1277 for df_id, df in enumerate(vnfd.get("df", {})): 

1278 if ( 

1279 df.get("lcm-operations-configuration", {}) 

1280 .get("operate-vnf-op-config", {}) 

1281 .get("day1-2") 

1282 ): 

1283 day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get( 

1284 "day1-2" 

1285 ) 

1286 for config_id, config in enumerate(day12): 

1287 for key in [ 

1288 "initial-config-primitive", 

1289 "config-primitive", 

1290 "terminate-config-primitive", 

1291 ]: 

1292 config.pop(key, None) 

1293 day12[config_id] = config 

1294 df["lcm-operations-configuration"]["operate-vnf-op-config"][ 

1295 "day1-2" 

1296 ] = day12 

1297 vnfd["df"][df_id] = df 

1298 return vnfd 

1299 

1300 def remove_modifiable_items(self, vnfd: dict) -> dict: 

1301 """Removes the modifiable parts from the VNFD descriptors 

1302 

1303 It calls different extract functions according to different update types 

1304 to clear all the modifiable items from VNFD 

1305 

1306 Args: 

1307 vnfd (dict): Descriptor as a dictionary 

1308 

1309 Returns: 

1310 vnfd (dict): Descriptor which does not include modifiable contents 

1311 """ 

1312 if vnfd.get("vnfd"): 

1313 vnfd = vnfd["vnfd"] 

1314 vnfd.pop("_admin", None) 

1315 # If the other extractions need to be done from VNFD, 

1316 # the new extract methods could be appended to below list. 

1317 for extract_function in [self.extract_day12_primitives, self.extract_policies]: 

1318 vnfd_temp = extract_function(vnfd) 

1319 vnfd = vnfd_temp 

1320 return vnfd 

1321 

1322 def _validate_descriptor_changes( 

1323 self, 

1324 descriptor_id: str, 

1325 descriptor_file_name: str, 

1326 old_descriptor_directory: str, 

1327 new_descriptor_directory: str, 

1328 ): 

1329 """Compares the old and new VNFD descriptors and validates the new descriptor. 

1330 

1331 Args: 

1332 old_descriptor_directory (str): Directory of descriptor which is in-use 

1333 new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision) 

1334 

1335 Returns: 

1336 None 

1337 

1338 Raises: 

1339 EngineException: In case of error when there are unallowed changes 

1340 """ 

1341 try: 

1342 # If VNFD does not exist in DB or it is not in use by any NS, 

1343 # validation is not required. 

1344 vnfd = self.db.get_one("vnfds", {"_id": descriptor_id}) 

1345 if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db): 

1346 return 

1347 

1348 # Get the old and new descriptor contents in order to compare them. 

1349 with self.fs.file_open( 

1350 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r" 

1351 ) as old_descriptor_file: 

1352 with self.fs.file_open( 

1353 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r" 

1354 ) as new_descriptor_file: 

1355 old_content = yaml.safe_load(old_descriptor_file.read()) 

1356 new_content = yaml.safe_load(new_descriptor_file.read()) 

1357 

1358 # If software version has changed, we do not need to validate 

1359 # the differences anymore. 

1360 if old_content and new_content: 

1361 if self.find_software_version( 

1362 old_content 

1363 ) != self.find_software_version(new_content): 

1364 return 

1365 

1366 disallowed_change = DeepDiff( 

1367 self.remove_modifiable_items(old_content), 

1368 self.remove_modifiable_items(new_content), 

1369 ) 

1370 

1371 if disallowed_change: 

1372 changed_nodes = functools.reduce( 

1373 lambda a, b: a + " , " + b, 

1374 [ 

1375 node.lstrip("root") 

1376 for node in disallowed_change.get( 

1377 "values_changed" 

1378 ).keys() 

1379 ], 

1380 ) 

1381 

1382 raise EngineException( 

1383 f"Error in validating new descriptor: {changed_nodes} cannot be modified, " 

1384 "there are disallowed changes in the vnf descriptor.", 

1385 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1386 ) 

1387 except ( 

1388 DbException, 

1389 AttributeError, 

1390 IndexError, 

1391 KeyError, 

1392 ValueError, 

1393 ) as e: 

1394 raise type(e)( 

1395 "VNF Descriptor could not be processed with error: {}.".format(e) 

1396 ) 

1397 

1398 

1399class NsdTopic(DescriptorTopic): 

1400 topic = "nsds" 

1401 topic_msg = "nsd" 

1402 

    def __init__(self, db, fs, msg, auth):
        """Initialize the NSD topic delegating everything to DescriptorTopic.

        :param db: database driver
        :param fs: file storage driver
        :param msg: message bus driver
        :param auth: authentication connector
        """
        super().__init__(db, fs, msg, auth)

1405 

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NSD against the ETSI SOL006 yang model via pyangbind.

        :param item: topic name ("nsds"); kept for the common signature
        :param data: descriptor content to validate
        :param force: when True, unknown yang elements are skipped instead of
            raising
        :return: the descriptor as normalized by the yang round-trip
            (envelope stripped, yang prefixes removed)
        :raises EngineException: for old-format descriptors or any
            validation error
        """
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            # 'vnf-profile' is kept aside and restored after the round-trip,
            # since the yang round-trip would otherwise alter/drop it.
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
            pybindJSONDecoder.load_ietf_json(
                {"nsd": {"nsd": [data]}},
                None,
                None,
                obj=mynsd,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize back to plain dictionaries and normalize the result.
            out = pybindJSON.dumps(mynsd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            if nsd_vnf_profiles:
                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

1434 

1435 @staticmethod 

1436 def _descriptor_data_is_in_old_format(data): 

1437 return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data) 

1438 

1439 @staticmethod 

1440 def _remove_envelop(indata=None): 

1441 if not indata: 

1442 return {} 

1443 clean_indata = indata 

1444 

1445 if clean_indata.get("nsd"): 

1446 clean_indata = clean_indata["nsd"] 

1447 elif clean_indata.get("etsi-nfv-nsd:nsd"): 

1448 clean_indata = clean_indata["etsi-nfv-nsd:nsd"] 

1449 if clean_indata.get("nsd"): 

1450 if ( 

1451 not isinstance(clean_indata["nsd"], list) 

1452 or len(clean_indata["nsd"]) != 1 

1453 ): 

1454 raise EngineException("'nsd' must be a list of only one element") 

1455 clean_indata = clean_indata["nsd"][0] 

1456 return clean_indata 

1457 

1458 def _validate_input_new(self, indata, storage_params, force=False): 

1459 indata.pop("nsdOnboardingState", None) 

1460 indata.pop("nsdOperationalState", None) 

1461 indata.pop("nsdUsageState", None) 

1462 

1463 indata.pop("links", None) 

1464 

1465 indata = self.pyangbind_validation("nsds", indata, force) 

1466 # Cross references validation in the descriptor 

1467 # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none 

1468 for vld in get_iterable(indata.get("virtual-link-desc")): 

1469 self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata) 

1470 for fg in get_iterable(indata.get("vnffgd")): 

1471 self.validate_vnffgd_data(fg, indata) 

1472 

1473 self.validate_vnf_profiles_vnfd_id(indata) 

1474 

1475 return indata 

1476 

1477 @staticmethod 

1478 def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata): 

1479 if not vld.get("mgmt-network"): 

1480 return 

1481 vld_id = vld.get("id") 

1482 for df in get_iterable(indata.get("df")): 

1483 for vlp in get_iterable(df.get("virtual-link-profile")): 

1484 if vld_id and vld_id == vlp.get("virtual-link-desc-id"): 

1485 if vlp.get("virtual-link-protocol-data"): 

1486 raise EngineException( 

1487 "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-" 

1488 "protocol-data You cannot set a virtual-link-protocol-data " 

1489 "when mgmt-network is True".format(df["id"], vlp["id"]), 

1490 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1491 ) 

1492 

1493 @staticmethod 

1494 def validate_vnffgd_data(fg, indata): 

1495 position_list = [] 

1496 all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id"))) 

1497 for fgposition in get_iterable(fg.get("nfp-position-element")): 

1498 position_list.append(fgposition["id"]) 

1499 

1500 for nfpd in get_iterable(fg.get("nfpd")): 

1501 nfp_position = [] 

1502 for position in get_iterable(nfpd.get("position-desc-id")): 

1503 nfp_position = position.get("nfp-position-element-id") 

1504 if position == "nfp-position-element-id": 

1505 nfp_position = position.get("nfp-position-element-id") 

1506 if nfp_position[0] not in position_list: 

1507 raise EngineException( 

1508 "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' " 

1509 "does not match any nfp-position-element".format( 

1510 nfpd["id"], nfp_position[0] 

1511 ), 

1512 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1513 ) 

1514 

1515 for cp in get_iterable(position.get("cp-profile-id")): 

1516 for cpe in get_iterable(cp.get("constituent-profile-elements")): 

1517 constituent_base_element_id = cpe.get( 

1518 "constituent-base-element-id" 

1519 ) 

1520 if ( 

1521 constituent_base_element_id 

1522 and constituent_base_element_id not in all_vnf_ids 

1523 ): 

1524 raise EngineException( 

1525 "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' " 

1526 "does not match any constituent-base-element-id".format( 

1527 cpe["id"], constituent_base_element_id 

1528 ), 

1529 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1530 ) 

1531 

1532 @staticmethod 

1533 def validate_vnf_profiles_vnfd_id(indata): 

1534 all_vnfd_ids = set(get_iterable(indata.get("vnfd-id"))) 

1535 for df in get_iterable(indata.get("df")): 

1536 for vnf_profile in get_iterable(df.get("vnf-profile")): 

1537 vnfd_id = vnf_profile.get("vnfd-id") 

1538 if vnfd_id and vnfd_id not in all_vnfd_ids: 

1539 raise EngineException( 

1540 "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' " 

1541 "does not match any vnfd-id".format( 

1542 df["id"], vnf_profile["id"], vnfd_id 

1543 ), 

1544 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1545 ) 

1546 

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        Validate and normalize an NSD edit request (moves SOL005 fields into
        the internal _admin container).

        :param indata: edit request body; modified in place and returned
        :param content: current database content of the NSD
        :param force: unused here; kept for the common signature
        :raises EngineException: on invalid state, non-dict userDefinedData,
            or a no-op operational state change (CONFLICT)

        indata looks as follows:
        - In the new case (conformant)
            {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
            '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
        - In the old case (backwards-compatible)
            {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["nsdOperationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )
        # Setting the operational state to its current value is rejected as a
        # conflict rather than silently accepted.
        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "nsdOperationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )
        return indata

1596 

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        # Forced operations skip all dependency validation.
        if session["force"]:
            return
        # Map vnfd-id -> vnfd document; raises if a referenced vnfd is missing.
        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)

        # Cross references validation in the descriptor and vnfd connection point validation
        for df in get_iterable(descriptor.get("df")):
            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)

1612 

1613 def _get_descriptor_constituent_vnfds_index(self, session, descriptor): 

1614 vnfds_index = {} 

1615 if descriptor.get("vnfd-id") and not session["force"]: 

1616 for vnfd_id in get_iterable(descriptor.get("vnfd-id")): 

1617 query_filter = self._get_project_filter(session) 

1618 query_filter["id"] = vnfd_id 

1619 vnf_list = self.db.get_list("vnfds", query_filter) 

1620 if not vnf_list: 

1621 raise EngineException( 

1622 "Descriptor error at 'vnfd-id'='{}' references a non " 

1623 "existing vnfd".format(vnfd_id), 

1624 http_code=HTTPStatus.CONFLICT, 

1625 ) 

1626 vnfds_index[vnfd_id] = vnf_list[0] 

1627 return vnfds_index 

1628 

1629 @staticmethod 

1630 def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index): 

1631 for vnf_profile in get_iterable(df.get("vnf-profile")): 

1632 vnfd = vnfds_index.get(vnf_profile["vnfd-id"]) 

1633 all_vnfd_ext_cpds = set() 

1634 for ext_cpd in get_iterable(vnfd.get("ext-cpd")): 

1635 if ext_cpd.get("id"): 

1636 all_vnfd_ext_cpds.add(ext_cpd.get("id")) 

1637 

1638 for virtual_link in get_iterable( 

1639 vnf_profile.get("virtual-link-connectivity") 

1640 ): 

1641 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")): 

1642 vl_cpd_id = vl_cpd.get("constituent-cpd-id") 

1643 if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds: 

1644 raise EngineException( 

1645 "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity" 

1646 "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a " 

1647 "non existing ext-cpd:id inside vnfd '{}'".format( 

1648 df["id"], 

1649 vnf_profile["id"], 

1650 virtual_link["virtual-link-profile-id"], 

1651 vl_cpd_id, 

1652 vnfd["id"], 

1653 ), 

1654 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1655 ) 

1656 

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the generic edit checks and then the NSD dependency checks.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: content of the edit request
        :param _id: internal id of the descriptor
        :return: the (possibly updated) final content
        """
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        # Verify referenced vnfds exist and their connection points match.
        self._check_descriptor_dependencies(session, final_content)

        return final_content

1665 

1666 def check_conflict_on_del(self, session, _id, db_content): 

1667 """ 

1668 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note 

1669 that NSD can be public and be used by other projects. 

1670 :param session: contains "username", "admin", "force", "public", "project_id", "set_project" 

1671 :param _id: nsd internal id 

1672 :param db_content: The database content of the _id 

1673 :return: None or raises EngineException with the conflict 

1674 """ 

1675 if session["force"]: 

1676 return 

1677 descriptor = db_content 

1678 descriptor_id = descriptor.get("id") 

1679 if not descriptor_id: # empty nsd not uploaded 

1680 return 

1681 

1682 # check NSD used by NS 

1683 _filter = self._get_project_filter(session) 

1684 _filter["nsd-id"] = _id 

1685 if self.db.get_list("nsrs", _filter): 

1686 raise EngineException( 

1687 "There is at least one NS instance using this descriptor", 

1688 http_code=HTTPStatus.CONFLICT, 

1689 ) 

1690 

1691 # check NSD referenced by NST 

1692 del _filter["nsd-id"] 

1693 _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id 

1694 if self.db.get_list("nsts", _filter): 

1695 raise EngineException( 

1696 "There is at least one NetSlice Template referencing this descriptor", 

1697 http_code=HTTPStatus.CONFLICT, 

1698 ) 

1699 

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associate file system storage (via super)
        Deletes associated NSD revision entries from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        # Revision documents embed the descriptor id in their _id, hence the
        # regex match. NOTE(review): _id is not regex-escaped; assumed to be a
        # plain uuid with no regex metacharacters — confirm.
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

1712 

1713 @staticmethod 

1714 def extract_day12_primitives(nsd: dict) -> dict: 

1715 """Removes the day12 primitives from the NSD descriptors 

1716 

1717 Args: 

1718 nsd (dict): Descriptor as a dictionary 

1719 

1720 Returns: 

1721 nsd (dict): Cleared NSD 

1722 """ 

1723 if nsd.get("ns-configuration"): 

1724 for key in [ 

1725 "config-primitive", 

1726 "initial-config-primitive", 

1727 "terminate-config-primitive", 

1728 ]: 

1729 nsd["ns-configuration"].pop(key, None) 

1730 return nsd 

1731 

1732 def remove_modifiable_items(self, nsd: dict) -> dict: 

1733 """Removes the modifiable parts from the VNFD descriptors 

1734 

1735 It calls different extract functions according to different update types 

1736 to clear all the modifiable items from NSD 

1737 

1738 Args: 

1739 nsd (dict): Descriptor as a dictionary 

1740 

1741 Returns: 

1742 nsd (dict): Descriptor which does not include modifiable contents 

1743 """ 

1744 while isinstance(nsd, dict) and nsd.get("nsd"): 

1745 nsd = nsd["nsd"] 

1746 if isinstance(nsd, list): 

1747 nsd = nsd[0] 

1748 nsd.pop("_admin", None) 

1749 # If the more extractions need to be done from NSD, 

1750 # the new extract methods could be appended to below list. 

1751 for extract_function in [self.extract_day12_primitives]: 

1752 nsd_temp = extract_function(nsd) 

1753 nsd = nsd_temp 

1754 return nsd 

1755 

1756 def _validate_descriptor_changes( 

1757 self, 

1758 descriptor_id: str, 

1759 descriptor_file_name: str, 

1760 old_descriptor_directory: str, 

1761 new_descriptor_directory: str, 

1762 ): 

1763 """Compares the old and new NSD descriptors and validates the new descriptor 

1764 

1765 Args: 

1766 old_descriptor_directory: Directory of descriptor which is in-use 

1767 new_descriptor_directory: Directory of descriptor which is proposed to update (new revision) 

1768 

1769 Returns: 

1770 None 

1771 

1772 Raises: 

1773 EngineException: In case of error if the changes are not allowed 

1774 """ 

1775 

1776 try: 

1777 # If NSD does not exist in DB, or it is not in use by any NS, 

1778 # validation is not required. 

1779 nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False) 

1780 if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db): 

1781 return 

1782 

1783 # Get the old and new descriptor contents in order to compare them. 

1784 with self.fs.file_open( 

1785 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r" 

1786 ) as old_descriptor_file: 

1787 with self.fs.file_open( 

1788 (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r" 

1789 ) as new_descriptor_file: 

1790 old_content = yaml.safe_load(old_descriptor_file.read()) 

1791 new_content = yaml.safe_load(new_descriptor_file.read()) 

1792 

1793 if old_content and new_content: 

1794 disallowed_change = DeepDiff( 

1795 self.remove_modifiable_items(old_content), 

1796 self.remove_modifiable_items(new_content), 

1797 ) 

1798 

1799 if disallowed_change: 

1800 changed_nodes = functools.reduce( 

1801 lambda a, b: a + ", " + b, 

1802 [ 

1803 node.lstrip("root") 

1804 for node in disallowed_change.get( 

1805 "values_changed" 

1806 ).keys() 

1807 ], 

1808 ) 

1809 

1810 raise EngineException( 

1811 f"Error in validating new descriptor: {changed_nodes} cannot be modified, " 

1812 "there are disallowed changes in the ns descriptor. ", 

1813 http_code=HTTPStatus.UNPROCESSABLE_ENTITY, 

1814 ) 

1815 except ( 

1816 DbException, 

1817 AttributeError, 

1818 IndexError, 

1819 KeyError, 

1820 ValueError, 

1821 ) as e: 

1822 raise type(e)( 

1823 "NS Descriptor could not be processed with error: {}.".format(e) 

1824 ) 

1825 

1826 def sol005_projection(self, data): 

1827 data["nsdOnboardingState"] = data["_admin"]["onboardingState"] 

1828 data["nsdOperationalState"] = data["_admin"]["operationalState"] 

1829 data["nsdUsageState"] = data["_admin"]["usageState"] 

1830 

1831 links = {} 

1832 links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])} 

1833 links["nsd_content"] = { 

1834 "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"]) 

1835 } 

1836 data["_links"] = links 

1837 

1838 return super().sol005_projection(data) 

1839 

1840 

class NstTopic(DescriptorTopic):
    """Handles Network Slice Template (NST) descriptors (topic "nsts")."""

    topic = "nsts"
    topic_msg = "nst"
    quota_name = "slice_templates"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        """Validate an NST descriptor against the pyangbind NST model.

        :param item: topic name (not used in the validation itself)
        :param data: NST descriptor content to validate
        :param force: when True, unknown attributes are skipped instead of failing
        :return: the validated descriptor with the "nst" envelope removed
        :raises: EngineException (422 UNPROCESSABLE_ENTITY) on any validation error
        """
        try:
            mynst = nst_im()
            # The model expects the descriptor wrapped in a one-element "nst" list.
            pybindJSONDecoder.load_ietf_json(
                {"nst": [data]},
                None,
                None,
                obj=mynst,
                path_helper=True,
                skip_unknown=force,
            )
            # Serialize back and unwrap to obtain the normalized descriptor.
            out = pybindJSON.dumps(mynst, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            return desc_out
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the "nst" / "nst:nst" envelope and return the inner descriptor.

        :param indata: descriptor possibly wrapped in a one-element "nst" list
        :return: the unwrapped descriptor, or {} when indata is empty
        :raises: EngineException if the envelope is not a one-element list
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("nst"):
            if (
                not isinstance(clean_indata["nst"], list)
                or len(clean_indata["nst"]) != 1
            ):
                raise EngineException("'nst' must be a list only one element")
            clean_indata = clean_indata["nst"][0]
        elif clean_indata.get("nst:nst"):
            if (
                not isinstance(clean_indata["nst:nst"], list)
                or len(clean_indata["nst:nst"]) != 1
            ):
                raise EngineException("'nst:nst' must be a list only one element")
            clean_indata = clean_indata["nst:nst"][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NST descriptor before insertion.

        :param indata: descriptor content provided by the client
        :param storage_params: storage information (not used by NST validation)
        :param force: passed through to pyangbind validation to skip unknown fields
        :return: a copy of the validated descriptor
        """
        # Drop read-only state fields a client must not set; NBI manages them.
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        # Every referenced NSD must exist and be visible from this project.
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException(
                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                    "existing nsd".format(nsd_id),
                    http_code=HTTPStatus.CONFLICT,
                )

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """Run the base edit-conflict checks plus NSD reference validation."""
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        self._check_descriptor_dependencies(session, final_content)
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException(
                "there is at least one Netslice Instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def sol005_projection(self, data):
        """Project internal "_admin" state into the SOL005 NST view and add links."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)

1958 

1959 

class PduTopic(BaseTopic):
    """Handles Physical Deployment Unit (PDU) descriptors (topic "pdus")."""

    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        super().__init__(db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Fill the "_admin" section of a newly created PDU entry."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        content["_admin"].update(
            {
                "onboardingState": "CREATED",
                "operationalState": "ENABLED",
                "usageState": "NOT_IN_USE",
            }
        )

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # Any VNF record whose VDU references this PDU blocks the deletion.
        vnfr_filter = self._get_project_filter(session)
        vnfr_filter["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", vnfr_filter):
            raise EngineException(
                "There is at least one VNF instance using this PDU",
                http_code=HTTPStatus.CONFLICT,
            )

1995 

1996 

class VnfPkgOpTopic(BaseTopic):
    """Handles VNF package operations (topic "vnfpkgops").

    Entries are create-only: edit, delete and delete_list are rejected with
    405 METHOD_NOT_ALLOWED.
    """

    topic = "vnfpkgops"
    topic_msg = "vnfd"
    schema_new = vnfpkgop_new_schema
    schema_edit = None

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    def edit(self, session, _id, indata=None, kwargs=None, content=None):
        """Always raises: package operations cannot be edited."""
        raise EngineException(
            "Method 'edit' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete(self, session, _id, dry_run=False):
        """Always raises: package operations cannot be deleted."""
        raise EngineException(
            "Method 'delete' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def delete_list(self, session, filter_q=None):
        """Always raises: package operations cannot be bulk-deleted."""
        raise EngineException(
            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
            HTTPStatus.METHOD_NOT_ALLOWED,
        )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
             op_id: None
        """
        self._update_input_with_kwargs(indata, kwargs)
        validate_input(indata, self.schema_new)
        vnfpkg_id = indata["vnfPkgId"]
        filter_q = BaseTopic._get_project_filter(session)
        filter_q["_id"] = vnfpkg_id
        vnfd = self.db.get_one("vnfds", filter_q)
        operation = indata["lcmOperationType"]
        kdu_name = indata["kdu_name"]
        # Locate the requested KDU inside the VNFD; for/else raises when the
        # loop finishes without a break (no KDU with that name).
        for kdu in vnfd.get("kdu", []):
            if kdu["name"] == kdu_name:
                helm_chart = kdu.get("helm-chart")
                juju_bundle = kdu.get("juju-bundle")
                break
        else:
            raise EngineException(
                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
            )
        # A chart/bundle of the form "<repo>/<name>" carries a repository name;
        # anything without exactly one "/" yields repo_name = None.
        if helm_chart:
            indata["helm-chart"] = helm_chart
            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
            repo_name = match.group(1) if match else None
        elif juju_bundle:
            indata["juju-bundle"] = juju_bundle
            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
            repo_name = match.group(1) if match else None
        else:
            raise EngineException(
                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
                    vnfpkg_id, kdu_name
                )
            )
        # Resolve the repository name against the registered k8s repos,
        # reusing the project filter (repo must be visible to this project).
        if repo_name:
            del filter_q["_id"]
            filter_q["name"] = repo_name
            repo = self.db.get_one("k8srepos", filter_q)
            k8srepo_id = repo.get("_id")
            k8srepo_url = repo.get("url")
        else:
            k8srepo_id = None
            k8srepo_url = None
        indata["k8srepoId"] = k8srepo_id
        indata["k8srepo_url"] = k8srepo_url
        vnfpkgop_id = str(uuid4())
        # Build the operation record; operationParams keeps the (augmented) input.
        vnfpkgop_desc = {
            "_id": vnfpkgop_id,
            "operationState": "PROCESSING",
            "vnfPkgId": vnfpkg_id,
            "lcmOperationType": operation,
            "isAutomaticInvocation": False,
            "isCancelPending": False,
            "operationParams": indata,
            "links": {
                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
            },
        }
        self.format_on_new(
            vnfpkgop_desc, session["project_id"], make_public=session["public"]
        )
        # Both timestamps start at the creation time set by format_on_new.
        ctime = vnfpkgop_desc["_admin"]["created"]
        vnfpkgop_desc["statusEnteredTime"] = ctime
        vnfpkgop_desc["startTime"] = ctime
        self.db.create(self.topic, vnfpkgop_desc)
        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
        # Notify the LCM about the new operation through the message bus.
        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
        return vnfpkgop_id, None