# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import tarfile
import yaml
import json
import copy
import os
import shutil
import functools

# import logging
from deepdiff import DeepDiff
from hashlib import md5
from osm_common.dbbase import DbException, deep_update_rfc7396
from http import HTTPStatus
from time import time
from uuid import uuid4
from re import fullmatch
from zipfile import ZipFile
from osm_nbi.validation import (
    ValidationError,
    pdu_new_schema,
    pdu_edit_schema,
    validate_input,
    vnfpkgop_new_schema,
)
from osm_nbi.base_topic import (
    BaseTopic,
    EngineException,
    get_iterable,
    detect_descriptor_usage,
)
from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
from osm_im.nst import nst as nst_im
from pyangbind.lib.serialise import pybindJSONDecoder
import pyangbind.lib.pybindJSON as pybindJSON
from osm_nbi import utils

__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"


class DescriptorTopic(BaseTopic):
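    """Common behaviour for descriptor topics (VNF and NS packages): creation of the empty
    entry, package content upload and validation, file retrieval and revision handling."""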
    def __init__(self, db, fs, msg, auth):

        BaseTopic.__init__(self, db, fs, msg, auth)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

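        # Helper: recursively check that 'id' (or 'name') values are unique within every list of the descriptor.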
        def _check_unique_id_name(descriptor, position=""):
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(
                                list_item, "{}.{}[{}]".format(position, desc_key, index)
                            )
                            # Base case
                            if index == 0 and (
                                list_item.get("id") or list_item.get("name")
                            ):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(
                                        position, desc_key, index
                                    )
                                    raise EngineException(
                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
                                            desc_item_id,
                                            list_item[desc_item_id],
                                            position,
                                        ),
                                        HTTPStatus.UNPROCESSABLE_ENTITY,
                                    )
                                used_ids.append(list_item[desc_item_id])

        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(
            final_content, storage_params, session["force"]
        )

        # 1.2. modify final_content with a serialized version
        final_content = copy.deepcopy(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v
        if session["force"]:
            return final_content

        # 2. check that this id is not present
        if "id" in edit_content:
            _filter = self._get_project_filter(session)

            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id

            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException(
                    "{} with id '{}' already exists for this project".format(
                        self.topic[:-1], final_content["id"]
                    ),
                    HTTPStatus.CONFLICT,
                )

        return final_content

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
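        # Newly created descriptors start in the SOL005 states CREATED / DISABLED / NOT_IN_USE;
        # they move to ONBOARDED / ENABLED once the package content is uploaded.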
        content["_admin"]["onboardingState"] = "CREATED"
        content["_admin"]["operationalState"] = "DISABLED"
        content["_admin"]["usageState"] = "NOT_IN_USE"

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes file system storage associated with the descriptor
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :param not_send_msg: To not send message (False) or store content (list) instead
        :return: None if ok or raises EngineException with the problem
        """
        self.fs.file_delete(_id, ignore_non_exist=True)
        self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
        # Remove file revisions
        if "revision" in db_content["_admin"]:
            revision = db_content["_admin"]["revision"]
            while revision > 0:
                self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
                revision = revision - 1

    @staticmethod
    def get_one_by_id(db, session, topic, id):
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException(
                "Found more than one {} with id='{}' belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

        # not found any: try to find public
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException(
                "Not found any {} with id='{}'".format(topic[:-1], id),
                HTTPStatus.NOT_FOUND,
            )
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException(
                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
                    topic[:-1], id
                ),
                HTTPStatus.CONFLICT,
            )

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new, almost empty, DISABLED entry in the database. Due to SOL005, it does not follow the
        normal procedure. Creating a VNFD or NSD is done in two steps: 1) create an empty descriptor
        (this step) and 2) upload the content (self.upload_content)
        :param rollback: list where created database items are appended in case a rollback needs to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, None: identity of the inserted data; and None as there is not any operation
        """

        # No need to capture exceptions
        # Check Quota
        self.check_quota(session)

        # _remove_envelop
        if indata:
            if "userDefinedData" in indata:
                indata = indata["userDefinedData"]

        # Override descriptor with query string kwargs
        self._update_input_with_kwargs(indata, kwargs)
        # uncomment when this method is implemented.
        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])

        content = {"_admin": {
            "userDefinedData": indata,
            "revision": 0
        }}

        self.format_on_new(
            content, session["project_id"], make_public=session["public"]
        )
        _id = self.db.create(self.topic, content)
        rollback.append({"topic": self.topic, "_id": _id})
        self._send_msg("created", {"_id": _id})
        return _id, None

    def upload_content(self, session, _id, indata, kwargs, headers):
        """
        Used for receiving content by chunks (with a transaction_id header and/or gzip file); it stores and
        extracts the package
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id : the nsd,vnfd is already created, this is the id
        :param indata: http body request
        :param kwargs: user query string to override parameters. NOT USED
        :param headers: http request headers
        :return: True if the package is completely uploaded or False if only partial content has been
            uploaded. Raises an exception on error
        """
        # Check that _id exists and it is valid
        current_desc = self.show(session, _id)

        content_range_text = headers.get("Content-Range")
        expected_md5 = headers.get("Content-File-MD5")
        compressed = None
        content_type = headers.get("Content-Type")
        if content_type and (
            "application/gzip" in content_type
            or "application/x-gzip" in content_type
        ):
            compressed = "gzip"
        if content_type and "application/zip" in content_type:
            compressed = "zip"
        filename = headers.get("Content-Filename")
        if not filename and compressed:
            filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
        elif not filename:
            filename = "package"

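        # Every successful content upload bumps the package revision; the first upload is revision 1.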
        revision = 1
        if "revision" in current_desc["_admin"]:
            revision = current_desc["_admin"]["revision"] + 1

        # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
        file_pkg = None
        error_text = ""
        fs_rollback = []

        try:
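            # Chunked uploads: the Content-Range header ("bytes <start>-<end>/<total>") indicates which
            # slice of the package this request carries; without it the whole package comes in the body.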
            if content_range_text:
                content_range = (
                    content_range_text.replace("-", " ").replace("/", " ").split()
                )
                if (
                    content_range[0] != "bytes"
                ):  # TODO check x<y not negative < total....
                    raise IndexError()
                start = int(content_range[1])
                end = int(content_range[2]) + 1
                total = int(content_range[3])
            else:
                start = 0
            # Rather than using a temp folder, we will store the package in a folder based on
            # the current revision.
            proposed_revision_path = (
                _id + ":" + str(revision)
            )  # all the content is uploaded here and, if ok, it is renamed to the folder of its id

            if start:
                if not self.fs.file_exists(proposed_revision_path, "dir"):
                    raise EngineException(
                        "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                    )
            else:
                self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                self.fs.mkdir(proposed_revision_path)
                fs_rollback.append(proposed_revision_path)

            storage = self.fs.get_params()
            storage["folder"] = proposed_revision_path

            file_path = (proposed_revision_path, filename)
            if self.fs.file_exists(file_path, "file"):
                file_size = self.fs.file_size(file_path)
            else:
                file_size = 0
            if file_size != start:
                raise EngineException(
                    "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
                        file_size, start
                    ),
                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                )
            file_pkg = self.fs.file_open(file_path, "a+b")
            if isinstance(indata, dict):
                indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                file_pkg.write(indata_text.encode(encoding="utf-8"))
            else:
                indata_len = 0
                while True:
                    indata_text = indata.read(4096)
                    indata_len += len(indata_text)
                    if not indata_text:
                        break
                    file_pkg.write(indata_text)
            if content_range_text:
                if indata_len != end - start:
                    raise EngineException(
                        "Mismatch between Content-Range header {}-{} and body length of {}".format(
                            start, end - 1, indata_len
                        ),
                        HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
                    )
                if end != total:
                    # TODO update to UPLOADING
                    return False

            # PACKAGE UPLOADED
            if expected_md5:
                file_pkg.seek(0, 0)
                file_md5 = md5()
                chunk_data = file_pkg.read(1024)
                while chunk_data:
                    file_md5.update(chunk_data)
                    chunk_data = file_pkg.read(1024)
                if expected_md5 != file_md5.hexdigest():
                    raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
            file_pkg.seek(0, 0)
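            # For tar.gz packages all files must live under a single top-level directory and exactly one
            # .yaml/.yml/.json descriptor is expected directly inside that directory.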
            if compressed == "gzip":
                tar = tarfile.open(mode="r", fileobj=file_pkg)
                descriptor_file_name = None
                for tarinfo in tar:
                    tarname = tarinfo.name
                    tarname_path = tarname.split("/")
                    if (
                        not tarname_path[0] or ".." in tarname_path
                    ):  # if it starts with "/" it means an absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor tar.gz"
                        )
                    if len(tarname_path) == 1 and not tarinfo.isdir():
                        raise EngineException(
                            "All files must be inside a dir for package descriptor tar.gz"
                        )
                    if (
                        tarname.endswith(".yaml")
                        or tarname.endswith(".json")
                        or tarname.endswith(".yml")
                    ):
                        storage["pkg-dir"] = tarname_path[0]
                        if len(tarname_path) == 2:
                            if descriptor_file_name:
                                raise EngineException(
                                    "Found more than one descriptor file at package descriptor tar.gz"
                                )
                            descriptor_file_name = tarname
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor tar.gz"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(tar, proposed_revision_path)
                with self.fs.file_open(
                    (proposed_revision_path, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            elif compressed == "zip":
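                # For zip (SOL004-style) packages the descriptor may be at the package root or under "Definitions/".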
                zipfile = ZipFile(file_pkg)
                descriptor_file_name = None
                for package_file in zipfile.infolist():
                    zipfilename = package_file.filename
                    file_path = zipfilename.split("/")
                    if (
                        not file_path[0] or ".." in zipfilename
                    ):  # if it starts with "/" it means an absolute path
                        raise EngineException(
                            "Absolute path or '..' are not allowed for package descriptor zip"
                        )

                    if (
                        (
                            zipfilename.endswith(".yaml")
                            or zipfilename.endswith(".json")
                            or zipfilename.endswith(".yml")
                        ) and (
                            zipfilename.find("/") < 0
                            or zipfilename.find("Definitions") >= 0
                        )
                    ):
                        storage["pkg-dir"] = ""
                        if descriptor_file_name:
                            raise EngineException(
                                "Found more than one descriptor file at package descriptor zip"
                            )
                        descriptor_file_name = zipfilename
                if not descriptor_file_name:
                    raise EngineException(
                        "Not found any descriptor file at package descriptor zip"
                    )
                storage["descriptor"] = descriptor_file_name
                storage["zipfile"] = filename
                self.fs.file_extract(zipfile, proposed_revision_path)

                with self.fs.file_open(
                    (proposed_revision_path, descriptor_file_name), "r"
                ) as descriptor_file:
                    content = descriptor_file.read()
            else:
                content = file_pkg.read()
                storage["descriptor"] = descriptor_file_name = filename

            if descriptor_file_name.endswith(".json"):
                error_text = "Invalid json format "
                indata = json.load(content)
            else:
                error_text = "Invalid yaml format "
                indata = yaml.load(content, Loader=yaml.SafeLoader)

            # Need to close the file package here so it can be copied from the
            # revision to the current, unrevisioned record
            if file_pkg:
                file_pkg.close()
            file_pkg = None

            # Fetch both the incoming, proposed revision and the original revision so we
            # can call a validate method to compare them
            current_revision_path = _id + "/"
            self.fs.sync(from_path=current_revision_path)
            self.fs.sync(from_path=proposed_revision_path)

            if revision > 1:
                try:
                    self._validate_descriptor_changes(
                        _id,
                        descriptor_file_name,
                        current_revision_path,
                        proposed_revision_path,
                    )
                except Exception as e:
                    shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
                    shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
                    # Only delete the new revision. We need to keep the original version in place
                    # as it has not been changed.
                    self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                    raise e

            indata = self._remove_envelop(indata)

            # Override descriptor with query string kwargs
            if kwargs:
                self._update_input_with_kwargs(indata, kwargs)

            current_desc["_admin"]["storage"] = storage
            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
            current_desc["_admin"]["operationalState"] = "ENABLED"
            current_desc["_admin"]["modified"] = time()
            current_desc["_admin"]["revision"] = revision

            deep_update_rfc7396(current_desc, indata)
            current_desc = self.check_conflict_on_edit(
                session, current_desc, indata, _id=_id
            )

            # Copy the revision to the active package name by its original id
            shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
            os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
            self.fs.file_delete(current_revision_path, ignore_non_exist=True)
            self.fs.mkdir(current_revision_path)
            self.fs.reverse_sync(from_path=current_revision_path)

            shutil.rmtree(self.fs.path + _id)

            self.db.replace(self.topic, _id, current_desc)

            # Store a copy of the package as a point in time revision
            revision_desc = dict(current_desc)
            revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
            self.db.create(self.topic + "_revisions", revision_desc)
            fs_rollback = []

            indata["_id"] = _id
            self._send_msg("edited", indata)

            # TODO if the descriptor has changed because of kwargs, update the content and remove the cached zip
            # TODO if the zip is not present, create one
            return True

        except EngineException:
            raise
        except IndexError:
            raise EngineException(
                "invalid Content-Range header format. Expected 'bytes start-end/total'",
                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
            )
        except IOError as e:
            raise EngineException(
                "invalid upload transaction sequence: '{}'".format(e),
                HTTPStatus.BAD_REQUEST,
            )
        except tarfile.ReadError as e:
            raise EngineException(
                "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
            )
        except (ValueError, yaml.YAMLError) as e:
            raise EngineException(error_text + str(e))
        except ValidationError as e:
            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
        finally:
            if file_pkg:
                file_pkg.close()
            for file in fs_rollback:
                self.fs.file_delete(file, ignore_non_exist=True)

    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of the Accept header. Must contain application/zip and/or text/plain
        :return: opened file plus Accept format or raises an exception
        """
        accept_text = accept_zip = False
        if accept_header:
            if "text/plain" in accept_header or "*/*" in accept_header:
                accept_text = True
            if "application/zip" in accept_header or "*/*" in accept_header:
                accept_zip = "application/zip"
            elif "application/gzip" in accept_header:
                accept_zip = "application/gzip"

        if not accept_text and not accept_zip:
            raise EngineException(
                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )

        content = self.show(session, _id)
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException(
                "Cannot get content because this resource is not at 'ONBOARDED' state. "
                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                http_code=HTTPStatus.CONFLICT,
            )
        storage = content["_admin"]["storage"]
        if path is not None and path != "$DESCRIPTOR":  # artifacts
            if not storage.get("pkg-dir") and not storage.get("folder"):
                raise EngineException(
                    "Packages does not contains artifacts",
                    http_code=HTTPStatus.BAD_REQUEST,
                )
            if self.fs.file_exists(
                (storage["folder"], storage["pkg-dir"], *path), "dir"
            ):
                folder_content = self.fs.dir_ls(
                    (storage["folder"], storage["pkg-dir"], *path)
                )
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return (
                    self.fs.file_open(
                        (storage["folder"], storage["pkg-dir"], *path), "rb"
                    ),
                    "application/octet-stream",
                )

        # Decide the returned format from the package layout and the Accept header:
        #   pkgtype     accepts ZIP   accepts TEXT   -> result
        #   manyfiles   yes           any            -> zip
        #   manyfiles   no            yes            -> error
        #   onefile     yes           no             -> zip
        #   onefile     any           yes            -> text
        contain_many_files = False
        if storage.get("pkg-dir"):
            # check if there is more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
            if len(pkg_files) >= 3 or (
                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
            ):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            return (
                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
                "text/plain",
            )
        elif contain_many_files and not accept_zip:
            raise EngineException(
                "Packages that contains several files need to be retrieved with 'application/zip'"
                "Accept header",
                http_code=HTTPStatus.NOT_ACCEPTABLE,
            )
        else:
            if not storage.get("zipfile"):
                # TODO generate zipfile if not present
                raise EngineException(
                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                    "future versions",
                    http_code=HTTPStatus.NOT_ACCEPTABLE,
                )
            return (
                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
                accept_zip,
            )

    def _remove_yang_prefixes_from_descriptor(self, descriptor):
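        # pyangbind emits keys with YANG module prefixes (e.g. "etsi-nfv-vnfd:vnfd"); keep only the part
        # after the last ":" so that the stored descriptor uses plain key names.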
        new_descriptor = {}
        for k, v in descriptor.items():
            new_v = v
            if isinstance(v, dict):
                new_v = self._remove_yang_prefixes_from_descriptor(v)
            elif isinstance(v, list):
                new_v = list()
                for x in v:
                    if isinstance(x, dict):
                        new_v.append(self._remove_yang_prefixes_from_descriptor(x))
                    else:
                        new_v.append(x)
            new_descriptor[k.split(":")[-1]] = new_v
        return new_descriptor

    def pyangbind_validation(self, item, data, force=False):
        raise EngineException(
            "Not possible to validate '{}' item".format(item),
            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
        )

    def _validate_input_edit(self, indata, content, force=False):
        # no need to validate with pyangbind because it will be validated at check_conflict_on_edit
        if "_id" in indata:
            indata.pop("_id")
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "operationalState" in indata:
            if indata["operationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("operationalState")
            else:
                raise EngineException(
                    "State '{}' is not a valid operational state".format(
                        indata["operationalState"]
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            if type(data) == dict:
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException(
                    "userDefinedData should be an object, but is '{}' instead".format(
                        type(data)
                    ),
                    http_code=HTTPStatus.BAD_REQUEST,
                )

        if (
            "operationalState" in indata["_admin"]
            and content["_admin"]["operationalState"]
            == indata["_admin"]["operationalState"]
        ):
            raise EngineException(
                "operationalState already {}".format(
                    content["_admin"]["operationalState"]
                ),
                http_code=HTTPStatus.CONFLICT,
            )

        return indata

    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        # Example:
        #    raise EngineException(
        #        "Error in validating new descriptor: <NODE> cannot be modified",
        #        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
        #    )
        pass


class VnfdTopic(DescriptorTopic):
    topic = "vnfds"
    topic_msg = "vnfd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
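            # Round-trip the descriptor through the pyangbind-generated SOL006 model: loading it validates
            # the structure and dumping it back yields a normalized copy that is merged over the input data.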
            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
            pybindJSONDecoder.load_ietf_json(
                {"etsi-nfv-vnfd:vnfd": data},
                None,
                None,
                obj=myvnfd,
                path_helper=True,
                skip_unknown=force,
            )
            out = pybindJSON.dumps(myvnfd, mode="ietf")
            desc_out = self._remove_envelop(yaml.safe_load(out))
            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
            return utils.deep_update_dict(data, desc_out)
        except Exception as e:
            raise EngineException(
                "Error in pyangbind validation: {}".format(str(e)),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def _descriptor_data_is_in_old_format(data):
        return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)

    @staticmethod
    def _remove_envelop(indata=None):
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get("etsi-nfv-vnfd:vnfd"):
            if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
                raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
            clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
        elif clean_indata.get("vnfd"):
            if not isinstance(clean_indata["vnfd"], dict):
                raise EngineException("'vnfd' must be dict")
            clean_indata = clean_indata["vnfd"]

        return clean_indata

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        final_content = super().check_conflict_on_edit(
            session, final_content, edit_content, _id
        )

        # set type of vnfd
        contains_pdu = False
        contains_vdu = False
        for vdu in get_iterable(final_content.get("vdu")):
            if vdu.get("pdu-type"):
                contains_pdu = True
            else:
                contains_vdu = True
        if contains_pdu:
            final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd"
        elif contains_vdu:
            final_content["_admin"]["type"] = "vnfd"
        # if there is neither a vdu nor a pdu, do not fill the type
        return final_content

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are
        considered. Note that the VNFD can be public and used by NSDs of other projects. Also check that
        there are no deployments (VNFRs) that use this VNFD.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnfd internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty vnfd not uploaded
            return

        _filter = self._get_project_filter(session)

        # check vnfrs using this vnfd
        _filter["vnfd-id"] = _id
        if self.db.get_list("vnfrs", _filter):
            raise EngineException(
                "There is at least one VNF instance using this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

        # check NSD referencing this VNFD
        del _filter["vnfd-id"]
        _filter["vnfd-id"] = descriptor_id
        if self.db.get_list("nsds", _filter):
            raise EngineException(
                "There is at least one NS package referencing this descriptor",
                http_code=HTTPStatus.CONFLICT,
            )

    def _validate_input_new(self, indata, storage_params, force=False):
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata.pop("links", None)

        indata = self.pyangbind_validation("vnfds", indata, force)
        # Cross references validation in the descriptor

        self.validate_mgmt_interface_connection_point(indata)

        for vdu in get_iterable(indata.get("vdu")):
            self.validate_vdu_internal_connection_points(vdu)
            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
            self._validate_vdu_charms_in_package(storage_params, indata)

        self._validate_vnf_charms_in_package(storage_params, indata)

        self.validate_external_connection_points(indata)
        self.validate_internal_virtual_links(indata)
        self.validate_monitoring_params(indata)
        self.validate_scaling_group_descriptor(indata)

        return indata

    @staticmethod
    def validate_mgmt_interface_connection_point(indata):
        if not indata.get("vdu"):
            return
        if not indata.get("mgmt-cp"):
            raise EngineException(
                "'mgmt-cp' is a mandatory field and it is not defined",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

        for cp in get_iterable(indata.get("ext-cpd")):
            if cp["id"] == indata["mgmt-cp"]:
                break
        else:
            raise EngineException(
                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )

    @staticmethod
    def validate_vdu_internal_connection_points(vdu):
        int_cpds = set()
        for cpd in get_iterable(vdu.get("int-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in int_cpds:
                raise EngineException(
                    "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
                        vdu["id"], cpd_id
                    ),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            int_cpds.add(cpd_id)

    @staticmethod
    def validate_external_connection_points(indata):
        all_vdus_int_cpds = set()
        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))

        ext_cpds = set()
        for cpd in get_iterable(indata.get("ext-cpd")):
            cpd_id = cpd.get("id")
            if cpd_id and cpd_id in ext_cpds:
                raise EngineException(
                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            ext_cpds.add(cpd_id)

            int_cpd = cpd.get("int-cpd")
            if int_cpd:
                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
                    raise EngineException(
                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
                            cpd_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
            # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?

    def _validate_vdu_charms_in_package(self, storage_params, indata):
        for df in indata["df"]:
            if (
                "lcm-operations-configuration" in df
                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
            ):
                configs = df["lcm-operations-configuration"][
                    "operate-vnf-op-config"
                ].get("day1-2", [])
                vdus = df.get("vdu-profile", [])
                for vdu in vdus:
                    for config in configs:
                        if config["id"] == vdu["id"] and utils.find_in_list(
                            config.get("execution-environment-list", []),
                            lambda ee: "juju" in ee,
                        ):
                            if not self._validate_package_folders(
                                storage_params, "charms"
                            ) and not self._validate_package_folders(
                                storage_params, "Scripts/charms"
                            ):
                                raise EngineException(
                                    "Charm defined in vnf[id={}] but not present in "
                                    "package".format(indata["id"])
                                )

    def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
        if not vdu.get("cloud-init-file"):
            return
        if not self._validate_package_folders(
            storage_params, "cloud_init", vdu["cloud-init-file"]
        ) and not self._validate_package_folders(
            storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
        ):
            raise EngineException(
                "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
                "package".format(indata["id"], vdu["id"])
            )

    def _validate_vnf_charms_in_package(self, storage_params, indata):
        # Get VNF configuration through new container
        for deployment_flavor in indata.get("df", []):
            if "lcm-operations-configuration" not in deployment_flavor:
                return
            if (
                "operate-vnf-op-config"
                not in deployment_flavor["lcm-operations-configuration"]
            ):
                return
            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
                "operate-vnf-op-config"
            ]["day1-2"]:
                if day_1_2_config["id"] == indata["id"]:
                    if utils.find_in_list(
                        day_1_2_config.get("execution-environment-list", []),
                        lambda ee: "juju" in ee,
                    ):
                        if not self._validate_package_folders(
                            storage_params, "charms"
                        ) and not self._validate_package_folders(
                            storage_params, "Scripts/charms"
                        ):
                            raise EngineException(
                                "Charm defined in vnf[id={}] but not present in "
                                "package".format(indata["id"])
                            )

    def _validate_package_folders(self, storage_params, folder, file=None):
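        # The package may still live in the temporary "<folder>_" directory while an upload transaction is
        # in progress, so both the temporary and the final locations are checked.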
        if not storage_params:
            return False
        elif not storage_params.get("pkg-dir"):
            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
                f = "{}_/{}".format(
                    storage_params["folder"], folder
                )
            else:
                f = "{}/{}".format(
                    storage_params["folder"], folder
                )
            if file:
                return self.fs.file_exists("{}/{}".format(f, file), "file")
            else:
                if self.fs.file_exists(f, "dir"):
                    if self.fs.dir_ls(f):
                        return True
            return False
        else:
            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
                f = "{}_/{}/{}".format(
                    storage_params["folder"], storage_params["pkg-dir"], folder
                )
            else:
                f = "{}/{}/{}".format(
                    storage_params["folder"], storage_params["pkg-dir"], folder
                )
            if file:
                return self.fs.file_exists("{}/{}".format(f, file), "file")
            else:
                if self.fs.file_exists(f, "dir"):
                    if self.fs.dir_ls(f):
                        return True
            return False

    @staticmethod
    def validate_internal_virtual_links(indata):
        all_ivld_ids = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            ivld_id = ivld.get("id")
            if ivld_id and ivld_id in all_ivld_ids:
                raise EngineException(
                    "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                )
            else:
                all_ivld_ids.add(ivld_id)

        for vdu in get_iterable(indata.get("vdu")):
            for int_cpd in get_iterable(vdu.get("int-cpd")):
                int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
                if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                    raise EngineException(
                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
                        "int-virtual-link-desc".format(
                            vdu["id"], int_cpd["id"], int_cpd_ivld_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

        for df in get_iterable(indata.get("df")):
            for vlp in get_iterable(df.get("virtual-link-profile")):
                vlp_ivld_id = vlp.get("id")
                if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
                    raise EngineException(
                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
                        "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )

    @staticmethod
    def validate_monitoring_params(indata):
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
                            ivld["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "vdu[id='{}']:monitoring-parameter[id='{}']".format(
                            vdu["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                mp_id = mp.get("id")
                if mp_id and mp_id in all_monitoring_params:
                    raise EngineException(
                        "Duplicated monitoring-parameter id in "
                        "df[id='{}']:monitoring-parameter[id='{}']".format(
                            df["id"], mp_id
                        ),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                    )
                else:
                    all_monitoring_params.add(mp_id)

    @staticmethod
    def validate_scaling_group_descriptor(indata):
        all_monitoring_params = set()
        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
            for mp in get_iterable(ivld.get("monitoring-parameters")):
                all_monitoring_params.add(mp.get("id"))

        for vdu in get_iterable(indata.get("vdu")):
            for mp in get_iterable(vdu.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for mp in get_iterable(df.get("monitoring-parameter")):
                all_monitoring_params.add(mp.get("id"))

        for df in get_iterable(indata.get("df")):
            for sa in get_iterable(df.get("scaling-aspect")):
                for sp in get_iterable(sa.get("scaling-policy")):
                    for sc in get_iterable(sp.get("scaling-criteria")):
                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
                        if (
                            sc_monitoring_param
                            and sc_monitoring_param not in all_monitoring_params
                        ):
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
                                "[name='{}']:scaling-criteria[name='{}']: "
                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
                                    df["id"],
                                    sa["id"],
                                    sp["name"],
                                    sc["name"],
                                    sc_monitoring_param,
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

                for sca in get_iterable(sa.get("scaling-config-action")):
                    if (
                        "lcm-operations-configuration" not in df
                        or "operate-vnf-op-config"
                        not in df["lcm-operations-configuration"]
                        or not utils.find_in_list(
                            df["lcm-operations-configuration"][
                                "operate-vnf-op-config"
                            ].get("day1-2", []),
                            lambda config: config["id"] == indata["id"],
                        )
                    ):
                        raise EngineException(
                            "'day1-2 configuration' not defined in the descriptor but it is "
                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
                                df["id"], sa["id"]
                            ),
                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                        )
                    for configuration in get_iterable(
                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                            "day1-2", []
                        )
                    ):
                        for primitive in get_iterable(
                            configuration.get("config-primitive")
                        ):
                            if (
                                primitive["name"]
                                == sca["vnf-config-primitive-name-ref"]
                            ):
                                break
                        else:
                            raise EngineException(
                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
                                "config-primitive-name-ref='{}' does not match any "
                                "day1-2 configuration:config-primitive:name".format(
                                    df["id"],
                                    sa["id"],
                                    sca["vnf-config-primitive-name-ref"],
                                ),
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )

    def delete_extra(self, session, _id, db_content, not_send_msg=None):
        """
        Deletes associated file system storage (via super).
        Deletes associated vnfpkgops from database.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param db_content: The database content of the descriptor
        :return: None
        :raises: FsException in case of error while deleting associated storage
        """
        super().delete_extra(session, _id, db_content, not_send_msg)
        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})

    def sol005_projection(self, data):
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

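        # Build the SOL005 "_links" relations for this VNF package resource.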
        links = {}
        links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
        links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
        links["packageContent"] = {
            "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
        }
        data["_links"] = links

        return super().sol005_projection(data)

    @staticmethod
    def find_software_version(vnfd: dict) -> str:
        """Find the software version in the VNFD descriptor

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            software-version (str)
        """
        default_sw_version = "1.0"
        if vnfd.get("vnfd"):
            vnfd = vnfd["vnfd"]
        if vnfd.get("software-version"):
            return vnfd["software-version"]
        else:
            return default_sw_version

    @staticmethod
    def extract_policies(vnfd: dict) -> dict:
        """Removes the policies from the VNFD descriptor

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict): VNFD which does not include policies
        """
        for df in vnfd.get("df", {}):
            for policy in ["scaling-aspect", "healing-aspect"]:
                if df.get(policy, {}):
                    df.pop(policy)
        for vdu in vnfd.get("vdu", {}):
            for alarm_policy in ["alarm", "monitoring-parameter"]:
                if vdu.get(alarm_policy, {}):
                    vdu.pop(alarm_policy)
        return vnfd

    @staticmethod
    def extract_day12_primitives(vnfd: dict) -> dict:
        """Removes the day1-2 primitives from the VNFD descriptor

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict)
        """
        for df_id, df in enumerate(vnfd.get("df", {})):
            if (
                df.get("lcm-operations-configuration", {})
                .get("operate-vnf-op-config", {})
                .get("day1-2")
            ):
                day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
                    "day1-2"
                )
                for config_id, config in enumerate(day12):
                    for key in [
                        "initial-config-primitive",
                        "config-primitive",
                        "terminate-config-primitive",
                    ]:
                        config.pop(key, None)
                    day12[config_id] = config
                df["lcm-operations-configuration"]["operate-vnf-op-config"][
                    "day1-2"
                ] = day12
            vnfd["df"][df_id] = df
        return vnfd

    def remove_modifiable_items(self, vnfd: dict) -> dict:
        """Removes the modifiable parts from the VNFD descriptor

        It calls different extract functions according to different update types
        to clear all the modifiable items from the VNFD

        Args:
            vnfd (dict): Descriptor as a dictionary

        Returns:
            vnfd (dict): Descriptor which does not include modifiable contents
        """
        if vnfd.get("vnfd"):
            vnfd = vnfd["vnfd"]
        vnfd.pop("_admin", None)
        # If other extractions need to be done on the VNFD,
        # the new extract methods can be appended to the list below.
        for extract_function in [self.extract_day12_primitives, self.extract_policies]:
            vnfd_temp = extract_function(vnfd)
            vnfd = vnfd_temp
        return vnfd

    def _validate_descriptor_changes(
        self,
        descriptor_id: str,
        descriptor_file_name: str,
        old_descriptor_directory: str,
        new_descriptor_directory: str,
    ):
        """Compares the old and new VNFD descriptors and validates the new descriptor.

        Args:
            old_descriptor_directory (str): Directory of the descriptor which is in use
            new_descriptor_directory (str): Directory of the descriptor proposed as an update (new revision)

        Returns:
            None

        Raises:
            EngineException: In case there are disallowed changes
        """
        try:
            # If the VNFD does not exist in the DB or it is not in use by any NS,
            # validation is not required.
            vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
            if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
                return

            # Get the old and new descriptor contents in order to compare them.
            with self.fs.file_open(
                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
            ) as old_descriptor_file:

                with self.fs.file_open(
                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                ) as new_descriptor_file:

                    old_content = yaml.safe_load(old_descriptor_file.read())
                    new_content = yaml.safe_load(new_descriptor_file.read())

                    # If the software version has changed, we do not need to validate
                    # the differences anymore.
                    if old_content and new_content:
                        if self.find_software_version(
                            old_content
                        ) != self.find_software_version(new_content):
                            return

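                        # With the same software-version, anything that still differs after stripping the
                        # modifiable items (policies, day1-2 primitives, _admin) is treated as a disallowed change.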
                        disallowed_change = DeepDiff(
                            self.remove_modifiable_items(old_content),
                            self.remove_modifiable_items(new_content),
                        )

                        if disallowed_change:
                            changed_nodes = functools.reduce(
                                lambda a, b: a + " , " + b,
                                [
                                    node.lstrip("root")
                                    for node in disallowed_change.get(
                                        "values_changed"
                                    ).keys()
                                ],
                            )

                            raise EngineException(
                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
                                "there are disallowed changes in the vnf descriptor.",
                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                            )
        except (
            DbException,
            AttributeError,
            IndexError,
            KeyError,
            ValueError,
        ) as e:
            raise type(e)(
                "VNF Descriptor could not be processed with error: {}.".format(e)
            )


class NsdTopic(DescriptorTopic):
    topic = "nsds"
    topic_msg = "nsd"

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    def pyangbind_validation(self, item, data, force=False):
        if self._descriptor_data_is_in_old_format(data):
            raise EngineException(
                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        try:
            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
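            # Save the vnf-profile list and restore it after the pyangbind round-trip (presumably to keep
            # fields that the generated model would otherwise normalize away).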
1394 |
1 |
mynsd = etsi_nfv_nsd.etsi_nfv_nsd() |
1395 |
1 |
pybindJSONDecoder.load_ietf_json( |
1396 |
|
{"nsd": {"nsd": [data]}}, |
1397 |
|
None, |
1398 |
|
None, |
1399 |
|
obj=mynsd, |
1400 |
|
path_helper=True, |
1401 |
|
skip_unknown=force, |
1402 |
|
) |
1403 |
1 |
out = pybindJSON.dumps(mynsd, mode="ietf") |
1404 |
1 |
desc_out = self._remove_envelop(yaml.safe_load(out)) |
1405 |
1 |
desc_out = self._remove_yang_prefixes_from_descriptor(desc_out) |
1406 |
1 |
if nsd_vnf_profiles: |
1407 |
1 |
desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles |
1408 |
1 |
return desc_out |
1409 |
1 |
except Exception as e: |
1410 |
1 |
raise EngineException( |
1411 |
|
"Error in pyangbind validation: {}".format(str(e)), |
1412 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY, |
1413 |
|
) |
1414 |
|
|
1415 |
1 |
@staticmethod |
1416 |
1 |
def _descriptor_data_is_in_old_format(data): |
1417 |
1 |
return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data) |
1418 |
|
|
1419 |
1 |
@staticmethod |
1420 |
1 |
def _remove_envelop(indata=None): |
1421 |
1 |
if not indata: |
1422 |
0 |
return {} |
1423 |
1 |
clean_indata = indata |
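# Accepted envelopes, as handled below:
#   {"nsd": {"nsd": [<nsd>]}}  or  {"etsi-nfv-nsd:nsd": {"nsd": [<nsd>]}}
# The single inner <nsd> dictionary is returned.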
1424 |
|
|
1425 |
1 |
if clean_indata.get("nsd"): |
1426 |
1 |
clean_indata = clean_indata["nsd"] |
1427 |
1 |
elif clean_indata.get("etsi-nfv-nsd:nsd"): |
1428 |
1 |
clean_indata = clean_indata["etsi-nfv-nsd:nsd"] |
1429 |
1 |
if clean_indata.get("nsd"): |
1430 |
1 |
if ( |
1431 |
|
not isinstance(clean_indata["nsd"], list) |
1432 |
|
or len(clean_indata["nsd"]) != 1 |
1433 |
|
): |
1434 |
1 |
raise EngineException("'nsd' must be a list of only one element") |
1435 |
1 |
clean_indata = clean_indata["nsd"][0] |
1436 |
1 |
return clean_indata |
1437 |
|
|
1438 |
1 |
def _validate_input_new(self, indata, storage_params, force=False): |
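# Strip SOL005 read-only attributes that a client may echo back; they are
# recomputed from _admin in sol005_projection().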
1439 |
1 |
indata.pop("nsdOnboardingState", None) |
1440 |
1 |
indata.pop("nsdOperationalState", None) |
1441 |
1 |
indata.pop("nsdUsageState", None) |
1442 |
|
|
1443 |
1 |
indata.pop("links", None) |
1444 |
|
|
1445 |
1 |
indata = self.pyangbind_validation("nsds", indata, force) |
1446 |
|
# Cross references validation in the descriptor |
1447 |
|
# TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none |
1448 |
1 |
for vld in get_iterable(indata.get("virtual-link-desc")): |
1449 |
1 |
self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata) |
1450 |
|
|
1451 |
1 |
self.validate_vnf_profiles_vnfd_id(indata) |
1452 |
|
|
1453 |
1 |
return indata |
1454 |
|
|
1455 |
1 |
@staticmethod |
1456 |
1 |
def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata): |
1457 |
1 |
if not vld.get("mgmt-network"): |
1458 |
1 |
return |
1459 |
1 |
vld_id = vld.get("id") |
1460 |
1 |
for df in get_iterable(indata.get("df")): |
1461 |
1 |
for vlp in get_iterable(df.get("virtual-link-profile")): |
1462 |
1 |
if vld_id and vld_id == vlp.get("virtual-link-desc-id"): |
1463 |
1 |
if vlp.get("virtual-link-protocol-data"): |
1464 |
1 |
raise EngineException( |
1465 |
|
"Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-" |
1466 |
|
"protocol-data You cannot set a virtual-link-protocol-data " |
1467 |
|
"when mgmt-network is True".format(df["id"], vlp["id"]), |
1468 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY, |
1469 |
|
) |
1470 |
|
|
1471 |
1 |
@staticmethod |
1472 |
1 |
def validate_vnf_profiles_vnfd_id(indata): |
1473 |
1 |
all_vnfd_ids = set(get_iterable(indata.get("vnfd-id"))) |
1474 |
1 |
for df in get_iterable(indata.get("df")): |
1475 |
1 |
for vnf_profile in get_iterable(df.get("vnf-profile")): |
1476 |
1 |
vnfd_id = vnf_profile.get("vnfd-id") |
1477 |
1 |
if vnfd_id and vnfd_id not in all_vnfd_ids: |
1478 |
1 |
raise EngineException( |
1479 |
|
"Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' " |
1480 |
|
"does not match any vnfd-id".format( |
1481 |
|
df["id"], vnf_profile["id"], vnfd_id |
1482 |
|
), |
1483 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY, |
1484 |
|
) |
1485 |
|
|
1486 |
1 |
def _validate_input_edit(self, indata, content, force=False): |
1487 |
|
# no need to validate with pyangbind because it will be validated at check_conflict_on_edit
1488 |
|
""" |
1489 |
|
indata looks as follows: |
1490 |
|
- In the new case (conformant) |
1491 |
|
{'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23', |
1492 |
|
'_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}} |
1493 |
|
- In the old case (backwards-compatible) |
1494 |
|
{'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'} |
1495 |
|
""" |
1496 |
1 |
if "_admin" not in indata: |
1497 |
1 |
indata["_admin"] = {} |
1498 |
|
|
1499 |
1 |
if "nsdOperationalState" in indata: |
1500 |
0 |
if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"): |
1501 |
0 |
indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState") |
1502 |
|
else: |
1503 |
0 |
raise EngineException( |
1504 |
|
"State '{}' is not a valid operational state".format( |
1505 |
|
indata["nsdOperationalState"] |
1506 |
|
), |
1507 |
|
http_code=HTTPStatus.BAD_REQUEST, |
1508 |
|
) |
1509 |
|
|
1510 |
|
# In the case of user defined data, we need to put the data in the root of the object |
1511 |
|
# to preserve current expected behaviour |
1512 |
1 |
if "userDefinedData" in indata: |
1513 |
0 |
data = indata.pop("userDefinedData") |
1514 |
0 |
if isinstance(data, dict):
1515 |
0 |
indata["_admin"]["userDefinedData"] = data |
1516 |
|
else: |
1517 |
0 |
raise EngineException( |
1518 |
|
"userDefinedData should be an object, but is '{}' instead".format( |
1519 |
|
type(data) |
1520 |
|
), |
1521 |
|
http_code=HTTPStatus.BAD_REQUEST, |
1522 |
|
) |
1523 |
1 |
if ( |
1524 |
|
"operationalState" in indata["_admin"] |
1525 |
|
and content["_admin"]["operationalState"] |
1526 |
|
== indata["_admin"]["operationalState"] |
1527 |
|
): |
1528 |
0 |
raise EngineException( |
1529 |
|
"nsdOperationalState already {}".format( |
1530 |
|
content["_admin"]["operationalState"] |
1531 |
|
), |
1532 |
|
http_code=HTTPStatus.CONFLICT, |
1533 |
|
) |
1534 |
1 |
return indata |
1535 |
|
|
1536 |
1 |
def _check_descriptor_dependencies(self, session, descriptor): |
1537 |
|
""" |
1538 |
|
Check that the dependent descriptors exist on a new descriptor or edition. Also checks that references to vnfd
1539 |
|
connection points are ok |
1540 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1541 |
|
:param descriptor: descriptor to be inserted or edit |
1542 |
|
:return: None or raises exception |
1543 |
|
""" |
1544 |
1 |
if session["force"]: |
1545 |
1 |
return |
1546 |
1 |
vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor) |
1547 |
|
|
1548 |
|
# Cross references validation in the descriptor and vnfd connection point validation |
1549 |
1 |
for df in get_iterable(descriptor.get("df")): |
1550 |
1 |
self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index) |
1551 |
|
|
1552 |
1 |
def _get_descriptor_constituent_vnfds_index(self, session, descriptor): |
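# Builds a map {vnfd-id: vnfd record} for every constituent VNFD referenced
# by the NSD, raising 409 CONFLICT if a reference cannot be resolved within
# the project scope.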
1553 |
1 |
vnfds_index = {} |
1554 |
1 |
if descriptor.get("vnfd-id") and not session["force"]: |
1555 |
1 |
for vnfd_id in get_iterable(descriptor.get("vnfd-id")): |
1556 |
1 |
query_filter = self._get_project_filter(session) |
1557 |
1 |
query_filter["id"] = vnfd_id |
1558 |
1 |
vnf_list = self.db.get_list("vnfds", query_filter) |
1559 |
1 |
if not vnf_list: |
1560 |
1 |
raise EngineException( |
1561 |
|
"Descriptor error at 'vnfd-id'='{}' references a non " |
1562 |
|
"existing vnfd".format(vnfd_id), |
1563 |
|
http_code=HTTPStatus.CONFLICT, |
1564 |
|
) |
1565 |
1 |
vnfds_index[vnfd_id] = vnf_list[0] |
1566 |
1 |
return vnfds_index |
1567 |
|
|
1568 |
1 |
@staticmethod |
1569 |
1 |
def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index): |
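# Every constituent-cpd-id used in the DF's virtual-link-connectivity must
# match an ext-cpd:id exposed by the referenced VNFD.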
1570 |
1 |
for vnf_profile in get_iterable(df.get("vnf-profile")): |
1571 |
1 |
vnfd = vnfds_index.get(vnf_profile["vnfd-id"]) |
1572 |
1 |
all_vnfd_ext_cpds = set() |
1573 |
1 |
for ext_cpd in get_iterable(vnfd.get("ext-cpd")): |
1574 |
1 |
if ext_cpd.get("id"): |
1575 |
1 |
all_vnfd_ext_cpds.add(ext_cpd.get("id")) |
1576 |
|
|
1577 |
1 |
for virtual_link in get_iterable( |
1578 |
|
vnf_profile.get("virtual-link-connectivity") |
1579 |
|
): |
1580 |
1 |
for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")): |
1581 |
1 |
vl_cpd_id = vl_cpd.get("constituent-cpd-id") |
1582 |
1 |
if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds: |
1583 |
1 |
raise EngineException( |
1584 |
|
"Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity" |
1585 |
|
"[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a " |
1586 |
|
"non existing ext-cpd:id inside vnfd '{}'".format( |
1587 |
|
df["id"], |
1588 |
|
vnf_profile["id"], |
1589 |
|
virtual_link["virtual-link-profile-id"], |
1590 |
|
vl_cpd_id, |
1591 |
|
vnfd["id"], |
1592 |
|
), |
1593 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY, |
1594 |
|
) |
1595 |
|
|
1596 |
1 |
def check_conflict_on_edit(self, session, final_content, edit_content, _id): |
1597 |
1 |
final_content = super().check_conflict_on_edit( |
1598 |
|
session, final_content, edit_content, _id |
1599 |
|
) |
1600 |
|
|
1601 |
1 |
self._check_descriptor_dependencies(session, final_content) |
1602 |
|
|
1603 |
1 |
return final_content |
1604 |
|
|
1605 |
1 |
def check_conflict_on_del(self, session, _id, db_content): |
1606 |
|
""" |
1607 |
|
Check that there is no NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
1608 |
|
that NSD can be public and be used by other projects. |
1609 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1610 |
|
:param _id: nsd internal id |
1611 |
|
:param db_content: The database content of the _id |
1612 |
|
:return: None or raises EngineException with the conflict |
1613 |
|
""" |
1614 |
1 |
if session["force"]: |
1615 |
0 |
return |
1616 |
1 |
descriptor = db_content |
1617 |
1 |
descriptor_id = descriptor.get("id") |
1618 |
1 |
if not descriptor_id: # empty nsd not uploaded |
1619 |
0 |
return |
1620 |
|
|
1621 |
|
# check NSD used by NS |
1622 |
1 |
_filter = self._get_project_filter(session) |
1623 |
1 |
_filter["nsd-id"] = _id |
1624 |
1 |
if self.db.get_list("nsrs", _filter): |
1625 |
1 |
raise EngineException( |
1626 |
|
"There is at least one NS instance using this descriptor", |
1627 |
|
http_code=HTTPStatus.CONFLICT, |
1628 |
|
) |
1629 |
|
|
1630 |
|
# check NSD referenced by NST |
1631 |
1 |
del _filter["nsd-id"] |
1632 |
1 |
_filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id |
1633 |
1 |
if self.db.get_list("nsts", _filter): |
1634 |
1 |
raise EngineException( |
1635 |
|
"There is at least one NetSlice Template referencing this descriptor", |
1636 |
|
http_code=HTTPStatus.CONFLICT, |
1637 |
|
) |
1638 |
|
|
1639 |
1 |
def delete_extra(self, session, _id, db_content, not_send_msg=None): |
1640 |
|
""" |
1641 |
|
Deletes associated file system storage (via super)
1642 |
|
Deletes associated descriptor revisions from the database.
1643 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1644 |
|
:param _id: server internal id |
1645 |
|
:param db_content: The database content of the descriptor |
1646 |
|
:return: None |
1647 |
|
:raises: FsException in case of error while deleting associated storage |
1648 |
|
""" |
1649 |
1 |
super().delete_extra(session, _id, db_content, not_send_msg) |
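# Also purge stored descriptor revisions; revision documents use ids that
# embed this package _id, hence the $regex match.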
1650 |
1 |
self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
1651 |
|
|
1652 |
1 |
@staticmethod |
1653 |
1 |
def extract_day12_primitives(nsd: dict) -> dict: |
1654 |
|
"""Removes the day12 primitives from the NSD descriptors |
1655 |
|
|
1656 |
|
Args: |
1657 |
|
nsd (dict): Descriptor as a dictionary |
1658 |
|
|
1659 |
|
Returns: |
1660 |
|
nsd (dict): Cleared NSD |
1661 |
|
""" |
1662 |
1 |
if nsd.get("ns-configuration"): |
1663 |
1 |
for key in [ |
1664 |
|
"config-primitive", |
1665 |
|
"initial-config-primitive", |
1666 |
|
"terminate-config-primitive", |
1667 |
|
]: |
1668 |
1 |
nsd["ns-configuration"].pop(key, None) |
1669 |
1 |
return nsd |
1670 |
|
|
1671 |
1 |
def remove_modifiable_items(self, nsd: dict) -> dict: |
1672 |
|
"""Removes the modifiable parts from the VNFD descriptors |
1673 |
|
|
1674 |
|
It calls different extract functions according to different update types |
1675 |
|
to clear all the modifiable items from NSD |
1676 |
|
|
1677 |
|
Args: |
1678 |
|
nsd (dict): Descriptor as a dictionary |
1679 |
|
|
1680 |
|
Returns: |
1681 |
|
nsd (dict): Descriptor which does not include modifiable contents |
1682 |
|
""" |
1683 |
1 |
while isinstance(nsd, dict) and nsd.get("nsd"): |
1684 |
0 |
nsd = nsd["nsd"] |
1685 |
1 |
if isinstance(nsd, list): |
1686 |
0 |
nsd = nsd[0] |
1687 |
1 |
nsd.pop("_admin", None) |
1688 |
|
# If more extractions need to be done from the NSD,
1689 |
|
# new extract methods can be appended to the list below.
1690 |
1 |
for extract_function in [self.extract_day12_primitives]: |
1691 |
1 |
nsd_temp = extract_function(nsd) |
1692 |
1 |
nsd = nsd_temp |
1693 |
1 |
return nsd |
1694 |
|
|
1695 |
1 |
def _validate_descriptor_changes( |
1696 |
|
self, |
1697 |
|
descriptor_id: str, |
1698 |
|
descriptor_file_name: str, |
1699 |
|
old_descriptor_directory: str, |
1700 |
|
new_descriptor_directory: str, |
1701 |
|
): |
1702 |
|
"""Compares the old and new NSD descriptors and validates the new descriptor |
1703 |
|
|
1704 |
|
Args: |
1705 |
|
old_descriptor_directory: Directory of the descriptor currently in use
1706 |
|
new_descriptor_directory: Directory of the descriptor proposed as the update (new revision)
1707 |
|
|
1708 |
|
Returns: |
1709 |
|
None |
1710 |
|
|
1711 |
|
Raises: |
1712 |
|
EngineException: In case of disallowed changes in the new descriptor
1713 |
|
""" |
1714 |
|
|
1715 |
1 |
try: |
1716 |
|
# If NSD does not exist in DB, or it is not in use by any NS, |
1717 |
|
# validation is not required. |
1718 |
1 |
nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False) |
1719 |
1 |
if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db): |
1720 |
1 |
return |
1721 |
|
|
1722 |
|
# Get the old and new descriptor contents in order to compare them. |
1723 |
1 |
with self.fs.file_open( |
1724 |
|
(old_descriptor_directory.rstrip("/"), descriptor_file_name), "r" |
1725 |
|
) as old_descriptor_file: |
1726 |
|
|
1727 |
1 |
with self.fs.file_open( |
1728 |
|
(new_descriptor_directory.rstrip("/"), descriptor_file_name), "r" |
1729 |
|
) as new_descriptor_file: |
1730 |
|
|
1731 |
1 |
old_content = yaml.safe_load(old_descriptor_file.read()) |
1732 |
1 |
new_content = yaml.safe_load(new_descriptor_file.read()) |
1733 |
|
|
1734 |
1 |
if old_content and new_content: |
1735 |
1 |
disallowed_change = DeepDiff( |
1736 |
|
self.remove_modifiable_items(old_content), |
1737 |
|
self.remove_modifiable_items(new_content), |
1738 |
|
) |
1739 |
|
|
1740 |
1 |
if disallowed_change: |
1741 |
1 |
changed_nodes = functools.reduce( |
1742 |
|
lambda a, b: a + ", " + b, |
1743 |
|
[ |
1744 |
|
node.lstrip("root") |
1745 |
|
for node in disallowed_change.get( |
1746 |
|
"values_changed" |
1747 |
|
).keys() |
1748 |
|
], |
1749 |
|
) |
1750 |
|
|
1751 |
1 |
raise EngineException( |
1752 |
|
f"Error in validating new descriptor: {changed_nodes} cannot be modified, " |
1753 |
|
"there are disallowed changes in the ns descriptor. ", |
1754 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY, |
1755 |
|
) |
1756 |
1 |
except ( |
1757 |
|
DbException, |
1758 |
|
AttributeError, |
1759 |
|
IndexError, |
1760 |
|
KeyError, |
1761 |
|
ValueError, |
1762 |
|
) as e: |
1763 |
0 |
raise type(e)( |
1764 |
|
"NS Descriptor could not be processed with error: {}.".format(e) |
1765 |
|
) |
1766 |
|
|
1767 |
1 |
def sol005_projection(self, data): |
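# Project internal _admin state onto SOL005 NsdInfo attributes and add
# the HATEOAS links expected by clients.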
1768 |
0 |
data["nsdOnboardingState"] = data["_admin"]["onboardingState"] |
1769 |
0 |
data["nsdOperationalState"] = data["_admin"]["operationalState"] |
1770 |
0 |
data["nsdUsageState"] = data["_admin"]["usageState"] |
1771 |
|
|
1772 |
0 |
links = {} |
1773 |
0 |
links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])} |
1774 |
0 |
links["nsd_content"] = { |
1775 |
|
"href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"]) |
1776 |
|
} |
1777 |
0 |
data["_links"] = links |
1778 |
|
|
1779 |
0 |
return super().sol005_projection(data) |
1780 |
|
|
1781 |
|
|
1782 |
1 |
class NstTopic(DescriptorTopic): |
1783 |
1 |
topic = "nsts" |
1784 |
1 |
topic_msg = "nst" |
1785 |
1 |
quota_name = "slice_templates" |
1786 |
|
|
1787 |
1 |
def __init__(self, db, fs, msg, auth): |
1788 |
0 |
DescriptorTopic.__init__(self, db, fs, msg, auth) |
1789 |
|
|
1790 |
1 |
def pyangbind_validation(self, item, data, force=False): |
1791 |
0 |
try: |
1792 |
0 |
mynst = nst_im() |
1793 |
0 |
pybindJSONDecoder.load_ietf_json( |
1794 |
|
{"nst": [data]}, |
1795 |
|
None, |
1796 |
|
None, |
1797 |
|
obj=mynst, |
1798 |
|
path_helper=True, |
1799 |
|
skip_unknown=force, |
1800 |
|
) |
1801 |
0 |
out = pybindJSON.dumps(mynst, mode="ietf") |
1802 |
0 |
desc_out = self._remove_envelop(yaml.safe_load(out)) |
1803 |
0 |
return desc_out |
1804 |
0 |
except Exception as e: |
1805 |
0 |
raise EngineException( |
1806 |
|
"Error in pyangbind validation: {}".format(str(e)), |
1807 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY, |
1808 |
|
) |
1809 |
|
|
1810 |
1 |
@staticmethod |
1811 |
1 |
def _remove_envelop(indata=None): |
1812 |
0 |
if not indata: |
1813 |
0 |
return {} |
1814 |
0 |
clean_indata = indata |
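# Accepted envelopes, as handled below:
#   {"nst": [<nst>]}  or  {"nst:nst": [<nst>]}
# The single inner <nst> dictionary is returned.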
1815 |
|
|
1816 |
0 |
if clean_indata.get("nst"): |
1817 |
0 |
if ( |
1818 |
|
not isinstance(clean_indata["nst"], list) |
1819 |
|
or len(clean_indata["nst"]) != 1 |
1820 |
|
): |
1821 |
0 |
raise EngineException("'nst' must be a list only one element") |
1822 |
0 |
clean_indata = clean_indata["nst"][0] |
1823 |
0 |
elif clean_indata.get("nst:nst"): |
1824 |
0 |
if ( |
1825 |
|
not isinstance(clean_indata["nst:nst"], list) |
1826 |
|
or len(clean_indata["nst:nst"]) != 1 |
1827 |
|
): |
1828 |
0 |
raise EngineException("'nst:nst' must be a list only one element") |
1829 |
0 |
clean_indata = clean_indata["nst:nst"][0] |
1830 |
0 |
return clean_indata |
1831 |
|
|
1832 |
1 |
def _validate_input_new(self, indata, storage_params, force=False): |
1833 |
0 |
indata.pop("onboardingState", None) |
1834 |
0 |
indata.pop("operationalState", None) |
1835 |
0 |
indata.pop("usageState", None) |
1836 |
0 |
indata = self.pyangbind_validation("nsts", indata, force) |
1837 |
0 |
return indata.copy() |
1838 |
|
|
1839 |
1 |
def _check_descriptor_dependencies(self, session, descriptor): |
1840 |
|
""" |
1841 |
|
Check that the dependent descriptors exist on a new descriptor or edition |
1842 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1843 |
|
:param descriptor: descriptor to be inserted or edit |
1844 |
|
:return: None or raises exception |
1845 |
|
""" |
1846 |
0 |
if not descriptor.get("netslice-subnet"): |
1847 |
0 |
return |
1848 |
0 |
for nsd in descriptor["netslice-subnet"]: |
1849 |
0 |
nsd_id = nsd["nsd-ref"] |
1850 |
0 |
filter_q = self._get_project_filter(session) |
1851 |
0 |
filter_q["id"] = nsd_id |
1852 |
0 |
if not self.db.get_list("nsds", filter_q): |
1853 |
0 |
raise EngineException( |
1854 |
|
"Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non " |
1855 |
|
"existing nsd".format(nsd_id), |
1856 |
|
http_code=HTTPStatus.CONFLICT, |
1857 |
|
) |
1858 |
|
|
1859 |
1 |
def check_conflict_on_edit(self, session, final_content, edit_content, _id): |
1860 |
0 |
final_content = super().check_conflict_on_edit( |
1861 |
|
session, final_content, edit_content, _id |
1862 |
|
) |
1863 |
|
|
1864 |
0 |
self._check_descriptor_dependencies(session, final_content) |
1865 |
0 |
return final_content |
1866 |
|
|
1867 |
1 |
def check_conflict_on_del(self, session, _id, db_content): |
1868 |
|
""" |
1869 |
|
Check that there is no NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
1870 |
|
that NST can be public and be used by other projects. |
1871 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1872 |
|
:param _id: nst internal id |
1873 |
|
:param db_content: The database content of the _id. |
1874 |
|
:return: None or raises EngineException with the conflict |
1875 |
|
""" |
1876 |
|
# TODO: Check this method |
1877 |
0 |
if session["force"]: |
1878 |
0 |
return |
1879 |
|
# Check whether any NetSlice Instance (nsis) in this project uses this NST
1880 |
0 |
_filter = self._get_project_filter(session) |
1881 |
0 |
_filter["_admin.nst-id"] = _id |
1882 |
0 |
if self.db.get_list("nsis", _filter): |
1883 |
0 |
raise EngineException( |
1884 |
|
"there is at least one Netslice Instance using this descriptor", |
1885 |
|
http_code=HTTPStatus.CONFLICT, |
1886 |
|
) |
1887 |
|
|
1888 |
1 |
def sol005_projection(self, data): |
1889 |
0 |
data["onboardingState"] = data["_admin"]["onboardingState"] |
1890 |
0 |
data["operationalState"] = data["_admin"]["operationalState"] |
1891 |
0 |
data["usageState"] = data["_admin"]["usageState"] |
1892 |
|
|
1893 |
0 |
links = {} |
1894 |
0 |
links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])} |
1895 |
0 |
links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])} |
1896 |
0 |
data["_links"] = links |
1897 |
|
|
1898 |
0 |
return super().sol005_projection(data) |
1899 |
|
|
1900 |
|
|
1901 |
1 |
class PduTopic(BaseTopic): |
1902 |
1 |
topic = "pdus" |
1903 |
1 |
topic_msg = "pdu" |
1904 |
1 |
quota_name = "pduds" |
1905 |
1 |
schema_new = pdu_new_schema |
1906 |
1 |
schema_edit = pdu_edit_schema |
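# PDUs are not SOL006 descriptors, so they are validated against these JSON
# schemas (via validate_input) instead of the pyangbind model validation
# used by the descriptor topics.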
1907 |
|
|
1908 |
1 |
def __init__(self, db, fs, msg, auth): |
1909 |
0 |
BaseTopic.__init__(self, db, fs, msg, auth) |
1910 |
|
|
1911 |
1 |
@staticmethod |
1912 |
1 |
def format_on_new(content, project_id=None, make_public=False): |
1913 |
0 |
BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public) |
1914 |
0 |
content["_admin"]["onboardingState"] = "CREATED" |
1915 |
0 |
content["_admin"]["operationalState"] = "ENABLED" |
1916 |
0 |
content["_admin"]["usageState"] = "NOT_IN_USE" |
1917 |
|
|
1918 |
1 |
def check_conflict_on_del(self, session, _id, db_content): |
1919 |
|
""" |
1920 |
|
Check that there is no VNFR that uses this PDU
1921 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1922 |
|
:param _id: pdu internal id |
1923 |
|
:param db_content: The database content of the _id. |
1924 |
|
:return: None or raises EngineException with the conflict |
1925 |
|
""" |
1926 |
0 |
if session["force"]: |
1927 |
0 |
return |
1928 |
|
|
1929 |
0 |
_filter = self._get_project_filter(session) |
1930 |
0 |
_filter["vdur.pdu-id"] = _id |
1931 |
0 |
if self.db.get_list("vnfrs", _filter): |
1932 |
0 |
raise EngineException( |
1933 |
|
"There is at least one VNF instance using this PDU", |
1934 |
|
http_code=HTTPStatus.CONFLICT, |
1935 |
|
) |
1936 |
|
|
1937 |
|
|
1938 |
1 |
class VnfPkgOpTopic(BaseTopic): |
1939 |
1 |
topic = "vnfpkgops" |
1940 |
1 |
topic_msg = "vnfd" |
1941 |
1 |
schema_new = vnfpkgop_new_schema |
1942 |
1 |
schema_edit = None |
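# VNF package operation occurrences are append-only resources: edit, delete
# and delete_list are rejected below with 405 METHOD_NOT_ALLOWED.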
1943 |
|
|
1944 |
1 |
def __init__(self, db, fs, msg, auth): |
1945 |
0 |
BaseTopic.__init__(self, db, fs, msg, auth) |
1946 |
|
|
1947 |
1 |
def edit(self, session, _id, indata=None, kwargs=None, content=None): |
1948 |
0 |
raise EngineException( |
1949 |
|
"Method 'edit' not allowed for topic '{}'".format(self.topic), |
1950 |
|
HTTPStatus.METHOD_NOT_ALLOWED, |
1951 |
|
) |
1952 |
|
|
1953 |
1 |
def delete(self, session, _id, dry_run=False): |
1954 |
0 |
raise EngineException( |
1955 |
|
"Method 'delete' not allowed for topic '{}'".format(self.topic), |
1956 |
|
HTTPStatus.METHOD_NOT_ALLOWED, |
1957 |
|
) |
1958 |
|
|
1959 |
1 |
def delete_list(self, session, filter_q=None): |
1960 |
0 |
raise EngineException( |
1961 |
|
"Method 'delete_list' not allowed for topic '{}'".format(self.topic), |
1962 |
|
HTTPStatus.METHOD_NOT_ALLOWED, |
1963 |
|
) |
1964 |
|
|
1965 |
1 |
def new(self, rollback, session, indata=None, kwargs=None, headers=None): |
1966 |
|
""" |
1967 |
|
Creates a new entry into database. |
1968 |
|
:param rollback: list where created database items are appended, in case a rollback needs to be done
1969 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
1970 |
|
:param indata: data to be inserted |
1971 |
|
:param kwargs: used to override the indata descriptor |
1972 |
|
:param headers: http request headers |
1973 |
|
:return: _id, op_id: |
1974 |
|
_id: identity of the inserted data. |
1975 |
|
op_id: None |
1976 |
|
""" |
1977 |
0 |
self._update_input_with_kwargs(indata, kwargs) |
1978 |
0 |
validate_input(indata, self.schema_new) |
1979 |
0 |
vnfpkg_id = indata["vnfPkgId"] |
1980 |
0 |
filter_q = BaseTopic._get_project_filter(session) |
1981 |
0 |
filter_q["_id"] = vnfpkg_id |
1982 |
0 |
vnfd = self.db.get_one("vnfds", filter_q) |
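# Locate the requested KDU inside the VNFD and record whether it is deployed
# as a helm chart or a juju bundle; the repository name, if any, is the
# prefix before "/" in the chart/bundle reference.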
1983 |
0 |
operation = indata["lcmOperationType"] |
1984 |
0 |
kdu_name = indata["kdu_name"] |
1985 |
0 |
for kdu in vnfd.get("kdu", []): |
1986 |
0 |
if kdu["name"] == kdu_name: |
1987 |
0 |
helm_chart = kdu.get("helm-chart") |
1988 |
0 |
juju_bundle = kdu.get("juju-bundle") |
1989 |
0 |
break |
1990 |
|
else: |
1991 |
0 |
raise EngineException( |
1992 |
|
"Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name) |
1993 |
|
) |
1994 |
0 |
if helm_chart: |
1995 |
0 |
indata["helm-chart"] = helm_chart |
1996 |
0 |
match = fullmatch(r"([^/]*)/([^/]*)", helm_chart) |
1997 |
0 |
repo_name = match.group(1) if match else None |
1998 |
0 |
elif juju_bundle: |
1999 |
0 |
indata["juju-bundle"] = juju_bundle |
2000 |
0 |
match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle) |
2001 |
0 |
repo_name = match.group(1) if match else None |
2002 |
|
else: |
2003 |
0 |
raise EngineException( |
2004 |
|
"Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format( |
2005 |
|
vnfpkg_id, kdu_name |
2006 |
|
) |
2007 |
|
) |
2008 |
0 |
if repo_name: |
2009 |
0 |
del filter_q["_id"] |
2010 |
0 |
filter_q["name"] = repo_name |
2011 |
0 |
repo = self.db.get_one("k8srepos", filter_q) |
2012 |
0 |
k8srepo_id = repo.get("_id") |
2013 |
0 |
k8srepo_url = repo.get("url") |
2014 |
|
else: |
2015 |
0 |
k8srepo_id = None |
2016 |
0 |
k8srepo_url = None |
2017 |
0 |
indata["k8srepoId"] = k8srepo_id |
2018 |
0 |
indata["k8srepo_url"] = k8srepo_url |
2019 |
0 |
vnfpkgop_id = str(uuid4()) |
2020 |
0 |
vnfpkgop_desc = { |
2021 |
|
"_id": vnfpkgop_id, |
2022 |
|
"operationState": "PROCESSING", |
2023 |
|
"vnfPkgId": vnfpkg_id, |
2024 |
|
"lcmOperationType": operation, |
2025 |
|
"isAutomaticInvocation": False, |
2026 |
|
"isCancelPending": False, |
2027 |
|
"operationParams": indata, |
2028 |
|
"links": { |
2029 |
|
"self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id, |
2030 |
|
"vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id, |
2031 |
|
}, |
2032 |
|
} |
2033 |
0 |
self.format_on_new( |
2034 |
|
vnfpkgop_desc, session["project_id"], make_public=session["public"] |
2035 |
|
) |
2036 |
0 |
ctime = vnfpkgop_desc["_admin"]["created"] |
2037 |
0 |
vnfpkgop_desc["statusEnteredTime"] = ctime |
2038 |
0 |
vnfpkgop_desc["startTime"] = ctime |
2039 |
0 |
self.db.create(self.topic, vnfpkgop_desc) |
2040 |
0 |
rollback.append({"topic": self.topic, "_id": vnfpkgop_id}) |
2041 |
0 |
self.msg.write(self.topic_msg, operation, vnfpkgop_desc) |
2042 |
0 |
return vnfpkgop_id, None |