1 |
|
# -*- coding: utf-8 -*- |
2 |
|
|
3 |
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
4 |
|
# you may not use this file except in compliance with the License. |
5 |
|
# You may obtain a copy of the License at |
6 |
|
# |
7 |
|
# http://www.apache.org/licenses/LICENSE-2.0 |
8 |
|
# |
9 |
|
# Unless required by applicable law or agreed to in writing, software |
10 |
|
# distributed under the License is distributed on an "AS IS" BASIS, |
11 |
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
12 |
|
# implied. |
13 |
|
# See the License for the specific language governing permissions and |
14 |
|
# limitations under the License. |
15 |
|
|
16 |
1 |
import tarfile |
17 |
1 |
import yaml |
18 |
1 |
import json |
19 |
|
# import logging |
20 |
1 |
from hashlib import md5 |
21 |
1 |
from osm_common.dbbase import DbException, deep_update_rfc7396 |
22 |
1 |
from http import HTTPStatus |
23 |
1 |
from time import time |
24 |
1 |
from uuid import uuid4 |
25 |
1 |
from re import fullmatch |
26 |
1 |
from osm_nbi.validation import ValidationError, pdu_new_schema, pdu_edit_schema, \ |
27 |
|
validate_input, vnfpkgop_new_schema |
28 |
1 |
from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable |
29 |
1 |
from osm_im.vnfd import vnfd as vnfd_im |
30 |
1 |
from osm_im.nsd import nsd as nsd_im |
31 |
1 |
from osm_im.nst import nst as nst_im |
32 |
1 |
from pyangbind.lib.serialise import pybindJSONDecoder |
33 |
1 |
import pyangbind.lib.pybindJSON as pybindJSON |
34 |
|
|
35 |
1 |
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>" |
36 |
|
|
37 |
|
|
38 |
1 |
class DescriptorTopic(BaseTopic):
    """Common behaviour for descriptor topics (VNFD/NSD/NST): SOL005-style
    two-step onboarding, package storage handling and validation hooks."""

    def __init__(self, db, fs, msg, auth):
        # db: database backend; fs: file-system storage backend;
        # msg: message-bus producer; auth: authenticator (project types)
        BaseTopic.__init__(self, db, fs, msg, auth)
42 |
|
|
43 |
1 |
    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        """
        Validate an edited descriptor before it is stored.

        Runs the generic BaseTopic checks, then enforces unique id/name values in
        every list of the descriptor, re-validates the merged content with
        pyangbind (via _validate_input_new) and, unless forced, rejects an "id"
        that already exists in another descriptor of the same project.

        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param final_content: descriptor content after merging the edit
        :param edit_content: the edit request body
        :param _id: internal id of the descriptor being edited
        :raises EngineException: UNPROCESSABLE_ENTITY on duplicated identifiers,
            CONFLICT when the "id" is already used by another descriptor
        """
        super().check_conflict_on_edit(session, final_content, edit_content, _id)

        def _check_unique_id_name(descriptor, position=""):
            # Recursively walk the descriptor; inside each non-empty list the
            # "id" (or, failing that, "name") of every dict item must be unique.
            # `position` accumulates a dotted path used only in error messages.
            for desc_key, desc_item in descriptor.items():
                if isinstance(desc_item, list) and desc_item:
                    used_ids = []
                    desc_item_id = None
                    for index, list_item in enumerate(desc_item):
                        if isinstance(list_item, dict):
                            _check_unique_id_name(list_item, "{}.{}[{}]"
                                                  .format(position, desc_key, index))
                            # Base case
                            # The first item decides whether this list is keyed by "id" or "name"
                            if index == 0 and (list_item.get("id") or list_item.get("name")):
                                desc_item_id = "id" if list_item.get("id") else "name"
                            if desc_item_id and list_item.get(desc_item_id):
                                if list_item[desc_item_id] in used_ids:
                                    position = "{}.{}[{}]".format(position, desc_key, index)
                                    raise EngineException("Error: identifier {} '{}' is not unique and repeats at '{}'"
                                                          .format(desc_item_id, list_item[desc_item_id],
                                                                  position), HTTPStatus.UNPROCESSABLE_ENTITY)
                                used_ids.append(list_item[desc_item_id])
        _check_unique_id_name(final_content)
        # 1. validate again with pyangbind
        # 1.1. remove internal keys so the information-model validation only sees descriptor fields
        internal_keys = {}
        for k in ("_id", "_admin"):
            if k in final_content:
                internal_keys[k] = final_content.pop(k)
        # NOTE(review): assumes "_admin" is always present in final_content; a missing
        # "_admin" would raise KeyError here — confirm callers guarantee it
        storage_params = internal_keys["_admin"].get("storage")
        serialized = self._validate_input_new(final_content, storage_params, session["force"])
        # 1.2. modify final_content with a serialized version
        final_content.clear()
        final_content.update(serialized)
        # 1.3. restore internal keys
        for k, v in internal_keys.items():
            final_content[k] = v

        # forced edits skip the duplicate-id check
        if session["force"]:
            return
        # 2. check that this id is not present used by another descriptor of this project
        if "id" in edit_content:
            _filter = self._get_project_filter(session)
            _filter["id"] = final_content["id"]
            _filter["_id.neq"] = _id  # exclude the descriptor being edited itself
            if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
                                                                                              final_content["id"]),
                                      HTTPStatus.CONFLICT)
92 |
|
|
93 |
1 |
@staticmethod |
94 |
1 |
def format_on_new(content, project_id=None, make_public=False): |
95 |
1 |
BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public) |
96 |
1 |
content["_admin"]["onboardingState"] = "CREATED" |
97 |
1 |
content["_admin"]["operationalState"] = "DISABLED" |
98 |
1 |
content["_admin"]["usageState"] = "NOT_IN_USE" |
99 |
|
|
100 |
1 |
def delete_extra(self, session, _id, db_content, not_send_msg=None): |
101 |
|
""" |
102 |
|
Deletes file system storage associated with the descriptor |
103 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
104 |
|
:param _id: server internal id |
105 |
|
:param db_content: The database content of the descriptor |
106 |
|
:param not_send_msg: To not send message (False) or store content (list) instead |
107 |
|
:return: None if ok or raises EngineException with the problem |
108 |
|
""" |
109 |
1 |
self.fs.file_delete(_id, ignore_non_exist=True) |
110 |
1 |
self.fs.file_delete(_id + "_", ignore_non_exist=True) # remove temp folder |
111 |
|
|
112 |
1 |
    @staticmethod
    def get_one_by_id(db, session, topic, id):
        """
        Find exactly one descriptor with the given SOL005 "id" field.

        :param db: database backend
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param topic: collection name ("vnfds", "nsds", ...)
        :param id: the descriptor "id" field (not the internal _id)
        :return: the descriptor content
        :raises DbException: NOT_FOUND when no match, CONFLICT on ambiguity
        """
        # find owned by this project
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if len(desc_list) == 1:
            return desc_list[0]
        elif len(desc_list) > 1:
            raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
                              HTTPStatus.CONFLICT)

        # not found any: try to find public
        # NOTE(review): this filter is built exactly like the one above; if a
        # public-inclusive lookup was intended the filter likely needs extra
        # flags — confirm against _get_project_filter's semantics
        _filter = BaseTopic._get_project_filter(session)
        _filter["id"] = id
        desc_list = db.get_list(topic, _filter)
        if not desc_list:
            raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
        elif len(desc_list) == 1:
            return desc_list[0]
        else:
            raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
                topic[:-1], id), HTTPStatus.CONFLICT)
135 |
|
|
136 |
1 |
def new(self, rollback, session, indata=None, kwargs=None, headers=None): |
137 |
|
""" |
138 |
|
Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure. |
139 |
|
Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content |
140 |
|
(self.upload_content) |
141 |
|
:param rollback: list to append created items at database in case a rollback may to be done |
142 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
143 |
|
:param indata: data to be inserted |
144 |
|
:param kwargs: used to override the indata descriptor |
145 |
|
:param headers: http request headers |
146 |
|
:return: _id, None: identity of the inserted data; and None as there is not any operation |
147 |
|
""" |
148 |
|
|
149 |
|
# No needed to capture exceptions |
150 |
|
# Check Quota |
151 |
1 |
self.check_quota(session) |
152 |
|
|
153 |
|
# _remove_envelop |
154 |
1 |
if indata: |
155 |
0 |
if "userDefinedData" in indata: |
156 |
0 |
indata = indata['userDefinedData'] |
157 |
|
|
158 |
|
# Override descriptor with query string kwargs |
159 |
1 |
self._update_input_with_kwargs(indata, kwargs) |
160 |
|
# uncomment when this method is implemented. |
161 |
|
# Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors |
162 |
|
# indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"]) |
163 |
|
|
164 |
1 |
content = {"_admin": {"userDefinedData": indata}} |
165 |
1 |
self.format_on_new(content, session["project_id"], make_public=session["public"]) |
166 |
1 |
_id = self.db.create(self.topic, content) |
167 |
1 |
rollback.append({"topic": self.topic, "_id": _id}) |
168 |
1 |
self._send_msg("created", {"_id": _id}) |
169 |
1 |
return _id, None |
170 |
|
|
171 |
1 |
def upload_content(self, session, _id, indata, kwargs, headers): |
172 |
|
""" |
173 |
|
Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract) |
174 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
175 |
|
:param _id : the nsd,vnfd is already created, this is the id |
176 |
|
:param indata: http body request |
177 |
|
:param kwargs: user query string to override parameters. NOT USED |
178 |
|
:param headers: http request headers |
179 |
|
:return: True if package is completely uploaded or False if partial content has been uploded |
180 |
|
Raise exception on error |
181 |
|
""" |
182 |
|
# Check that _id exists and it is valid |
183 |
1 |
current_desc = self.show(session, _id) |
184 |
|
|
185 |
1 |
content_range_text = headers.get("Content-Range") |
186 |
1 |
expected_md5 = headers.get("Content-File-MD5") |
187 |
1 |
compressed = None |
188 |
1 |
content_type = headers.get("Content-Type") |
189 |
1 |
if content_type and "application/gzip" in content_type or "application/x-gzip" in content_type or \ |
190 |
|
"application/zip" in content_type: |
191 |
0 |
compressed = "gzip" |
192 |
1 |
filename = headers.get("Content-Filename") |
193 |
1 |
if not filename: |
194 |
1 |
filename = "package.tar.gz" if compressed else "package" |
195 |
|
# TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266 |
196 |
1 |
file_pkg = None |
197 |
1 |
error_text = "" |
198 |
1 |
try: |
199 |
1 |
if content_range_text: |
200 |
0 |
content_range = content_range_text.replace("-", " ").replace("/", " ").split() |
201 |
0 |
if content_range[0] != "bytes": # TODO check x<y not negative < total.... |
202 |
0 |
raise IndexError() |
203 |
0 |
start = int(content_range[1]) |
204 |
0 |
end = int(content_range[2]) + 1 |
205 |
0 |
total = int(content_range[3]) |
206 |
|
else: |
207 |
1 |
start = 0 |
208 |
1 |
temp_folder = _id + "_" # all the content is upload here and if ok, it is rename from id_ to is folder |
209 |
|
|
210 |
1 |
if start: |
211 |
0 |
if not self.fs.file_exists(temp_folder, 'dir'): |
212 |
0 |
raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND) |
213 |
|
else: |
214 |
1 |
self.fs.file_delete(temp_folder, ignore_non_exist=True) |
215 |
1 |
self.fs.mkdir(temp_folder) |
216 |
|
|
217 |
1 |
storage = self.fs.get_params() |
218 |
1 |
storage["folder"] = _id |
219 |
|
|
220 |
1 |
file_path = (temp_folder, filename) |
221 |
1 |
if self.fs.file_exists(file_path, 'file'): |
222 |
0 |
file_size = self.fs.file_size(file_path) |
223 |
|
else: |
224 |
1 |
file_size = 0 |
225 |
1 |
if file_size != start: |
226 |
0 |
raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format( |
227 |
|
file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE) |
228 |
1 |
file_pkg = self.fs.file_open(file_path, 'a+b') |
229 |
1 |
if isinstance(indata, dict): |
230 |
1 |
indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False) |
231 |
1 |
file_pkg.write(indata_text.encode(encoding="utf-8")) |
232 |
|
else: |
233 |
0 |
indata_len = 0 |
234 |
0 |
while True: |
235 |
0 |
indata_text = indata.read(4096) |
236 |
0 |
indata_len += len(indata_text) |
237 |
0 |
if not indata_text: |
238 |
0 |
break |
239 |
0 |
file_pkg.write(indata_text) |
240 |
1 |
if content_range_text: |
241 |
0 |
if indata_len != end-start: |
242 |
0 |
raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format( |
243 |
|
start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE) |
244 |
0 |
if end != total: |
245 |
|
# TODO update to UPLOADING |
246 |
0 |
return False |
247 |
|
|
248 |
|
# PACKAGE UPLOADED |
249 |
1 |
if expected_md5: |
250 |
0 |
file_pkg.seek(0, 0) |
251 |
0 |
file_md5 = md5() |
252 |
0 |
chunk_data = file_pkg.read(1024) |
253 |
0 |
while chunk_data: |
254 |
0 |
file_md5.update(chunk_data) |
255 |
0 |
chunk_data = file_pkg.read(1024) |
256 |
0 |
if expected_md5 != file_md5.hexdigest(): |
257 |
0 |
raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT) |
258 |
1 |
file_pkg.seek(0, 0) |
259 |
1 |
if compressed == "gzip": |
260 |
0 |
tar = tarfile.open(mode='r', fileobj=file_pkg) |
261 |
0 |
descriptor_file_name = None |
262 |
0 |
for tarinfo in tar: |
263 |
0 |
tarname = tarinfo.name |
264 |
0 |
tarname_path = tarname.split("/") |
265 |
0 |
if not tarname_path[0] or ".." in tarname_path: # if start with "/" means absolute path |
266 |
0 |
raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz") |
267 |
0 |
if len(tarname_path) == 1 and not tarinfo.isdir(): |
268 |
0 |
raise EngineException("All files must be inside a dir for package descriptor tar.gz") |
269 |
0 |
if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"): |
270 |
0 |
storage["pkg-dir"] = tarname_path[0] |
271 |
0 |
if len(tarname_path) == 2: |
272 |
0 |
if descriptor_file_name: |
273 |
0 |
raise EngineException( |
274 |
|
"Found more than one descriptor file at package descriptor tar.gz") |
275 |
0 |
descriptor_file_name = tarname |
276 |
0 |
if not descriptor_file_name: |
277 |
0 |
raise EngineException("Not found any descriptor file at package descriptor tar.gz") |
278 |
0 |
storage["descriptor"] = descriptor_file_name |
279 |
0 |
storage["zipfile"] = filename |
280 |
0 |
self.fs.file_extract(tar, temp_folder) |
281 |
0 |
with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file: |
282 |
0 |
content = descriptor_file.read() |
283 |
|
else: |
284 |
1 |
content = file_pkg.read() |
285 |
1 |
storage["descriptor"] = descriptor_file_name = filename |
286 |
|
|
287 |
1 |
if descriptor_file_name.endswith(".json"): |
288 |
0 |
error_text = "Invalid json format " |
289 |
0 |
indata = json.load(content) |
290 |
|
else: |
291 |
1 |
error_text = "Invalid yaml format " |
292 |
1 |
indata = yaml.load(content, Loader=yaml.SafeLoader) |
293 |
|
|
294 |
1 |
current_desc["_admin"]["storage"] = storage |
295 |
1 |
current_desc["_admin"]["onboardingState"] = "ONBOARDED" |
296 |
1 |
current_desc["_admin"]["operationalState"] = "ENABLED" |
297 |
|
|
298 |
1 |
indata = self._remove_envelop(indata) |
299 |
|
|
300 |
|
# Override descriptor with query string kwargs |
301 |
1 |
if kwargs: |
302 |
0 |
self._update_input_with_kwargs(indata, kwargs) |
303 |
|
# it will call overrides method at VnfdTopic or NsdTopic |
304 |
|
# indata = self._validate_input_edit(indata, force=session["force"]) |
305 |
|
|
306 |
1 |
deep_update_rfc7396(current_desc, indata) |
307 |
1 |
self.check_conflict_on_edit(session, current_desc, indata, _id=_id) |
308 |
1 |
current_desc["_admin"]["modified"] = time() |
309 |
1 |
self.db.replace(self.topic, _id, current_desc) |
310 |
1 |
self.fs.dir_rename(temp_folder, _id) |
311 |
|
|
312 |
1 |
indata["_id"] = _id |
313 |
1 |
self._send_msg("edited", indata) |
314 |
|
|
315 |
|
# TODO if descriptor has changed because kwargs update content and remove cached zip |
316 |
|
# TODO if zip is not present creates one |
317 |
1 |
return True |
318 |
|
|
319 |
1 |
except EngineException: |
320 |
1 |
raise |
321 |
0 |
except IndexError: |
322 |
0 |
raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'", |
323 |
|
HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE) |
324 |
0 |
except IOError as e: |
325 |
0 |
raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST) |
326 |
0 |
except tarfile.ReadError as e: |
327 |
0 |
raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST) |
328 |
0 |
except (ValueError, yaml.YAMLError) as e: |
329 |
0 |
raise EngineException(error_text + str(e)) |
330 |
0 |
except ValidationError as e: |
331 |
0 |
raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY) |
332 |
|
finally: |
333 |
1 |
if file_pkg: |
334 |
1 |
file_pkg.close() |
335 |
|
|
336 |
1 |
    def get_file(self, session, _id, path=None, accept_header=None):
        """
        Return the file content of a vnfd or nsd
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Identity of the vnfd, nsd
        :param path: artifact path or "$DESCRIPTOR" or None
        :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
        :return: opened file plus Accept format or raises an exception
        """
        # decide which representations the client accepts
        accept_text = accept_zip = False
        if accept_header:
            if 'text/plain' in accept_header or '*/*' in accept_header:
                accept_text = True
            if 'application/zip' in accept_header or '*/*' in accept_header:
                accept_zip = 'application/zip'
            elif 'application/gzip' in accept_header:
                accept_zip = 'application/gzip'

        if not accept_text and not accept_zip:
            raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
                                  http_code=HTTPStatus.NOT_ACCEPTABLE)

        content = self.show(session, _id)
        # only fully onboarded packages have stored content to serve
        if content["_admin"]["onboardingState"] != "ONBOARDED":
            raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
                                  "onboardingState is {}".format(content["_admin"]["onboardingState"]),
                                  http_code=HTTPStatus.CONFLICT)
        storage = content["_admin"]["storage"]
        # artifact request: anything that is not the whole package nor the descriptor
        # NOTE(review): path is unpacked with *path, so it is assumed to be a sequence
        # of path components here — confirm against the HTTP layer
        if path is not None and path != "$DESCRIPTOR":   # artifacts
            if not storage.get('pkg-dir'):
                raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
            if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
                # directory: return its listing as plain text
                folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
                return folder_content, "text/plain"
                # TODO manage folders in http
            else:
                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
                    "application/octet-stream"

        # pkgtype   accept  ZIP  TEXT    -> result
        # manyfiles         yes  X       -> zip
        #                   no   yes     -> error
        # onefile           yes  no      -> zip
        #                   X    yes     -> text
        contain_many_files = False
        if storage.get('pkg-dir'):
            # check if there are more than one file in the package, ignoring checksums.txt.
            pkg_files = self.fs.dir_ls((storage['folder'], storage['pkg-dir']))
            if len(pkg_files) >= 3 or (len(pkg_files) == 2 and 'checksums.txt' not in pkg_files):
                contain_many_files = True
        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
            # single descriptor file served as plain text
            return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
        elif contain_many_files and not accept_zip:
            raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
                                  "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
        else:
            if not storage.get('zipfile'):
                # TODO generate zipfile if not present
                raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
                                      "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
            return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
397 |
|
|
398 |
1 |
def pyangbind_validation(self, item, data, force=False): |
399 |
1 |
try: |
400 |
1 |
if item == "vnfds": |
401 |
1 |
myvnfd = vnfd_im() |
402 |
1 |
pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd, |
403 |
|
path_helper=True, skip_unknown=force) |
404 |
1 |
out = pybindJSON.dumps(myvnfd, mode="ietf") |
405 |
1 |
elif item == "nsds": |
406 |
1 |
mynsd = nsd_im() |
407 |
1 |
pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd, |
408 |
|
path_helper=True, skip_unknown=force) |
409 |
1 |
out = pybindJSON.dumps(mynsd, mode="ietf") |
410 |
0 |
elif item == "nsts": |
411 |
0 |
mynst = nst_im() |
412 |
0 |
pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst, |
413 |
|
path_helper=True, skip_unknown=force) |
414 |
0 |
out = pybindJSON.dumps(mynst, mode="ietf") |
415 |
|
else: |
416 |
0 |
raise EngineException("Not possible to validate '{}' item".format(item), |
417 |
|
http_code=HTTPStatus.INTERNAL_SERVER_ERROR) |
418 |
|
|
419 |
1 |
desc_out = self._remove_envelop(yaml.safe_load(out)) |
420 |
1 |
return desc_out |
421 |
|
|
422 |
1 |
except Exception as e: |
423 |
1 |
raise EngineException("Error in pyangbind validation: {}".format(str(e)), |
424 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
425 |
|
|
426 |
1 |
def _validate_input_edit(self, indata, content, force=False): |
427 |
|
# not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit |
428 |
1 |
if "_id" in indata: |
429 |
0 |
indata.pop("_id") |
430 |
1 |
if "_admin" not in indata: |
431 |
1 |
indata["_admin"] = {} |
432 |
|
|
433 |
1 |
if "operationalState" in indata: |
434 |
0 |
if indata["operationalState"] in ("ENABLED", "DISABLED"): |
435 |
0 |
indata["_admin"]["operationalState"] = indata.pop("operationalState") |
436 |
|
else: |
437 |
0 |
raise EngineException("State '{}' is not a valid operational state" |
438 |
|
.format(indata["operationalState"]), |
439 |
|
http_code=HTTPStatus.BAD_REQUEST) |
440 |
|
|
441 |
|
# In the case of user defined data, we need to put the data in the root of the object |
442 |
|
# to preserve current expected behaviour |
443 |
1 |
if "userDefinedData" in indata: |
444 |
0 |
data = indata.pop("userDefinedData") |
445 |
0 |
if type(data) == dict: |
446 |
0 |
indata["_admin"]["userDefinedData"] = data |
447 |
|
else: |
448 |
0 |
raise EngineException("userDefinedData should be an object, but is '{}' instead" |
449 |
|
.format(type(data)), |
450 |
|
http_code=HTTPStatus.BAD_REQUEST) |
451 |
|
|
452 |
1 |
if ("operationalState" in indata["_admin"] and |
453 |
|
content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]): |
454 |
0 |
raise EngineException("operationalState already {}".format(content["_admin"]["operationalState"]), |
455 |
|
http_code=HTTPStatus.CONFLICT) |
456 |
|
|
457 |
1 |
return indata |
458 |
|
|
459 |
|
|
460 |
1 |
class VnfdTopic(DescriptorTopic):
    """Topic handling VNF descriptors."""

    topic = "vnfds"     # database collection name
    topic_msg = "vnfd"  # message-bus topic name

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)
466 |
|
|
467 |
1 |
@staticmethod |
468 |
1 |
def _remove_envelop(indata=None): |
469 |
1 |
if not indata: |
470 |
0 |
return {} |
471 |
1 |
clean_indata = indata |
472 |
1 |
if clean_indata.get('vnfd:vnfd-catalog'): |
473 |
1 |
clean_indata = clean_indata['vnfd:vnfd-catalog'] |
474 |
1 |
elif clean_indata.get('vnfd-catalog'): |
475 |
0 |
clean_indata = clean_indata['vnfd-catalog'] |
476 |
1 |
if clean_indata.get('vnfd'): |
477 |
1 |
if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1: |
478 |
1 |
raise EngineException("'vnfd' must be a list of only one element") |
479 |
1 |
clean_indata = clean_indata['vnfd'][0] |
480 |
1 |
elif clean_indata.get('vnfd:vnfd'): |
481 |
0 |
if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1: |
482 |
0 |
raise EngineException("'vnfd:vnfd' must be a list of only one element") |
483 |
0 |
clean_indata = clean_indata['vnfd:vnfd'][0] |
484 |
1 |
return clean_indata |
485 |
|
|
486 |
1 |
def check_conflict_on_edit(self, session, final_content, edit_content, _id): |
487 |
1 |
super().check_conflict_on_edit(session, final_content, edit_content, _id) |
488 |
|
|
489 |
|
# set type of vnfd |
490 |
1 |
contains_pdu = False |
491 |
1 |
contains_vdu = False |
492 |
1 |
for vdu in get_iterable(final_content.get("vdu")): |
493 |
1 |
if vdu.get("pdu-type"): |
494 |
0 |
contains_pdu = True |
495 |
|
else: |
496 |
1 |
contains_vdu = True |
497 |
1 |
if contains_pdu: |
498 |
0 |
final_content["_admin"]["type"] = "hnfd" if contains_vdu else "pnfd" |
499 |
1 |
elif contains_vdu: |
500 |
1 |
final_content["_admin"]["type"] = "vnfd" |
501 |
|
# if neither vud nor pdu do not fill type |
502 |
|
|
503 |
1 |
def check_conflict_on_del(self, session, _id, db_content): |
504 |
|
""" |
505 |
|
Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note |
506 |
|
that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr |
507 |
|
that uses this vnfd |
508 |
|
:param session: contains "username", "admin", "force", "public", "project_id", "set_project" |
509 |
|
:param _id: vnfd internal id |
510 |
|
:param db_content: The database content of the _id. |
511 |
|
:return: None or raises EngineException with the conflict |
512 |
|
""" |
513 |
1 |
if session["force"]: |
514 |
0 |
return |
515 |
1 |
descriptor = db_content |
516 |
1 |
descriptor_id = descriptor.get("id") |
517 |
1 |
if not descriptor_id: # empty vnfd not uploaded |
518 |
0 |
return |
519 |
|
|
520 |
1 |
_filter = self._get_project_filter(session) |
521 |
|
|
522 |
|
# check vnfrs using this vnfd |
523 |
1 |
_filter["vnfd-id"] = _id |
524 |
1 |
if self.db.get_list("vnfrs", _filter): |
525 |
1 |
raise EngineException("There is at least one VNF using this descriptor", http_code=HTTPStatus.CONFLICT) |
526 |
|
|
527 |
|
# check NSD referencing this VNFD |
528 |
1 |
del _filter["vnfd-id"] |
529 |
1 |
_filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id |
530 |
1 |
if self.db.get_list("nsds", _filter): |
531 |
1 |
raise EngineException("There is at least one NSD referencing this descriptor", |
532 |
|
http_code=HTTPStatus.CONFLICT) |
533 |
|
|
534 |
1 |
def _validate_input_new(self, indata, storage_params, force=False): |
535 |
1 |
indata.pop("onboardingState", None) |
536 |
1 |
indata.pop("operationalState", None) |
537 |
1 |
indata.pop("usageState", None) |
538 |
|
|
539 |
1 |
indata.pop("links", None) |
540 |
|
|
541 |
1 |
indata = self.pyangbind_validation("vnfds", indata, force) |
542 |
|
# Cross references validation in the descriptor |
543 |
1 |
if indata.get("vdu"): |
544 |
1 |
if not indata.get("mgmt-interface"): |
545 |
1 |
raise EngineException("'mgmt-interface' is a mandatory field and it is not defined", |
546 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
547 |
1 |
if indata["mgmt-interface"].get("cp"): |
548 |
1 |
for cp in get_iterable(indata.get("connection-point")): |
549 |
1 |
if cp["name"] == indata["mgmt-interface"]["cp"]: |
550 |
1 |
break |
551 |
|
else: |
552 |
1 |
raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point" |
553 |
|
.format(indata["mgmt-interface"]["cp"]), |
554 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
555 |
|
|
556 |
1 |
for vdu in get_iterable(indata.get("vdu")): |
557 |
1 |
icp_refs = [] |
558 |
1 |
ecp_refs = [] |
559 |
1 |
for interface in get_iterable(vdu.get("interface")): |
560 |
1 |
if interface.get("external-connection-point-ref"): |
561 |
1 |
if interface.get("external-connection-point-ref") in ecp_refs: |
562 |
0 |
raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' " |
563 |
|
"is referenced by other interface" |
564 |
|
.format(vdu["id"], interface["name"], |
565 |
|
interface["external-connection-point-ref"]), |
566 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
567 |
1 |
ecp_refs.append(interface.get("external-connection-point-ref")) |
568 |
1 |
for cp in get_iterable(indata.get("connection-point")): |
569 |
1 |
if cp["name"] == interface["external-connection-point-ref"]: |
570 |
1 |
break |
571 |
|
else: |
572 |
1 |
raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' " |
573 |
|
"must match an existing connection-point" |
574 |
|
.format(vdu["id"], interface["name"], |
575 |
|
interface["external-connection-point-ref"]), |
576 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
577 |
1 |
elif interface.get("internal-connection-point-ref"): |
578 |
1 |
if interface.get("internal-connection-point-ref") in icp_refs: |
579 |
0 |
raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' " |
580 |
|
"is referenced by other interface" |
581 |
|
.format(vdu["id"], interface["name"], |
582 |
|
interface["internal-connection-point-ref"]), |
583 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
584 |
1 |
icp_refs.append(interface.get("internal-connection-point-ref")) |
585 |
1 |
for internal_cp in get_iterable(vdu.get("internal-connection-point")): |
586 |
1 |
if interface["internal-connection-point-ref"] == internal_cp.get("id"): |
587 |
1 |
break |
588 |
|
else: |
589 |
1 |
raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' " |
590 |
|
"must match an existing vdu:internal-connection-point" |
591 |
|
.format(vdu["id"], interface["name"], |
592 |
|
interface["internal-connection-point-ref"]), |
593 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
594 |
|
# Validate that if descriptor contains charms, artifacts _admin.storage."pkg-dir" is not none |
595 |
1 |
if vdu.get("vdu-configuration"): |
596 |
1 |
if vdu["vdu-configuration"].get("juju"): |
597 |
1 |
if not self._validate_package_folders(storage_params, 'charms'): |
598 |
1 |
raise EngineException("Charm defined in vnf[id={}]:vdu[id={}] but not present in " |
599 |
|
"package".format(indata["id"], vdu["id"])) |
600 |
|
# Validate that if descriptor contains cloud-init, artifacts _admin.storage."pkg-dir" is not none |
601 |
1 |
if vdu.get("cloud-init-file"): |
602 |
1 |
if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]): |
603 |
1 |
raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in " |
604 |
|
"package".format(indata["id"], vdu["id"])) |
605 |
|
# Validate that if descriptor contains charms, artifacts _admin.storage."pkg-dir" is not none |
606 |
1 |
if indata.get("vnf-configuration"): |
607 |
1 |
if indata["vnf-configuration"].get("juju"): |
608 |
1 |
if not self._validate_package_folders(storage_params, 'charms'): |
609 |
1 |
raise EngineException("Charm defined in vnf[id={}] but not present in " |
610 |
|
"package".format(indata["id"])) |
611 |
1 |
vld_names = [] # For detection of duplicated VLD names |
612 |
1 |
for ivld in get_iterable(indata.get("internal-vld")): |
613 |
|
# BEGIN Detection of duplicated VLD names |
614 |
1 |
ivld_name = ivld.get("name") |
615 |
1 |
if ivld_name: |
616 |
1 |
if ivld_name in vld_names: |
617 |
1 |
raise EngineException("Duplicated VLD name '{}' in vnfd[id={}]:internal-vld[id={}]" |
618 |
|
.format(ivld["name"], indata["id"], ivld["id"]), |
619 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
620 |
|
else: |
621 |
1 |
vld_names.append(ivld_name) |
622 |
|
# END Detection of duplicated VLD names |
623 |
1 |
for icp in get_iterable(ivld.get("internal-connection-point")): |
624 |
1 |
icp_mark = False |
625 |
1 |
for vdu in get_iterable(indata.get("vdu")): |
626 |
1 |
for internal_cp in get_iterable(vdu.get("internal-connection-point")): |
627 |
1 |
if icp["id-ref"] == internal_cp["id"]: |
628 |
1 |
icp_mark = True |
629 |
1 |
break |
630 |
1 |
if icp_mark: |
631 |
1 |
break |
632 |
|
else: |
633 |
1 |
raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing " |
634 |
|
"vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]), |
635 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
636 |
1 |
if ivld.get("ip-profile-ref"): |
637 |
1 |
for ip_prof in get_iterable(indata.get("ip-profiles")): |
638 |
1 |
if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")): |
639 |
0 |
break |
640 |
|
else: |
641 |
1 |
raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format( |
642 |
|
ivld["id"], ivld["ip-profile-ref"]), |
643 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
644 |
1 |
for mp in get_iterable(indata.get("monitoring-param")): |
645 |
1 |
if mp.get("vdu-monitoring-param"): |
646 |
1 |
mp_vmp_mark = False |
647 |
1 |
for vdu in get_iterable(indata.get("vdu")): |
648 |
1 |
for vmp in get_iterable(vdu.get("monitoring-param")): |
649 |
1 |
if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\ |
650 |
|
mp["vdu-monitoring-param"]["vdu-ref"]: |
651 |
1 |
mp_vmp_mark = True |
652 |
1 |
break |
653 |
1 |
if mp_vmp_mark: |
654 |
1 |
break |
655 |
|
else: |
656 |
1 |
raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not " |
657 |
|
"defined at vdu[id='{}'] or vdu does not exist" |
658 |
|
.format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"], |
659 |
|
mp["vdu-monitoring-param"]["vdu-ref"]), |
660 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
661 |
1 |
elif mp.get("vdu-metric"): |
662 |
1 |
mp_vm_mark = False |
663 |
1 |
for vdu in get_iterable(indata.get("vdu")): |
664 |
1 |
if vdu.get("vdu-configuration"): |
665 |
1 |
for metric in get_iterable(vdu["vdu-configuration"].get("metrics")): |
666 |
1 |
if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \ |
667 |
|
mp["vdu-metric"]["vdu-ref"]: |
668 |
0 |
mp_vm_mark = True |
669 |
0 |
break |
670 |
1 |
if mp_vm_mark: |
671 |
0 |
break |
672 |
|
else: |
673 |
1 |
raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at " |
674 |
|
"vdu[id='{}'] or vdu does not exist" |
675 |
|
.format(mp["vdu-metric"]["vdu-metric-name-ref"], |
676 |
|
mp["vdu-metric"]["vdu-ref"]), |
677 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
678 |
|
|
679 |
1 |
for sgd in get_iterable(indata.get("scaling-group-descriptor")): |
680 |
1 |
for sp in get_iterable(sgd.get("scaling-policy")): |
681 |
1 |
for sc in get_iterable(sp.get("scaling-criteria")): |
682 |
1 |
for mp in get_iterable(indata.get("monitoring-param")): |
683 |
1 |
if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")): |
684 |
1 |
break |
685 |
|
else: |
686 |
1 |
raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:" |
687 |
|
"vnf-monitoring-param-ref='{}' not defined in any monitoring-param" |
688 |
|
.format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]), |
689 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
690 |
1 |
for sgd_vdu in get_iterable(sgd.get("vdu")): |
691 |
1 |
sgd_vdu_mark = False |
692 |
1 |
for vdu in get_iterable(indata.get("vdu")): |
693 |
1 |
if vdu["id"] == sgd_vdu["vdu-id-ref"]: |
694 |
1 |
sgd_vdu_mark = True |
695 |
1 |
break |
696 |
1 |
if sgd_vdu_mark: |
697 |
1 |
break |
698 |
|
else: |
699 |
1 |
raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu" |
700 |
|
.format(sgd["name"], sgd_vdu["vdu-id-ref"]), |
701 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
702 |
1 |
for sca in get_iterable(sgd.get("scaling-config-action")): |
703 |
1 |
if not indata.get("vnf-configuration"): |
704 |
1 |
raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by " |
705 |
|
"scaling-group-descriptor[name='{}']:scaling-config-action" |
706 |
|
.format(sgd["name"]), |
707 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
708 |
1 |
for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")): |
709 |
1 |
if primitive["name"] == sca["vnf-config-primitive-name-ref"]: |
710 |
1 |
break |
711 |
|
else: |
712 |
1 |
raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-" |
713 |
|
"primitive-name-ref='{}' does not match any " |
714 |
|
"vnf-configuration:config-primitive:name" |
715 |
|
.format(sgd["name"], sca["vnf-config-primitive-name-ref"]), |
716 |
|
http_code=HTTPStatus.UNPROCESSABLE_ENTITY) |
717 |
1 |
return indata |
718 |
|
|
719 |
1 |
def _validate_package_folders(self, storage_params, folder, file=None): |
720 |
1 |
if not storage_params or not storage_params.get("pkg-dir"): |
721 |
1 |
return False |
722 |
|
else: |
723 |
1 |
if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'): |
724 |
1 |
f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder) |
725 |
|
else: |
726 |
0 |
f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder) |
727 |
1 |
if file: |
728 |
1 |
return self.fs.file_exists("{}/{}".format(f, file), 'file') |
729 |
|
else: |
730 |
1 |
if self.fs.file_exists(f, 'dir'): |
731 |
1 |
if self.fs.dir_ls(f): |
732 |
1 |
return True |
733 |
0 |
return False |
734 |
|
|
735 |
1 |
def delete_extra(self, session, _id, db_content, not_send_msg=None):
    """
    Remove the descriptor's side content: its file-system storage (delegated to
    the parent class) plus any vnfpkgops records referencing this package.

    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param _id: server internal id
    :param db_content: The database content of the descriptor
    :param not_send_msg: messages not to send to kafka, passed through to super
    :return: None
    :raises: FsException in case of error while deleting associated storage
    """
    super().delete_extra(session, _id, db_content, not_send_msg)
    # Every package-operation occurrence of this package becomes orphaned: drop them
    ops_of_this_pkg = {"vnfPkgId": _id}
    self.db.del_list("vnfpkgops", ops_of_this_pkg)
747 |
|
|
748 |
1 |
def sol005_projection(self, data):
    """Project internal _admin state and hypermedia links into the SOL005 view of a VNF package."""
    admin = data["_admin"]
    data["onboardingState"] = admin["onboardingState"]
    data["operationalState"] = admin["operationalState"]
    data["usageState"] = admin["usageState"]

    # SOL005 hypermedia links, all rooted at this package's resource URL
    base = "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])
    data["_links"] = {
        "self": {"href": base},
        "vnfd": {"href": "{}/vnfd".format(base)},
        "packageContent": {"href": "{}/package_content".format(base)},
    }

    return super().sol005_projection(data)
760 |
|
|
761 |
|
|
762 |
1 |
class NsdTopic(DescriptorTopic):
    """Engine topic handling NS descriptors (NSD): envelope stripping, model and
    cross-reference validation, dependency checks and SOL005 projection."""
    topic = "nsds"       # database collection
    topic_msg = "nsd"    # kafka topic

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the outer 'nsd(-catalog)' envelopes and return the bare nsd dict.

        :param indata: uploaded content, possibly wrapped in catalog/list envelopes
        :return: the inner nsd dictionary, or {} when indata is empty
        :raises: EngineException when the 'nsd' list does not hold exactly one item
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get('nsd:nsd-catalog'):
            clean_indata = clean_indata['nsd:nsd-catalog']
        elif clean_indata.get('nsd-catalog'):
            clean_indata = clean_indata['nsd-catalog']
        if clean_indata.get('nsd'):
            if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
                raise EngineException("'nsd' must be a list of only one element")
            clean_indata = clean_indata['nsd'][0]
        elif clean_indata.get('nsd:nsd'):
            if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
                raise EngineException("'nsd:nsd' must be a list of only one element")
            clean_indata = clean_indata['nsd:nsd'][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NSD against the pyangbind model and its internal cross references.

        :param indata: descriptor content to validate (SOL005 read-only keys are dropped)
        :param storage_params: _admin.storage of the package (currently unused here)
        :param force: propagated to pyangbind validation
        :return: the validated (possibly normalized) indata
        :raises: EngineException on any validation failure
        """
        indata.pop("nsdOnboardingState", None)
        indata.pop("nsdOperationalState", None)
        indata.pop("nsdUsageState", None)

        indata.pop("links", None)

        indata = self.pyangbind_validation("nsds", indata, force)
        # Cross references validation in the descriptor
        # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
        for vld in get_iterable(indata.get("vld")):
            if vld.get("mgmt-network") and vld.get("ip-profile-ref"):
                raise EngineException("Error at vld[id='{}']:ip-profile-ref"
                                      " You cannot set an ip-profile when mgmt-network is True"
                                      .format(vld["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
            for vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
                for constituent_vnfd in get_iterable(indata.get("constituent-vnfd")):
                    if vnfd_cp["member-vnf-index-ref"] == constituent_vnfd["member-vnf-index"]:
                        if vnfd_cp.get("vnfd-id-ref") and vnfd_cp["vnfd-id-ref"] != constituent_vnfd["vnfd-id-ref"]:
                            raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[vnfd-id-ref='{}'] "
                                                  "does not match constituent-vnfd[member-vnf-index='{}']:vnfd-id-ref"
                                                  " '{}'".format(vld["id"], vnfd_cp["vnfd-id-ref"],
                                                                 constituent_vnfd["member-vnf-index"],
                                                                 constituent_vnfd["vnfd-id-ref"]),
                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
                        break
                else:
                    raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
                                          "does not match any constituent-vnfd:member-vnf-index"
                                          .format(vld["id"], vnfd_cp["member-vnf-index-ref"]),
                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        # Check VNFFGD: every classifier's rsp-id-ref must name an existing rsp
        for fgd in get_iterable(indata.get("vnffgd")):
            for cls in get_iterable(fgd.get("classifier")):
                rspref = cls.get("rsp-id-ref")
                for rsp in get_iterable(fgd.get("rsp")):
                    rspid = rsp.get("id")
                    if rspid and rspref and rspid == rspref:
                        break
                else:
                    raise EngineException(
                        "Error at vnffgd[id='{}']:classifier[id='{}']:rsp-id-ref '{}' does not match any rsp:id"
                        .format(fgd["id"], cls["id"], rspref),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
        return indata

    def _validate_input_edit(self, indata, content, force=False):
        # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
        """
        Normalize an edit payload, moving SOL005 keys under _admin.

        indata looks as follows:
            - In the new case (conformant)
                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
            - In the old case (backwards-compatible)
                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}

        :param indata: edit payload (modified in place and returned)
        :param content: current database content of the descriptor
        :param force: unused here; kept for interface compatibility
        :return: normalized indata
        :raises: EngineException on invalid state, invalid userDefinedData type, or no-op state change
        """
        if "_admin" not in indata:
            indata["_admin"] = {}

        if "nsdOperationalState" in indata:
            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
            else:
                raise EngineException("State '{}' is not a valid operational state"
                                      .format(indata["nsdOperationalState"]),
                                      http_code=HTTPStatus.BAD_REQUEST)

        # In the case of user defined data, we need to put the data in the root of the object
        # to preserve current expected behaviour
        if "userDefinedData" in indata:
            data = indata.pop("userDefinedData")
            # isinstance instead of exact-type compare: idiomatic and accepts dict subclasses
            if isinstance(data, dict):
                indata["_admin"]["userDefinedData"] = data
            else:
                raise EngineException("userDefinedData should be an object, but is '{}' instead"
                                      .format(type(data)),
                                      http_code=HTTPStatus.BAD_REQUEST)
        if ("operationalState" in indata["_admin"] and
                content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
            raise EngineException("nsdOperationalState already {}".format(content["_admin"]["operationalState"]),
                                  http_code=HTTPStatus.CONFLICT)
        return indata

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition. Also checks references to vnfd
        connection points are ok
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if session["force"]:
            return
        member_vnfd_index = {}
        # NOTE: the redundant 'and not session["force"]' was removed; force already returned above
        if descriptor.get("constituent-vnfd"):
            for vnf in descriptor["constituent-vnfd"]:
                vnfd_id = vnf["vnfd-id-ref"]
                filter_q = self._get_project_filter(session)
                filter_q["id"] = vnfd_id
                vnf_list = self.db.get_list("vnfds", filter_q)
                if not vnf_list:
                    raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
                                          "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
                # elif len(vnf_list) > 1:
                #     raise EngineException("More than one vnfd found for id='{}'".format(vnfd_id),
                #                           http_code=HTTPStatus.CONFLICT)
                member_vnfd_index[vnf["member-vnf-index"]] = vnf_list[0]

        # Cross references validation in the descriptor and vnfd connection point validation
        for vld in get_iterable(descriptor.get("vld")):
            for referenced_vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
                # look if this vnfd contains this connection point
                vnfd = member_vnfd_index.get(referenced_vnfd_cp["member-vnf-index-ref"])
                if not vnfd:
                    # Unknown member-vnf-index-ref: this inconsistency is reported by
                    # _validate_input_new; guard here to avoid an AttributeError on None
                    continue
                for vnfd_cp in get_iterable(vnfd.get("connection-point")):
                    if referenced_vnfd_cp.get("vnfd-connection-point-ref") == vnfd_cp["name"]:
                        break
                else:
                    raise EngineException(
                        "Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}']:vnfd-"
                        "connection-point-ref='{}' references a non existing conection-point:name inside vnfd '{}'"
                        .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"],
                                referenced_vnfd_cp["vnfd-connection-point-ref"], vnfd["id"]),
                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        super().check_conflict_on_edit(session, final_content, edit_content, _id)

        self._check_descriptor_dependencies(session, final_content)

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
        that NSD can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nsd internal id
        :param db_content: The database content of the _id
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return
        descriptor = db_content
        descriptor_id = descriptor.get("id")
        if not descriptor_id:  # empty nsd not uploaded
            return

        # check NSD used by NS
        _filter = self._get_project_filter(session)
        _filter["nsd-id"] = _id
        if self.db.get_list("nsrs", _filter):
            raise EngineException("There is at least one NS using this descriptor", http_code=HTTPStatus.CONFLICT)

        # check NSD referenced by NST
        del _filter["nsd-id"]
        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
        if self.db.get_list("nsts", _filter):
            raise EngineException("There is at least one NetSlice Template referencing this descriptor",
                                  http_code=HTTPStatus.CONFLICT)

    def sol005_projection(self, data):
        """Project internal _admin state and hypermedia links into the SOL005 NSD view."""
        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
        data["nsdOperationalState"] = data["_admin"]["operationalState"]
        data["nsdUsageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
        links["nsd_content"] = {"href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
958 |
|
|
959 |
|
|
960 |
1 |
class NstTopic(DescriptorTopic):
    """Engine topic handling Network Slice Templates (NST)."""
    topic = "nsts"                  # database collection
    topic_msg = "nst"               # kafka topic
    quota_name = "slice_templates"  # per-project quota key

    def __init__(self, db, fs, msg, auth):
        DescriptorTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def _remove_envelop(indata=None):
        """Strip the outer 'nst' envelopes and return the bare nst dict.

        :param indata: uploaded content, possibly wrapped in a one-element list envelope
        :return: the inner nst dictionary, or {} when indata is empty
        :raises: EngineException when the 'nst' list does not hold exactly one item
        """
        if not indata:
            return {}
        clean_indata = indata

        if clean_indata.get('nst'):
            if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
                # message fixed ("a list of only one element") for grammar and
                # consistency with NsdTopic._remove_envelop
                raise EngineException("'nst' must be a list of only one element")
            clean_indata = clean_indata['nst'][0]
        elif clean_indata.get('nst:nst'):
            if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
                raise EngineException("'nst:nst' must be a list of only one element")
            clean_indata = clean_indata['nst:nst'][0]
        return clean_indata

    def _validate_input_new(self, indata, storage_params, force=False):
        """Validate a new NST against the pyangbind model.

        :param indata: template content; SOL005 read-only state keys are dropped first
        :param storage_params: _admin.storage of the package (unused here)
        :param force: propagated to pyangbind validation
        :return: a copy of the validated indata
        """
        indata.pop("onboardingState", None)
        indata.pop("operationalState", None)
        indata.pop("usageState", None)
        indata = self.pyangbind_validation("nsts", indata, force)
        return indata.copy()

    def _check_descriptor_dependencies(self, session, descriptor):
        """
        Check that the dependent descriptors exist on a new descriptor or edition
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param descriptor: descriptor to be inserted or edit
        :return: None or raises exception
        """
        if not descriptor.get("netslice-subnet"):
            return
        for nsd in descriptor["netslice-subnet"]:
            nsd_id = nsd["nsd-ref"]
            filter_q = self._get_project_filter(session)
            filter_q["id"] = nsd_id
            if not self.db.get_list("nsds", filter_q):
                raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
                                      "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)

    def check_conflict_on_edit(self, session, final_content, edit_content, _id):
        super().check_conflict_on_edit(session, final_content, edit_content, _id)

        self._check_descriptor_dependencies(session, final_content)

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
        that NST can be public and be used by other projects.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: nst internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        # TODO: Check this method
        if session["force"]:
            return
        # Get Network Slice Template from Database
        _filter = self._get_project_filter(session)
        _filter["_admin.nst-id"] = _id
        if self.db.get_list("nsis", _filter):
            raise EngineException("there is at least one Netslice Instance using this descriptor",
                                  http_code=HTTPStatus.CONFLICT)

    def sol005_projection(self, data):
        """Project internal _admin state and hypermedia links into the SOL005 NST view."""
        data["onboardingState"] = data["_admin"]["onboardingState"]
        data["operationalState"] = data["_admin"]["operationalState"]
        data["usageState"] = data["_admin"]["usageState"]

        links = {}
        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
        data["_links"] = links

        return super().sol005_projection(data)
1043 |
|
|
1044 |
|
|
1045 |
1 |
class PduTopic(BaseTopic):
    """Engine topic handling Physical Deployment Units (PDU)."""
    topic = "pdus"
    topic_msg = "pdu"
    quota_name = "pduds"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg, auth):
        BaseTopic.__init__(self, db, fs, msg, auth)

    @staticmethod
    def format_on_new(content, project_id=None, make_public=False):
        """Complete a freshly created PDU with its initial _admin lifecycle states."""
        BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
        admin = content["_admin"]
        admin["onboardingState"] = "CREATED"
        admin["operationalState"] = "ENABLED"
        admin["usageState"] = "NOT_IN_USE"

    def check_conflict_on_del(self, session, _id, db_content):
        """
        Check that there is not any vnfr that uses this PDU
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: pdu internal id
        :param db_content: The database content of the _id.
        :return: None or raises EngineException with the conflict
        """
        if session["force"]:
            return

        # A vnfr whose vdur references this PDU makes the deletion conflict
        query = self._get_project_filter(session)
        query["vdur.pdu-id"] = _id
        if self.db.get_list("vnfrs", query):
            raise EngineException("There is at least one VNF using this PDU", http_code=HTTPStatus.CONFLICT)
1077 |
|
|
1078 |
|
|
1079 |
1 |
class VnfPkgOpTopic(BaseTopic): |
1080 |
1 |
topic = "vnfpkgops" |
1081 |
1 |
topic_msg = "vnfd" |
1082 |
1 |
schema_new = vnfpkgop_new_schema |
1083 |
1 |
schema_edit = None |
1084 |
|
|
1085 |
1 |
def __init__(self, db, fs, msg, auth):
    """Delegate construction to the BaseTopic parent."""
    super().__init__(db, fs, msg, auth)
1087 |
|
|
1088 |
1 |
def edit(self, session, _id, indata=None, kwargs=None, content=None):
    """Package operation occurrences are immutable; editing is always rejected."""
    detail = "Method 'edit' not allowed for topic '{}'".format(self.topic)
    raise EngineException(detail, HTTPStatus.METHOD_NOT_ALLOWED)
1091 |
|
|
1092 |
1 |
def delete(self, session, _id, dry_run=False):
    """Package operation occurrences cannot be deleted individually; always rejected."""
    detail = "Method 'delete' not allowed for topic '{}'".format(self.topic)
    raise EngineException(detail, HTTPStatus.METHOD_NOT_ALLOWED)
1095 |
|
|
1096 |
1 |
def delete_list(self, session, filter_q=None):
    """Bulk deletion of package operation occurrences is not supported; always rejected."""
    detail = "Method 'delete_list' not allowed for topic '{}'".format(self.topic)
    raise EngineException(detail, HTTPStatus.METHOD_NOT_ALLOWED)
1099 |
|
|
1100 |
1 |
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
    """
    Creates a new entry into database.
    :param rollback: list to append created items at database in case a rollback may to be done
    :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
    :param indata: data to be inserted
    :param kwargs: used to override the indata descriptor
    :param headers: http request headers
    :return: _id, op_id:
        _id: identity of the inserted data.
        op_id: None
    """
    def _repo_name_of(reference):
        # A kdu reference shaped "<repo>/<name>" points into a catalogued k8s
        # repository; a bare name carries no repo part (factored out of the
        # previously duplicated helm-chart / juju-bundle branches)
        match = fullmatch(r"([^/]*)/([^/]*)", reference)
        return match.group(1) if match else None

    self._update_input_with_kwargs(indata, kwargs)
    validate_input(indata, self.schema_new)
    vnfpkg_id = indata["vnfPkgId"]
    filter_q = BaseTopic._get_project_filter(session)
    filter_q["_id"] = vnfpkg_id
    vnfd = self.db.get_one("vnfds", filter_q)
    operation = indata["lcmOperationType"]
    kdu_name = indata["kdu_name"]

    # Locate the referenced kdu inside the vnfd
    for kdu in vnfd.get("kdu", []):
        if kdu["name"] == kdu_name:
            helm_chart = kdu.get("helm-chart")
            juju_bundle = kdu.get("juju-bundle")
            break
    else:
        raise EngineException("Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name))

    # A kdu is deployed either as a helm chart or as a juju bundle; record which
    # one applies and extract the optional repository prefix
    if helm_chart:
        indata["helm-chart"] = helm_chart
        repo_name = _repo_name_of(helm_chart)
    elif juju_bundle:
        indata["juju-bundle"] = juju_bundle
        repo_name = _repo_name_of(juju_bundle)
    else:
        raise EngineException("Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']"
                              .format(vnfpkg_id, kdu_name))

    # Resolve the repository name to its catalogued id/url within the project scope
    if repo_name:
        del filter_q["_id"]
        filter_q["name"] = repo_name
        repo = self.db.get_one("k8srepos", filter_q)
        k8srepo_id = repo.get("_id")
        k8srepo_url = repo.get("url")
    else:
        k8srepo_id = None
        k8srepo_url = None
    indata["k8srepoId"] = k8srepo_id
    indata["k8srepo_url"] = k8srepo_url

    # Build, persist and announce the operation occurrence record
    vnfpkgop_id = str(uuid4())
    vnfpkgop_desc = {
        "_id": vnfpkgop_id,
        "operationState": "PROCESSING",
        "vnfPkgId": vnfpkg_id,
        "lcmOperationType": operation,
        "isAutomaticInvocation": False,
        "isCancelPending": False,
        "operationParams": indata,
        "links": {
            "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
            "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
        }
    }
    self.format_on_new(vnfpkgop_desc, session["project_id"], make_public=session["public"])
    ctime = vnfpkgop_desc["_admin"]["created"]
    vnfpkgop_desc["statusEnteredTime"] = ctime
    vnfpkgop_desc["startTime"] = ctime
    self.db.create(self.topic, vnfpkgop_desc)
    rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
    self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
    return vnfpkgop_id, None