# -*- coding: utf-8 -*-

# stdlib
import json
import tarfile
from hashlib import md5
from http import HTTPStatus

# third-party
import yaml
import pyangbind.lib.pybindJSON as pybindJSON
from pyangbind.lib.serialise import pybindJSONDecoder
from osm_common.dbbase import DbException, deep_update_rfc7396
from osm_im.vnfd import vnfd as vnfd_im
from osm_im.nsd import nsd as nsd_im

# local
from validation import ValidationError, pdu_new_schema, pdu_edit_schema
from base_topic import BaseTopic, EngineException, get_iterable

__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
class DescriptorTopic(BaseTopic):
    """Common behavior for descriptor topics (VNFD, NSD): two-step creation,
    package upload/extraction and artifact retrieval."""

    def __init__(self, db, fs, msg):
        """Keep the database, file-storage and message-bus handlers via the base class."""
        super().__init__(db, fs, msg)
25 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
, force
=False):
26 # check that this id is not present
27 _filter
= {"id": final_content
["id"]}
29 _filter
["_id.neq"] = _id
31 _filter
.update(self
._get
_project
_filter
(session
, write
=False, show_all
=False))
32 if self
.db
.get_one(self
.topic
, _filter
, fail_on_empty
=False):
33 raise EngineException("{} with id '{}' already exists for this project".format(self
.topic
[:-1],
36 # TODO validate with pyangbind. Load and dumps to convert data types
39 def format_on_new(content
, project_id
=None, make_public
=False):
40 BaseTopic
.format_on_new(content
, project_id
=project_id
, make_public
=make_public
)
41 content
["_admin"]["onboardingState"] = "CREATED"
42 content
["_admin"]["operationalState"] = "DISABLED"
43 content
["_admin"]["usageSate"] = "NOT_IN_USE"
45 def delete(self
, session
, _id
, force
=False, dry_run
=False):
47 Delete item by its internal _id
48 :param session: contains the used login username, working project, and admin rights
49 :param _id: server internal id
50 :param force: indicates if deletion must be forced in case of conflict
51 :param dry_run: make checking but do not delete
52 :return: dictionary with deleted item _id. It raises EngineException on error: not found, conflict, ...
54 # TODO add admin to filter, validate rights
55 v
= BaseTopic
.delete(self
, session
, _id
, force
, dry_run
=True)
58 v
= self
.db
.del_one(self
.topic
, {"_id": _id
})
59 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
60 self
._send
_msg
("delete", {"_id": _id
})
64 def get_one_by_id(db
, session
, topic
, id):
65 # find owned by this project
66 _filter
= BaseTopic
._get
_project
_filter
(session
, write
=False, show_all
=False)
68 desc_list
= db
.get_list(topic
, _filter
)
69 if len(desc_list
) == 1:
71 elif len(desc_list
) > 1:
72 raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic
[:-1], id),
75 # not found any: try to find public
76 _filter
= BaseTopic
._get
_project
_filter
(session
, write
=False, show_all
=True)
78 desc_list
= db
.get_list(topic
, _filter
)
80 raise DbException("Not found any {} with id='{}'".format(topic
[:-1], id), HTTPStatus
.NOT_FOUND
)
81 elif len(desc_list
) == 1:
84 raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
85 topic
[:-1], id), HTTPStatus
.CONFLICT
)
87 def new(self
, rollback
, session
, indata
=None, kwargs
=None, headers
=None, force
=False, make_public
=False):
89 Creates a new almost empty DISABLED entry into database. Due to SOL005, it does not follow normal procedure.
90 Creating a VNFD or NSD is done in two steps: 1. Creates an empty descriptor (this step) and 2) upload content
92 :param rollback: list to append created items at database in case a rollback may to be done
93 :param session: contains the used login username and working project
94 :param indata: data to be inserted
95 :param kwargs: used to override the indata descriptor
96 :param headers: http request headers
97 :param force: If True avoid some dependence checks
98 :param make_public: Make the created descriptor public to all projects
99 :return: _id: identity of the inserted data.
105 if "userDefinedData" in indata
:
106 indata
= indata
['userDefinedData']
108 # Override descriptor with query string kwargs
109 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
110 # uncomment when this method is implemented.
111 # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
112 # indata = DescriptorTopic._validate_input_new(self, indata, force=force)
114 content
= {"_admin": {"userDefinedData": indata
}}
115 self
.format_on_new(content
, session
["project_id"], make_public
=make_public
)
116 _id
= self
.db
.create(self
.topic
, content
)
117 rollback
.append({"topic": self
.topic
, "_id": _id
})
119 except ValidationError
as e
:
120 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
122 def upload_content(self
, session
, _id
, indata
, kwargs
, headers
, force
=False):
124 Used for receiving content by chunks (with a transaction_id header and/or gzip file. It will store and extract)
125 :param session: session
126 :param _id : the nsd,vnfd is already created, this is the id
127 :param indata: http body request
128 :param kwargs: user query string to override parameters. NOT USED
129 :param headers: http request headers
130 :param force: to be more tolerant with validation
131 :return: True package has is completely uploaded or False if partial content has been uplodaed.
132 Raise exception on error
134 # Check that _id exists and it is valid
135 current_desc
= self
.show(session
, _id
)
137 content_range_text
= headers
.get("Content-Range")
138 expected_md5
= headers
.get("Content-File-MD5")
140 content_type
= headers
.get("Content-Type")
141 if content_type
and "application/gzip" in content_type
or "application/x-gzip" in content_type
or \
142 "application/zip" in content_type
:
144 filename
= headers
.get("Content-Filename")
146 filename
= "package.tar.gz" if compressed
else "package"
147 # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
151 if content_range_text
:
152 content_range
= content_range_text
.replace("-", " ").replace("/", " ").split()
153 if content_range
[0] != "bytes": # TODO check x<y not negative < total....
155 start
= int(content_range
[1])
156 end
= int(content_range
[2]) + 1
157 total
= int(content_range
[3])
162 if not self
.fs
.file_exists(_id
, 'dir'):
163 raise EngineException("invalid Transaction-Id header", HTTPStatus
.NOT_FOUND
)
165 self
.fs
.file_delete(_id
, ignore_non_exist
=True)
168 storage
= self
.fs
.get_params()
169 storage
["folder"] = _id
171 file_path
= (_id
, filename
)
172 if self
.fs
.file_exists(file_path
, 'file'):
173 file_size
= self
.fs
.file_size(file_path
)
176 if file_size
!= start
:
177 raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
178 file_size
, start
), HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
)
179 file_pkg
= self
.fs
.file_open(file_path
, 'a+b')
180 if isinstance(indata
, dict):
181 indata_text
= yaml
.safe_dump(indata
, indent
=4, default_flow_style
=False)
182 file_pkg
.write(indata_text
.encode(encoding
="utf-8"))
186 indata_text
= indata
.read(4096)
187 indata_len
+= len(indata_text
)
190 file_pkg
.write(indata_text
)
191 if content_range_text
:
192 if indata_len
!= end
-start
:
193 raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
194 start
, end
-1, indata_len
), HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
)
196 # TODO update to UPLOADING
203 chunk_data
= file_pkg
.read(1024)
205 file_md5
.update(chunk_data
)
206 chunk_data
= file_pkg
.read(1024)
207 if expected_md5
!= file_md5
.hexdigest():
208 raise EngineException("Error, MD5 mismatch", HTTPStatus
.CONFLICT
)
210 if compressed
== "gzip":
211 tar
= tarfile
.open(mode
='r', fileobj
=file_pkg
)
212 descriptor_file_name
= None
214 tarname
= tarinfo
.name
215 tarname_path
= tarname
.split("/")
216 if not tarname_path
[0] or ".." in tarname_path
: # if start with "/" means absolute path
217 raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
218 if len(tarname_path
) == 1 and not tarinfo
.isdir():
219 raise EngineException("All files must be inside a dir for package descriptor tar.gz")
220 if tarname
.endswith(".yaml") or tarname
.endswith(".json") or tarname
.endswith(".yml"):
221 storage
["pkg-dir"] = tarname_path
[0]
222 if len(tarname_path
) == 2:
223 if descriptor_file_name
:
224 raise EngineException(
225 "Found more than one descriptor file at package descriptor tar.gz")
226 descriptor_file_name
= tarname
227 if not descriptor_file_name
:
228 raise EngineException("Not found any descriptor file at package descriptor tar.gz")
229 storage
["descriptor"] = descriptor_file_name
230 storage
["zipfile"] = filename
231 self
.fs
.file_extract(tar
, _id
)
232 with self
.fs
.file_open((_id
, descriptor_file_name
), "r") as descriptor_file
:
233 content
= descriptor_file
.read()
235 content
= file_pkg
.read()
236 storage
["descriptor"] = descriptor_file_name
= filename
238 if descriptor_file_name
.endswith(".json"):
239 error_text
= "Invalid json format "
240 indata
= json
.load(content
)
242 error_text
= "Invalid yaml format "
243 indata
= yaml
.load(content
)
245 current_desc
["_admin"]["storage"] = storage
246 current_desc
["_admin"]["onboardingState"] = "ONBOARDED"
247 current_desc
["_admin"]["operationalState"] = "ENABLED"
249 indata
= self
._remove
_envelop
(indata
)
251 # Override descriptor with query string kwargs
253 self
._update
_input
_with
_kwargs
(indata
, kwargs
)
254 # it will call overrides method at VnfdTopic or NsdTopic
255 indata
= self
._validate
_input
_new
(indata
, force
=force
)
257 deep_update_rfc7396(current_desc
, indata
)
258 self
.check_conflict_on_edit(session
, current_desc
, indata
, _id
=_id
, force
=force
)
259 self
.db
.replace(self
.topic
, _id
, current_desc
)
262 self
._send
_msg
("created", indata
)
264 # TODO if descriptor has changed because kwargs update content and remove cached zip
265 # TODO if zip is not present creates one
268 except EngineException
:
271 raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
272 HTTPStatus
.REQUESTED_RANGE_NOT_SATISFIABLE
)
274 raise EngineException("invalid upload transaction sequence: '{}'".format(e
), HTTPStatus
.BAD_REQUEST
)
275 except tarfile
.ReadError
as e
:
276 raise EngineException("invalid file content {}".format(e
), HTTPStatus
.BAD_REQUEST
)
277 except (ValueError, yaml
.YAMLError
) as e
:
278 raise EngineException(error_text
+ str(e
))
279 except ValidationError
as e
:
280 raise EngineException(e
, HTTPStatus
.UNPROCESSABLE_ENTITY
)
285 def get_file(self
, session
, _id
, path
=None, accept_header
=None):
287 Return the file content of a vnfd or nsd
288 :param session: contains the used login username and working project
289 :param _id: Identity of the vnfd, nsd
290 :param path: artifact path or "$DESCRIPTOR" or None
291 :param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
292 :return: opened file plus Accept format or raises an exception
294 accept_text
= accept_zip
= False
296 if 'text/plain' in accept_header
or '*/*' in accept_header
:
298 if 'application/zip' in accept_header
or '*/*' in accept_header
:
299 accept_zip
= 'application/zip'
300 elif 'application/gzip' in accept_header
:
301 accept_zip
= 'application/gzip'
303 if not accept_text
and not accept_zip
:
304 raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
305 http_code
=HTTPStatus
.NOT_ACCEPTABLE
)
307 content
= self
.show(session
, _id
)
308 if content
["_admin"]["onboardingState"] != "ONBOARDED":
309 raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
310 "onboardingState is {}".format(content
["_admin"]["onboardingState"]),
311 http_code
=HTTPStatus
.CONFLICT
)
312 storage
= content
["_admin"]["storage"]
313 if path
is not None and path
!= "$DESCRIPTOR": # artifacts
314 if not storage
.get('pkg-dir'):
315 raise EngineException("Packages does not contains artifacts", http_code
=HTTPStatus
.BAD_REQUEST
)
316 if self
.fs
.file_exists((storage
['folder'], storage
['pkg-dir'], *path
), 'dir'):
317 folder_content
= self
.fs
.dir_ls((storage
['folder'], storage
['pkg-dir'], *path
))
318 return folder_content
, "text/plain"
319 # TODO manage folders in http
321 return self
.fs
.file_open((storage
['folder'], storage
['pkg-dir'], *path
), "rb"),\
322 "application/octet-stream"
324 # pkgtype accept ZIP TEXT -> result
325 # manyfiles yes X -> zip
327 # onefile yes no -> zip
330 if accept_text
and (not storage
.get('pkg-dir') or path
== "$DESCRIPTOR"):
331 return self
.fs
.file_open((storage
['folder'], storage
['descriptor']), "r"), "text/plain"
332 elif storage
.get('pkg-dir') and not accept_zip
:
333 raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
334 "Accept header", http_code
=HTTPStatus
.NOT_ACCEPTABLE
)
336 if not storage
.get('zipfile'):
337 # TODO generate zipfile if not present
338 raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
339 "future versions", http_code
=HTTPStatus
.NOT_ACCEPTABLE
)
340 return self
.fs
.file_open((storage
['folder'], storage
['zipfile']), "rb"), accept_zip
342 def pyangbind_validation(self
, item
, data
, force
=False):
346 pybindJSONDecoder
.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data
]}}, None, None, obj
=myvnfd
,
347 path_helper
=True, skip_unknown
=force
)
348 out
= pybindJSON
.dumps(myvnfd
, mode
="ietf")
351 pybindJSONDecoder
.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data
]}}, None, None, obj
=mynsd
,
352 path_helper
=True, skip_unknown
=force
)
353 out
= pybindJSON
.dumps(mynsd
, mode
="ietf")
355 raise EngineException("Not possible to validate '{}' item".format(item
),
356 http_code
=HTTPStatus
.INTERNAL_SERVER_ERROR
)
358 desc_out
= self
._remove
_envelop
(yaml
.safe_load(out
))
361 except Exception as e
:
362 raise EngineException("Error in pyangbind validation: {}".format(str(e
)),
363 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
class VnfdTopic(DescriptorTopic):
    """Topic handler for VNF descriptors."""

    def __init__(self, db, fs, msg):
        """Delegate handler wiring to DescriptorTopic."""
        super().__init__(db, fs, msg)
374 def _remove_envelop(indata
=None):
377 clean_indata
= indata
378 if clean_indata
.get('vnfd:vnfd-catalog'):
379 clean_indata
= clean_indata
['vnfd:vnfd-catalog']
380 elif clean_indata
.get('vnfd-catalog'):
381 clean_indata
= clean_indata
['vnfd-catalog']
382 if clean_indata
.get('vnfd'):
383 if not isinstance(clean_indata
['vnfd'], list) or len(clean_indata
['vnfd']) != 1:
384 raise EngineException("'vnfd' must be a list of only one element")
385 clean_indata
= clean_indata
['vnfd'][0]
386 elif clean_indata
.get('vnfd:vnfd'):
387 if not isinstance(clean_indata
['vnfd:vnfd'], list) or len(clean_indata
['vnfd:vnfd']) != 1:
388 raise EngineException("'vnfd:vnfd' must be a list of only one element")
389 clean_indata
= clean_indata
['vnfd:vnfd'][0]
392 def check_conflict_on_del(self
, session
, _id
, force
=False):
394 Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
395 that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
398 :param _id: vnfd inernal id
399 :param force: Avoid this checking
400 :return: None or raises EngineException with the conflict
404 descriptor
= self
.db
.get_one("vnfds", {"_id": _id
})
405 descriptor_id
= descriptor
.get("id")
406 if not descriptor_id
: # empty vnfd not uploaded
409 _filter
= self
._get
_project
_filter
(session
, write
=False, show_all
=False)
410 # check vnfrs using this vnfd
411 _filter
["vnfd-id"] = _id
412 if self
.db
.get_list("vnfrs", _filter
):
413 raise EngineException("There is some VNFR that depends on this VNFD", http_code
=HTTPStatus
.CONFLICT
)
414 del _filter
["vnfd-id"]
415 # check NSD using this VNFD
416 _filter
["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
417 if self
.db
.get_list("nsds", _filter
):
418 raise EngineException("There is soame NSD that depends on this VNFD", http_code
=HTTPStatus
.CONFLICT
)
420 def _validate_input_new(self
, indata
, force
=False):
421 # TODO validate with pyangbind, serialize
422 indata
= self
.pyangbind_validation("vnfds", indata
, force
)
423 # Cross references validation in the descriptor
424 if not indata
.get("mgmt-interface"):
425 raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
426 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
427 if indata
["mgmt-interface"].get("cp"):
428 for cp
in get_iterable(indata
.get("connection-point")):
429 if cp
["name"] == indata
["mgmt-interface"]["cp"]:
432 raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
433 .format(indata
["mgmt-interface"]["cp"]),
434 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
436 for vdu
in get_iterable(indata
.get("vdu")):
437 for interface
in get_iterable(vdu
.get("interface")):
438 if interface
.get("external-connection-point-ref"):
439 for cp
in get_iterable(indata
.get("connection-point")):
440 if cp
["name"] == interface
["external-connection-point-ref"]:
443 raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
444 "must match an existing connection-point"
445 .format(vdu
["id"], interface
["name"],
446 interface
["external-connection-point-ref"]),
447 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
449 elif interface
.get("internal-connection-point-ref"):
450 for internal_cp
in get_iterable(vdu
.get("internal-connection-point")):
451 if interface
["internal-connection-point-ref"] == internal_cp
.get("id"):
454 raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
455 "must match an existing vdu:internal-connection-point"
456 .format(vdu
["id"], interface
["name"],
457 interface
["internal-connection-point-ref"]),
458 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
459 for ivld
in get_iterable(indata
.get("internal-vld")):
460 for icp
in get_iterable(ivld
.get("internal-connection-point")):
462 for vdu
in get_iterable(indata
.get("vdu")):
463 for internal_cp
in get_iterable(vdu
.get("internal-connection-point")):
464 if icp
["id-ref"] == internal_cp
["id"]:
470 raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
471 "vdu:internal-connection-point".format(ivld
["id"], icp
["id-ref"]),
472 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
473 if ivld
.get("ip-profile-ref"):
474 for ip_prof
in get_iterable(indata
.get("ip-profiles")):
475 if ip_prof
["name"] == get_iterable(ivld
.get("ip-profile-ref")):
478 raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
479 ivld
["id"], ivld
["ip-profile-ref"]),
480 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
481 for mp
in get_iterable(indata
.get("monitoring-param")):
482 if mp
.get("vdu-monitoring-param"):
484 for vdu
in get_iterable(indata
.get("vdu")):
485 for vmp
in get_iterable(vdu
.get("monitoring-param")):
486 if vmp
["id"] == mp
["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu
["id"] ==\
487 mp
["vdu-monitoring-param"]["vdu-ref"]:
493 raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
494 "defined at vdu[id='{}'] or vdu does not exist"
495 .format(mp
["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
496 mp
["vdu-monitoring-param"]["vdu-ref"]),
497 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
498 elif mp
.get("vdu-metric"):
500 for vdu
in get_iterable(indata
.get("vdu")):
501 if vdu
.get("vdu-configuration"):
502 for metric
in get_iterable(vdu
["vdu-configuration"].get("metrics")):
503 if metric
["name"] == mp
["vdu-metric"]["vdu-metric-name-ref"] and vdu
["id"] == \
504 mp
["vdu-metric"]["vdu-ref"]:
510 raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
511 "vdu[id='{}'] or vdu does not exist"
512 .format(mp
["vdu-metric"]["vdu-metric-name-ref"],
513 mp
["vdu-metric"]["vdu-ref"]),
514 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
516 for sgd
in get_iterable(indata
.get("scaling-group-descriptor")):
517 for sp
in get_iterable(sgd
.get("scaling-policy")):
518 for sc
in get_iterable(sp
.get("scaling-criteria")):
519 for mp
in get_iterable(indata
.get("monitoring-param")):
520 if mp
["id"] == get_iterable(sc
.get("vnf-monitoring-param-ref")):
523 raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
524 "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
525 .format(sgd
["name"], sc
["name"], sc
["vnf-monitoring-param-ref"]),
526 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
527 for sgd_vdu
in get_iterable(sgd
.get("vdu")):
529 for vdu
in get_iterable(indata
.get("vdu")):
530 if vdu
["id"] == sgd_vdu
["vdu-id-ref"]:
536 raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
537 .format(sgd
["name"], sgd_vdu
["vdu-id-ref"]),
538 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
539 for sca
in get_iterable(sgd
.get("scaling-config-action")):
540 if not indata
.get("vnf-configuration"):
541 raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
542 "scaling-group-descriptor[name='{}']:scaling-config-action"
543 .format(sgd
["name"]),
544 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
545 for primitive
in get_iterable(indata
["vnf-configuration"].get("config-primitive")):
546 if primitive
["name"] == sca
["vnf-config-primitive-name-ref"]:
549 raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
550 "primitive-name-ref='{}' does not match any "
551 "vnf-configuration:config-primitive:name"
552 .format(sgd
["name"], sca
["vnf-config-primitive-name-ref"]),
553 http_code
=HTTPStatus
.UNPROCESSABLE_ENTITY
)
556 def _validate_input_edit(self
, indata
, force
=False):
557 # TODO validate with pyangbind, serialize
class NsdTopic(DescriptorTopic):
    """Topic handler for NS descriptors."""

    def __init__(self, db, fs, msg):
        """Delegate handler wiring to DescriptorTopic."""
        super().__init__(db, fs, msg)
569 def _remove_envelop(indata
=None):
572 clean_indata
= indata
574 if clean_indata
.get('nsd:nsd-catalog'):
575 clean_indata
= clean_indata
['nsd:nsd-catalog']
576 elif clean_indata
.get('nsd-catalog'):
577 clean_indata
= clean_indata
['nsd-catalog']
578 if clean_indata
.get('nsd'):
579 if not isinstance(clean_indata
['nsd'], list) or len(clean_indata
['nsd']) != 1:
580 raise EngineException("'nsd' must be a list of only one element")
581 clean_indata
= clean_indata
['nsd'][0]
582 elif clean_indata
.get('nsd:nsd'):
583 if not isinstance(clean_indata
['nsd:nsd'], list) or len(clean_indata
['nsd:nsd']) != 1:
584 raise EngineException("'nsd:nsd' must be a list of only one element")
585 clean_indata
= clean_indata
['nsd:nsd'][0]
588 def _validate_input_new(self
, indata
, force
=False):
590 # TODO validate with pyangbind, serialize
591 indata
= self
.pyangbind_validation("nsds", indata
, force
)
594 def _validate_input_edit(self
, indata
, force
=False):
595 # TODO validate with pyangbind, serialize
598 def _check_descriptor_dependencies(self
, session
, descriptor
):
600 Check that the dependent descriptors exist on a new descriptor or edition
601 :param session: client session information
602 :param descriptor: descriptor to be inserted or edit
603 :return: None or raises exception
605 if not descriptor
.get("constituent-vnfd"):
607 for vnf
in descriptor
["constituent-vnfd"]:
608 vnfd_id
= vnf
["vnfd-id-ref"]
609 filter_q
= self
._get
_project
_filter
(session
, write
=False, show_all
=True)
610 filter_q
["id"] = vnfd_id
611 if not self
.db
.get_list("vnfds", filter_q
):
612 raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
613 "existing vnfd".format(vnfd_id
), http_code
=HTTPStatus
.CONFLICT
)
615 def check_conflict_on_edit(self
, session
, final_content
, edit_content
, _id
, force
=False):
616 super().check_conflict_on_edit(session
, final_content
, edit_content
, _id
, force
=force
)
618 self
._check
_descriptor
_dependencies
(session
, final_content
)
620 def check_conflict_on_del(self
, session
, _id
, force
=False):
622 Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
623 that NSD can be public and be used by other projects.
625 :param _id: vnfd inernal id
626 :param force: Avoid this checking
627 :return: None or raises EngineException with the conflict
631 _filter
= self
._get
_project
_filter
(session
, write
=False, show_all
=False)
632 _filter
["nsdId"] = _id
633 if self
.db
.get_list("nsrs", _filter
):
634 raise EngineException("There is some NSR that depends on this NSD", http_code
=HTTPStatus
.CONFLICT
)
class PduTopic(BaseTopic):
    """Topic handler for Physical Deployment Units."""

    # NOTE(review): the collection/message topic names were lost in the extracted source;
    # "pdus"/"pdu" follow the naming convention of the other topics — confirm upstream.
    topic = "pdus"
    topic_msg = "pdu"
    schema_new = pdu_new_schema
    schema_edit = pdu_edit_schema

    def __init__(self, db, fs, msg):
        """Store the database, file-storage and message-bus handlers via BaseTopic."""
        BaseTopic.__init__(self, db, fs, msg)
647 def format_on_new(content
, project_id
=None, make_public
=False):
648 BaseTopic
.format_on_new(content
, project_id
=None, make_public
=make_public
)
649 content
["_admin"]["onboardingState"] = "CREATED"
650 content
["_admin"]["operationalState"] = "DISABLED"
651 content
["_admin"]["usageSate"] = "NOT_IN_USE"
653 def check_conflict_on_del(self
, session
, _id
, force
=False):
656 # TODO Is it needed to check descriptors _admin.project_read/project_write??
657 _filter
= {"vdur.pdu-id": _id
}
658 if self
.db
.get_list("vnfrs", _filter
):
659 raise EngineException("There is some NSR that uses this PDU", http_code
=HTTPStatus
.CONFLICT
)