+ with open('/tmp/' + package.name, 'wb+') as destination:
+ for chunk in package.chunks():
+ destination.write(chunk)
+ headers['Content-File-MD5'] = self.md5(
+ open('/tmp/' + package.name, 'rb'))
+ _url = "{0}/vnfpkgm/v1/vnf_packages_content".format(self._base_path)
+ try:
+ r = requests.post(_url, data=open(
+ '/tmp/' + package.name, 'rb'), verify=False, headers=headers)
+ except Exception as e:
+ log.exception(e)
+ result['data'] = str(e)
+ return result
+ if r.status_code in (200, 201, 202, 204):
+ result['error'] = False
+ result['data'] = Util.json_loads_byteified(r.text)
+ return result
+
def nsd_create_pkg_base(self, token, pkg_name):
    """Create a minimal NS descriptor package and onboard it.

    Builds a skeleton NSD tarball via _create_base_pkg() and POSTs it to
    the NBI ns_descriptors_content endpoint.

    Returns {'error': bool, 'data': ...}: the NBI JSON response on
    success, "Invalid ID." on conflict, an error string on exception.
    """
    result = {'error': True, 'data': ''}
    headers = {"Content-Type": "application/gzip", "accept": "application/json",
               'Authorization': 'Bearer {}'.format(token['id'])}

    _url = "{0}/nsd/v1/ns_descriptors_content/".format(self._base_path)

    try:
        self._create_base_pkg('nsd', pkg_name)
        headers['Content-Filename'] = pkg_name + '.tar.gz'
        # Context manager closes the upload handle; the original leaked
        # the open file object.  verify=False is kept as-is (NBI uses a
        # self-signed certificate, presumably) — NOTE(review): confirm.
        with open('/tmp/' + pkg_name + '.tar.gz', 'rb') as pkg_file:
            r = requests.post(_url, data=pkg_file, verify=False,
                              headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['data'] = r.json()
        result['error'] = False
    if r.status_code == requests.codes.conflict:
        result['data'] = "Invalid ID."
    return result
+
def vnfd_create_pkg_base(self, token, pkg_name):
    """Create a minimal VNF descriptor package and onboard it.

    Builds a skeleton VNFD tarball via _create_base_pkg() and POSTs it
    to the NBI vnf_packages_content endpoint.

    Returns {'error': bool, 'data': ...}: the NBI JSON response on
    success, "Invalid ID." on conflict, an error string on exception.
    """
    result = {'error': True, 'data': ''}
    headers = {"Content-Type": "application/gzip", "accept": "application/json",
               'Authorization': 'Bearer {}'.format(token['id'])}

    _url = "{0}/vnfpkgm/v1/vnf_packages_content".format(self._base_path)

    try:
        self._create_base_pkg('vnfd', pkg_name)
        # Context manager closes the upload handle; the original leaked
        # the open file object.
        with open('/tmp/' + pkg_name + '.tar.gz', 'rb') as pkg_file:
            r = requests.post(_url, data=pkg_file, verify=False,
                              headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['data'] = r.json()
        result['error'] = False
    if r.status_code == requests.codes.conflict:
        result['data'] = "Invalid ID."
    return result
+
def nsd_clone(self, token, id):
    """Clone the onboarded NS descriptor 'id' and onboard the copy.

    Downloads the package, prefixes id/name/short-name with 'clone_'
    via _descriptor_clone(), then POSTs the rebuilt tarball.

    Returns {'error': bool, 'data': ...}.
    """
    result = {'error': True, 'data': ''}
    headers = {"Content-Type": "application/gzip", "accept": "application/json",
               'Authorization': 'Bearer {}'.format(token['id'])}

    # get the package onboarded
    # NOTE(review): on HTTP failure get_nsd_pkg returns a result dict,
    # not a file-like object, so tarfile.open would raise here — confirm
    # callers only pass onboarded ids.
    tar_pkg = self.get_nsd_pkg(token, id)
    tarf = tarfile.open(fileobj=tar_pkg)
    tarf = self._descriptor_clone(tarf, 'nsd')
    clone_path = '/tmp/' + tarf.getnames()[0] + "_clone.tar.gz"
    # Close both handles deterministically; the originals were leaked.
    with open(clone_path, 'rb') as md5_file:
        headers['Content-File-MD5'] = self.md5(md5_file)

    _url = "{0}/nsd/v1/ns_descriptors_content/".format(self._base_path)

    try:
        with open(clone_path, 'rb') as pkg_file:
            r = requests.post(_url, data=pkg_file, verify=False,
                              headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['error'] = False
    if r.status_code == requests.codes.conflict:
        result['data'] = "Invalid ID."

    return result
+
def vnfd_clone(self, token, id):
    """Clone the onboarded VNF descriptor 'id' and onboard the copy.

    Downloads the package, prefixes id/name/short-name with 'clone_'
    via _descriptor_clone(), then POSTs the rebuilt tarball.

    Returns {'error': bool, 'data': ...}.
    """
    result = {'error': True, 'data': ''}
    headers = {"Content-Type": "application/gzip", "accept": "application/json",
               'Authorization': 'Bearer {}'.format(token['id'])}

    # get the package onboarded
    # NOTE(review): on HTTP failure get_vnfd_pkg returns a result dict,
    # not a file-like object, so tarfile.open would raise here — confirm
    # callers only pass onboarded ids.
    tar_pkg = self.get_vnfd_pkg(token, id)
    tarf = tarfile.open(fileobj=tar_pkg)

    tarf = self._descriptor_clone(tarf, 'vnfd')
    clone_path = '/tmp/' + tarf.getnames()[0] + "_clone.tar.gz"
    # Close both handles deterministically; the originals were leaked.
    with open(clone_path, 'rb') as md5_file:
        headers['Content-File-MD5'] = self.md5(md5_file)

    _url = "{0}/vnfpkgm/v1/vnf_packages_content".format(self._base_path)

    try:
        with open(clone_path, 'rb') as pkg_file:
            r = requests.post(_url, data=pkg_file, verify=False,
                              headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['error'] = False
    if r.status_code == requests.codes.conflict:
        result['data'] = "Invalid ID."

    return result
+
def nst_content_update(self, token, id, template):
    """PUT a new YAML body for netslice template 'id'.

    Returns {'error': bool, 'data': ...}; 'data' carries the exception
    text on failure and stays empty on success.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nst/v1/netslice_templates/{1}/nst_content".format(
        self._base_path, id)
    try:
        resp = requests.put(endpoint, data=template,
                            verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
    return outcome
+
def nsd_update(self, token, id, data):
    """Replace the YAML of the onboarded NS descriptor 'id' with 'data'.

    Downloads the package, rewrites its descriptor via
    _descriptor_update(), and PUTs the rebuilt tarball back.

    Returns {'error': bool, 'data': ...}; 'data' holds the NBI error
    body (or {}) when the request is rejected.
    """
    result = {'error': True, 'data': ''}
    headers = {"Content-Type": "application/gzip", "accept": "application/json",
               'Authorization': 'Bearer {}'.format(token['id'])}

    # get the package onboarded
    # NOTE(review): get_nsd_pkg returns a result dict on failure, in
    # which case tarfile.open raises — confirm callers pass valid ids.
    tar_pkg = self.get_nsd_pkg(token, id)
    tarf = tarfile.open(fileobj=tar_pkg)

    tarf = self._descriptor_update(tarf, data)
    pkg_path = '/tmp/' + tarf.getnames()[0] + ".tar.gz"
    # Close both handles deterministically; the originals were leaked.
    with open(pkg_path, 'rb') as md5_file:
        headers['Content-File-MD5'] = self.md5(md5_file)

    _url = "{0}/nsd/v1/ns_descriptors/{1}/nsd_content".format(
        self._base_path, id)

    try:
        with open(pkg_path, 'rb') as pkg_file:
            r = requests.put(_url, data=pkg_file, verify=False,
                             headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['error'] = False
    else:
        try:
            result['data'] = r.json()
        except Exception:
            # Non-JSON error body: surface an empty dict, as before.
            result['data'] = {}

    return result
+
def vnfd_update(self, token, id, data):
    """Replace the YAML of the onboarded VNF descriptor 'id' with 'data'.

    Downloads the package, rewrites its descriptor via
    _descriptor_update(), and PUTs the rebuilt tarball back.

    Returns {'error': bool, 'data': ...}; 'data' holds the NBI error
    body (or {}) when the request is rejected.
    """
    result = {'error': True, 'data': ''}
    headers = {"Content-Type": "application/gzip", "accept": "application/json",
               'Authorization': 'Bearer {}'.format(token['id'])}

    # get the package onboarded
    # NOTE(review): get_vnfd_pkg returns a result dict on failure, in
    # which case tarfile.open raises — confirm callers pass valid ids.
    tar_pkg = self.get_vnfd_pkg(token, id)
    tarf = tarfile.open(fileobj=tar_pkg)

    tarf = self._descriptor_update(tarf, data)
    pkg_path = '/tmp/' + tarf.getnames()[0] + ".tar.gz"
    # Close both handles deterministically; the originals were leaked.
    with open(pkg_path, 'rb') as md5_file:
        headers['Content-File-MD5'] = self.md5(md5_file)

    _url = "{0}/vnfpkgm/v1/vnf_packages/{1}/package_content".format(
        self._base_path, id)

    try:
        with open(pkg_path, 'rb') as pkg_file:
            r = requests.put(_url, data=pkg_file, verify=False,
                             headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['error'] = False
    else:
        try:
            result['data'] = r.json()
        except Exception:
            # Non-JSON error body: surface an empty dict, as before.
            result['data'] = {}

    return result
+
def get_nsd_pkg(self, token, id):
    """Download the onboarded NSD package 'id'.

    Returns a StringIO holding the tarball bytes on success; otherwise
    the usual {'error': True, 'data': ...} dict.  Callers must cope
    with this mixed return type.
    """
    failure = {'error': True, 'data': ''}
    headers = {"accept": "application/zip",
               'Authorization': 'Bearer {}'.format(token['id'])}
    endpoint = "{0}/nsd/v1/ns_descriptors/{1}/nsd_content".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        failure['data'] = str(err)
        return failure
    if resp.status_code in (200, 201, 202, 204):
        return StringIO.StringIO(resp.content)
    return failure
+
def get_vnfd_pkg(self, token, id):
    """Download the onboarded VNF package 'id'.

    Returns a StringIO holding the tarball bytes on success; otherwise
    the usual {'error': True, 'data': ...} dict.  Callers must cope
    with this mixed return type.
    """
    failure = {'error': True, 'data': ''}
    headers = {"accept": "application/zip",
               'Authorization': 'Bearer {}'.format(token['id'])}
    endpoint = "{0}/vnfpkgm/v1/vnf_packages/{1}/package_content".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        failure['data'] = str(err)
        return failure
    if resp.status_code in (200, 201, 202, 204):
        return StringIO.StringIO(resp.content)
    return failure
+
def _descriptor_update(self, tarf, data):
    """Overwrite the package's top-level YAML with 'data' and rebuild
    the tarball as /tmp/<root>.tar.gz.

    Returns the input tarfile object (callers reuse its member names).
    """
    # extract the package on a tmp directory
    tarf.extractall('/tmp')
    # Match '<root>/<file>.yaml|yml' — only direct children of the
    # package root directory.
    regex = re.compile(r"^[^/]+(/[^/]+\.(yaml|yml))$", re.U)
    for name in tarf.getnames():
        if regex.match(name):
            with open('/tmp/' + name, 'w') as outfile:
                yaml.safe_dump(data, outfile, default_flow_style=False)
            break

    tarf_temp = tarfile.open(
        '/tmp/' + tarf.getnames()[0] + ".tar.gz", "w:gz")
    # try/finally so the archive is closed (and flushed) even if one of
    # the add() calls fails; the original leaked it on exception.
    try:
        for tarinfo in tarf:
            tarf_temp.add('/tmp/' + tarinfo.name,
                          tarinfo.name, recursive=False)
    finally:
        tarf_temp.close()
    return tarf
+
def _create_base_pkg(self, descriptor_type, pkg_name):
    """Write a skeleton descriptor YAML under /tmp/<pkg_name>/ and pack
    it into /tmp/<pkg_name>.tar.gz.

    descriptor_type: 'nsd' or 'vnfd'.  Anything else now raises
    ValueError explicitly (the original fell through and crashed later
    with a NameError on the unbound 'descriptor' variable).
    """
    filename = '/tmp/' + pkg_name + '/' + pkg_name + '.yaml'
    if descriptor_type == 'nsd':
        descriptor = {
            "nsd:nsd-catalog": {
                "nsd": [
                    {
                        "short-name": str(pkg_name),
                        "vendor": "OSM Composer",
                        "description": str(pkg_name) + " descriptor",
                        "vld": [],
                        "constituent-vnfd": [],
                        "version": "1.0",
                        "id": str(pkg_name),
                        "name": str(pkg_name)
                    }
                ]
            }
        }

    elif descriptor_type == 'vnfd':
        descriptor = {
            "vnfd:vnfd-catalog": {
                "vnfd": [
                    {
                        "short-name": str(pkg_name),
                        "vdu": [],
                        "description": "",
                        "mgmt-interface": {
                            "cp": ""
                        },
                        "id": str(pkg_name),
                        "version": "1.0",
                        "internal-vld": [],
                        "connection-point": [],
                        "name": str(pkg_name)
                    }
                ]
            }
        }
    else:
        raise ValueError(
            "descriptor_type must be 'nsd' or 'vnfd', got %r" % descriptor_type)

    if not os.path.exists(os.path.dirname(filename)):
        try:
            os.makedirs(os.path.dirname(filename))
        except OSError as exc:  # Guard against race condition
            if exc.errno != errno.EEXIST:
                raise

    with open(filename, 'w') as yaml_file:
        yaml_file.write(yaml.dump(descriptor, default_flow_style=False))

    tarf_temp = tarfile.open('/tmp/' + pkg_name + '.tar.gz', "w:gz")
    # Close the archive even if add() fails.
    try:
        tarf_temp.add(filename,
                      pkg_name + '/' + pkg_name + '.yaml', recursive=False)
    finally:
        tarf_temp.close()
+
def _descriptor_clone(self, tarf, descriptor_type):
    """Prefix id/name/short-name of the package's descriptor with
    'clone_' and rebuild it as /tmp/<root>_clone.tar.gz.

    descriptor_type: 'nsd' or 'vnfd'.  Returns the input tarfile object
    (callers reuse its member names).
    """
    # extract the package on a tmp directory
    tarf.extractall('/tmp')

    for name in tarf.getnames():
        if name.endswith(".yaml") or name.endswith(".yml"):
            with open('/tmp/' + name, 'r') as yaml_in:
                # safe_load instead of yaml.load: the package content is
                # external input and must not be allowed to construct
                # arbitrary Python objects.
                yaml_object = yaml.safe_load(yaml_in)

            if descriptor_type == 'nsd':
                nsd_list = yaml_object['nsd:nsd-catalog']['nsd']
                for nsd in nsd_list:
                    nsd['id'] = 'clone_' + nsd['id']
                    nsd['name'] = 'clone_' + nsd['name']
                    nsd['short-name'] = 'clone_' + nsd['short-name']
            elif descriptor_type == 'vnfd':
                vnfd_list = yaml_object['vnfd:vnfd-catalog']['vnfd']
                for vnfd in vnfd_list:
                    vnfd['id'] = 'clone_' + vnfd['id']
                    vnfd['name'] = 'clone_' + vnfd['name']
                    vnfd['short-name'] = 'clone_' + vnfd['short-name']

            with open('/tmp/' + name, 'w') as yaml_file:
                yaml_file.write(
                    yaml.dump(yaml_object, default_flow_style=False))
            break

    tarf_temp = tarfile.open(
        '/tmp/' + tarf.getnames()[0] + "_clone.tar.gz", "w:gz")
    # try/finally so the archive is closed even if an add() fails.
    try:
        for tarinfo in tarf:
            tarf_temp.add('/tmp/' + tarinfo.name,
                          tarinfo.name, recursive=False)
    finally:
        tarf_temp.close()
    return tarf
+
def nsd_get(self, token, id):
    """Fetch the NSD of descriptor 'id' as a parsed YAML object.

    Returns the parsed descriptor on success; otherwise the usual
    {'error': True, 'data': ...} dict (mixed return type, kept for
    callers that already expect it).
    """
    result = {'error': True, 'data': ''}
    headers = {'Content-Type': 'application/yaml',
               'Authorization': 'Bearer {}'.format(token['id'])}
    _url = "{0}/nsd/v1/ns_descriptors/{1}/nsd".format(self._base_path, id)
    try:
        r = requests.get(_url, params=None, verify=False,
                         stream=True, headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['error'] = False
        # safe_load instead of yaml.load: the response body is external
        # data and must not construct arbitrary Python objects.
        return yaml.safe_load(r.text)
    else:
        try:
            result['data'] = r.json()
        except Exception:
            result['data'] = {}
    return result
+
def vnfd_get(self, token, id):
    """Fetch the VNFD of package 'id' as a parsed YAML object.

    Returns the parsed descriptor on success; otherwise the usual
    {'error': True, 'data': ...} dict (mixed return type, kept for
    callers that already expect it).
    """
    result = {'error': True, 'data': ''}
    headers = {'Content-Type': 'application/yaml',
               'Authorization': 'Bearer {}'.format(token['id'])}
    _url = "{0}/vnfpkgm/v1/vnf_packages/{1}/vnfd".format(
        self._base_path, id)
    try:
        r = requests.get(_url, params=None, verify=False,
                         stream=True, headers=headers)
    except Exception as e:
        log.exception(e)
        result['data'] = str(e)
        return result
    if r.status_code in (200, 201, 202, 204):
        result['error'] = False
        # safe_load instead of yaml.load: the response body is external
        # data and must not construct arbitrary Python objects.
        return yaml.safe_load(r.text)
    else:
        try:
            result['data'] = r.json()
        except Exception:
            result['data'] = {}
    return result
+
def nsd_artifacts(self, token, id):
    """List the artifacts of NS descriptor 'id'.

    Returns {'error': bool, 'data': ...}; 'data' is the plain-text
    artifact listing on success, the NBI error body (or {}) otherwise.
    """
    outcome = {'error': True, 'data': ''}
    headers = {'Content-Type': 'application/yaml', 'accept': 'text/plain',
               'Authorization': 'Bearer {}'.format(token['id'])}
    endpoint = "{0}/nsd/v1/ns_descriptors/{1}/artifacts".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = resp.text
        return outcome
    try:
        outcome['data'] = resp.json()
    except Exception:
        outcome['data'] = {}
    return outcome
+
def vnf_packages_artifacts(self, token, id):
    """List the artifacts of VNF package 'id'.

    Returns {'error': bool, 'data': ...}; 'data' is the plain-text
    artifact listing on success, the NBI error body (or {}) otherwise.
    """
    outcome = {'error': True, 'data': ''}
    headers = {'Content-Type': 'application/yaml', 'accept': 'text/plain',
               'Authorization': 'Bearer {}'.format(token['id'])}
    endpoint = "{0}/vnfpkgm/v1/vnf_packages/{1}/artifacts".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = resp.text
        return outcome
    try:
        outcome['data'] = resp.json()
    except Exception:
        outcome['data'] = {}
    return outcome
+
def nsi_create(self, token, nsi_data):
    """Instantiate a network slice from 'nsi_data'.

    Returns {'error': bool, 'data': ...}; 'data' carries the decoded
    NBI response on success.
    """
    outcome = {'error': True, 'data': ''}
    # NOTE(review): Content-Type says yaml but the body is sent with
    # json= — requests will override the header; confirm intended.
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nsilcm/v1/netslice_instances_content".format(
        self._base_path)
    try:
        resp = requests.post(endpoint, json=nsi_data,
                             verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def ns_create(self, token, ns_data):
    """Instantiate a network service from 'ns_data'.

    Returns {'error': bool, 'data': ...}; 'data' carries the decoded
    NBI response on success.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nslcm/v1/ns_instances_content".format(self._base_path)
    try:
        resp = requests.post(endpoint, json=ns_data,
                             verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def pdu_create(self, token, pdu_data):
    """Create a PDU descriptor from 'pdu_data'.

    Returns {'error': bool, 'data': ...}; 'data' carries the decoded
    NBI response on success.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/pdu/v1/pdu_descriptors".format(self._base_path)
    try:
        resp = requests.post(endpoint, json=pdu_data,
                             verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def ns_op_list(self, token, id):
    """List LCM operation occurrences for NS instance 'id'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nslcm/v1/ns_lcm_op_occs/?nsInstanceId={1}".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def nsi_op_list(self, token, id):
    """List LCM operation occurrences for netslice instance 'id'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nsilcm/v1/nsi_lcm_op_occs/?netsliceInstanceId={1}".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def ns_op(self, token, id):
    """Fetch a single LCM operation occurrence by its id.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nslcm/v1/ns_lcm_op_occs/{1}".format(self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def ns_action(self, token, id, action_payload):
    """POST an action (e.g. a primitive) to NS instance 'id'.

    Returns {'error': bool, 'data': ...}; 'data' carries the decoded
    NBI response on success.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nslcm/v1/ns_instances/{1}/action".format(
        self._base_path, id)
    try:
        resp = requests.post(endpoint, json=action_payload,
                             verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def nsi_delete(self, token, id, force=None):
    """Delete netslice instance 'id'; append ?FORCE=true when 'force'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    suffix = '?FORCE=true' if force else ''
    endpoint = "{0}/nsilcm/v1/netslice_instances_content/{1}{2}".format(
        self._base_path, id, suffix)
    try:
        resp = requests.delete(endpoint, params=None,
                               verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    # Response truthiness == "status < 400"; kept from the original.
    if resp:
        outcome['error'] = False
    if resp.status_code != requests.codes.no_content:
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def ns_delete(self, token, id, force=None):
    """Delete NS instance 'id'; append ?FORCE=true when 'force'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    suffix = '?FORCE=true' if force else ''
    endpoint = "{0}/nslcm/v1/ns_instances_content/{1}{2}".format(
        self._base_path, id, suffix)
    try:
        resp = requests.delete(endpoint, params=None,
                               verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    # Response truthiness == "status < 400"; kept from the original.
    if resp:
        outcome['error'] = False
    if resp.status_code != requests.codes.no_content:
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def pdu_delete(self, token, id):
    """Delete PDU descriptor 'id'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/yaml",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/pdu/v1/pdu_descriptors/{1}".format(self._base_path, id)
    try:
        resp = requests.delete(endpoint, params=None,
                               verify=False, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    # Response truthiness == "status < 400"; kept from the original.
    if resp:
        outcome['error'] = False
    if resp.status_code != requests.codes.no_content:
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def nsi_get(self, token, id):
    """Fetch netslice instance 'id'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nsilcm/v1/netslice_instances/{1}".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def ns_get(self, token, id):
    """Fetch NS instance 'id' (content view).

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nslcm/v1/ns_instances_content/{1}".format(
        self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def vnf_get(self, token, id):
    """Fetch VNF record 'id'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/nslcm/v1/vnfrs/{1}".format(self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
def pdu_get(self, token, id):
    """Fetch PDU descriptor 'id'.

    Returns {'error': bool, 'data': ...}.
    """
    outcome = {'error': True, 'data': ''}
    headers = {
        "Content-Type": "application/json",
        "accept": "application/json",
        'Authorization': 'Bearer {}'.format(token['id']),
    }
    endpoint = "{0}/pdu/v1/pdu_descriptors/{1}".format(self._base_path, id)
    try:
        resp = requests.get(endpoint, params=None, verify=False,
                            stream=True, headers=headers)
    except Exception as err:
        log.exception(err)
        outcome['data'] = str(err)
        return outcome
    if resp.status_code in (200, 201, 202, 204):
        outcome['error'] = False
        outcome['data'] = Util.json_loads_byteified(resp.text)
    return outcome
+
+ def ns_alarm_create(self, token, id, alarm_payload):
+ result = {'error': True, 'data': ''}
+ headers = {"Content-Type": "application/json",
+ 'Authorization': 'Bearer {}'.format(token['id'])}
+ _url = "{0}/test/message/alarm_request".format(self._base_path)
+ try:
+ r = requests.post(_url, json=alarm_payload,
+ verify=False, headers=headers)
+ except Exception as e: