# under the License.
##
-version = '7.0.1.post23'
-version_date = '2020-04-17'
+version = "7.0.1.post23"
+version_date = "2020-04-17"
# Obtain installed package version. Ignore if error, e.g. pkg_resources not installed
try:
from pkg_resources import get_distribution
+
version = get_distribution("osm_nbi").version
except Exception:
pass
from hashlib import sha256
from http import HTTPStatus
from time import time
-from osm_nbi.validation import user_new_schema, user_edit_schema, project_new_schema, project_edit_schema, \
- vim_account_new_schema, vim_account_edit_schema, sdn_new_schema, sdn_edit_schema, \
- wim_account_new_schema, wim_account_edit_schema, roles_new_schema, roles_edit_schema, \
- k8scluster_new_schema, k8scluster_edit_schema, k8srepo_new_schema, k8srepo_edit_schema, \
- vca_new_schema, vca_edit_schema, \
- osmrepo_new_schema, osmrepo_edit_schema, \
- validate_input, ValidationError, is_valid_uuid # To check that User/Project Names don't look like UUIDs
+from osm_nbi.validation import (
+ user_new_schema,
+ user_edit_schema,
+ project_new_schema,
+ project_edit_schema,
+ vim_account_new_schema,
+ vim_account_edit_schema,
+ sdn_new_schema,
+ sdn_edit_schema,
+ wim_account_new_schema,
+ wim_account_edit_schema,
+ roles_new_schema,
+ roles_edit_schema,
+ k8scluster_new_schema,
+ k8scluster_edit_schema,
+ k8srepo_new_schema,
+ k8srepo_edit_schema,
+ vca_new_schema,
+ vca_edit_schema,
+ osmrepo_new_schema,
+ osmrepo_edit_schema,
+ validate_input,
+ ValidationError,
+ is_valid_uuid,
+) # To check that User/Project Names don't look like UUIDs
from osm_nbi.base_topic import BaseTopic, EngineException
from osm_nbi.authconn import AuthconnNotFoundException, AuthconnConflictException
from osm_common.dbbase import deep_update_rfc7396
def check_conflict_on_new(self, session, indata):
# check username not exists
- if self.db.get_one(self.topic, {"username": indata.get("username")}, fail_on_empty=False, fail_on_more=False):
- raise EngineException("username '{}' exists".format(indata["username"]), HTTPStatus.CONFLICT)
+ if self.db.get_one(
+ self.topic,
+ {"username": indata.get("username")},
+ fail_on_empty=False,
+ fail_on_more=False,
+ ):
+ raise EngineException(
+ "username '{}' exists".format(indata["username"]), HTTPStatus.CONFLICT
+ )
# check projects
if not session["force"]:
for p in indata.get("projects") or []:
# To allow project addressing by Name as well as ID
- if not self.db.get_one("projects", {BaseTopic.id_field("projects", p): p}, fail_on_empty=False,
- fail_on_more=False):
- raise EngineException("project '{}' does not exist".format(p), HTTPStatus.CONFLICT)
+ if not self.db.get_one(
+ "projects",
+ {BaseTopic.id_field("projects", p): p},
+ fail_on_empty=False,
+ fail_on_more=False,
+ ):
+ raise EngineException(
+ "project '{}' does not exist".format(p), HTTPStatus.CONFLICT
+ )
def check_conflict_on_del(self, session, _id, db_content):
"""
:return: None if ok or raises EngineException with the conflict
"""
if _id == session["username"]:
- raise EngineException("You cannot delete your own user", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "You cannot delete your own user", http_code=HTTPStatus.CONFLICT
+ )
@staticmethod
def format_on_new(content, project_id=None, make_public=False):
salt = uuid4().hex
content["_admin"]["salt"] = salt
if content.get("password"):
- content["password"] = sha256(content["password"].encode('utf-8') + salt.encode('utf-8')).hexdigest()
+ content["password"] = sha256(
+ content["password"].encode("utf-8") + salt.encode("utf-8")
+ ).hexdigest()
if content.get("project_role_mappings"):
- projects = [mapping["project"] for mapping in content["project_role_mappings"]]
+ projects = [
+ mapping["project"] for mapping in content["project_role_mappings"]
+ ]
if content.get("projects"):
content["projects"] += projects
if edit_content.get("password"):
salt = uuid4().hex
final_content["_admin"]["salt"] = salt
- final_content["password"] = sha256(edit_content["password"].encode('utf-8') +
- salt.encode('utf-8')).hexdigest()
+ final_content["password"] = sha256(
+ edit_content["password"].encode("utf-8") + salt.encode("utf-8")
+ ).hexdigest()
return None
def edit(self, session, _id, indata=None, kwargs=None, content=None):
if not session["admin"]:
- raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ raise EngineException(
+ "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+ )
# Names that look like UUIDs are not allowed
name = (indata if indata else kwargs).get("username")
if is_valid_uuid(name):
- raise EngineException("Usernames that look like UUIDs are not allowed",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
- return BaseTopic.edit(self, session, _id, indata=indata, kwargs=kwargs, content=content)
+ raise EngineException(
+ "Usernames that look like UUIDs are not allowed",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
+ return BaseTopic.edit(
+ self, session, _id, indata=indata, kwargs=kwargs, content=content
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
if not session["admin"]:
- raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ raise EngineException(
+ "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+ )
# Names that look like UUIDs are not allowed
name = indata["username"] if indata else kwargs["username"]
if is_valid_uuid(name):
- raise EngineException("Usernames that look like UUIDs are not allowed",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
- return BaseTopic.new(self, rollback, session, indata=indata, kwargs=kwargs, headers=headers)
+ raise EngineException(
+ "Usernames that look like UUIDs are not allowed",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
+ return BaseTopic.new(
+ self, rollback, session, indata=indata, kwargs=kwargs, headers=headers
+ )
class ProjectTopic(BaseTopic):
if not indata.get("name"):
raise EngineException("missing 'name'")
# check name not exists
- if self.db.get_one(self.topic, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False):
- raise EngineException("name '{}' exists".format(indata["name"]), HTTPStatus.CONFLICT)
+ if self.db.get_one(
+ self.topic,
+ {"name": indata.get("name")},
+ fail_on_empty=False,
+ fail_on_more=False,
+ ):
+ raise EngineException(
+ "name '{}' exists".format(indata["name"]), HTTPStatus.CONFLICT
+ )
@staticmethod
def format_on_new(content, project_id=None, make_public=False):
:return: None if ok or raises EngineException with the conflict
"""
if _id in session["project_id"]:
- raise EngineException("You cannot delete your own project", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "You cannot delete your own project", http_code=HTTPStatus.CONFLICT
+ )
if session["force"]:
return
_filter = {"projects": _id}
if self.db.get_list("users", _filter):
- raise EngineException("There is some USER that contains this project", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is some USER that contains this project",
+ http_code=HTTPStatus.CONFLICT,
+ )
def edit(self, session, _id, indata=None, kwargs=None, content=None):
if not session["admin"]:
- raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ raise EngineException(
+ "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+ )
# Names that look like UUIDs are not allowed
name = (indata if indata else kwargs).get("name")
if is_valid_uuid(name):
- raise EngineException("Project names that look like UUIDs are not allowed",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
- return BaseTopic.edit(self, session, _id, indata=indata, kwargs=kwargs, content=content)
+ raise EngineException(
+ "Project names that look like UUIDs are not allowed",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
+ return BaseTopic.edit(
+ self, session, _id, indata=indata, kwargs=kwargs, content=content
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
if not session["admin"]:
- raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ raise EngineException(
+ "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+ )
# Names that look like UUIDs are not allowed
name = indata["name"] if indata else kwargs["name"]
if is_valid_uuid(name):
- raise EngineException("Project names that look like UUIDs are not allowed",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
- return BaseTopic.new(self, rollback, session, indata=indata, kwargs=kwargs, headers=headers)
+ raise EngineException(
+ "Project names that look like UUIDs are not allowed",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
+ return BaseTopic.new(
+ self, rollback, session, indata=indata, kwargs=kwargs, headers=headers
+ )
class CommonVimWimSdn(BaseTopic):
"""Common class for VIM, WIM SDN just to unify methods that are equal to all of them"""
- config_to_encrypt = {} # what keys at config must be encrypted because contains passwords
- password_to_encrypt = "" # key that contains a password
+
+ config_to_encrypt = (
+ {}
+ ) # what keys at config must be encrypted because contains passwords
+ password_to_encrypt = "" # key that contains a password
@staticmethod
def _create_operation(op_type, params=None):
schema_version = final_content.get("schema_version")
if schema_version:
if edit_content.get(self.password_to_encrypt):
- final_content[self.password_to_encrypt] = self.db.encrypt(edit_content[self.password_to_encrypt],
- schema_version=schema_version,
- salt=final_content["_id"])
- config_to_encrypt_keys = self.config_to_encrypt.get(schema_version) or self.config_to_encrypt.get("default")
+ final_content[self.password_to_encrypt] = self.db.encrypt(
+ edit_content[self.password_to_encrypt],
+ schema_version=schema_version,
+ salt=final_content["_id"],
+ )
+ config_to_encrypt_keys = self.config_to_encrypt.get(
+ schema_version
+ ) or self.config_to_encrypt.get("default")
if edit_content.get("config") and config_to_encrypt_keys:
for p in config_to_encrypt_keys:
if edit_content["config"].get(p):
- final_content["config"][p] = self.db.encrypt(edit_content["config"][p],
- schema_version=schema_version,
- salt=final_content["_id"])
+ final_content["config"][p] = self.db.encrypt(
+ edit_content["config"][p],
+ schema_version=schema_version,
+ salt=final_content["_id"],
+ )
# create edit operation
final_content["_admin"]["operations"].append(self._create_operation("edit"))
- return "{}:{}".format(final_content["_id"], len(final_content["_admin"]["operations"]) - 1)
+ return "{}:{}".format(
+ final_content["_id"], len(final_content["_admin"]["operations"]) - 1
+ )
def format_on_new(self, content, project_id=None, make_public=False):
"""
# encrypt passwords
if content.get(self.password_to_encrypt):
- content[self.password_to_encrypt] = self.db.encrypt(content[self.password_to_encrypt],
- schema_version=schema_version,
- salt=content["_id"])
- config_to_encrypt_keys = self.config_to_encrypt.get(schema_version) or self.config_to_encrypt.get("default")
+ content[self.password_to_encrypt] = self.db.encrypt(
+ content[self.password_to_encrypt],
+ schema_version=schema_version,
+ salt=content["_id"],
+ )
+ config_to_encrypt_keys = self.config_to_encrypt.get(
+ schema_version
+ ) or self.config_to_encrypt.get("default")
if content.get("config") and config_to_encrypt_keys:
for p in config_to_encrypt_keys:
if content["config"].get(p):
- content["config"][p] = self.db.encrypt(content["config"][p],
- schema_version=schema_version,
- salt=content["_id"])
+ content["config"][p] = self.db.encrypt(
+ content["config"][p],
+ schema_version=schema_version,
+ salt=content["_id"],
+ )
content["_admin"]["operationalState"] = "PROCESSING"
# remove reference from project_read if there are more projects referencing it. If it last one,
# do not remove reference, but order via kafka to delete it
if session["project_id"] and session["project_id"]:
- other_projects_referencing = next((p for p in db_content["_admin"]["projects_read"]
- if p not in session["project_id"] and p != "ANY"), None)
+ other_projects_referencing = next(
+ (
+ p
+ for p in db_content["_admin"]["projects_read"]
+ if p not in session["project_id"] and p != "ANY"
+ ),
+ None,
+ )
# check if there are projects referencing it (apart from ANY, that means, public)....
if other_projects_referencing:
# remove references but not delete
- update_dict_pull = {"_admin.projects_read": session["project_id"],
- "_admin.projects_write": session["project_id"]}
- self.db.set_one(self.topic, filter_q, update_dict=None, pull_list=update_dict_pull)
+ update_dict_pull = {
+ "_admin.projects_read": session["project_id"],
+ "_admin.projects_write": session["project_id"],
+ }
+ self.db.set_one(
+ self.topic, filter_q, update_dict=None, pull_list=update_dict_pull
+ )
return None
else:
- can_write = next((p for p in db_content["_admin"]["projects_write"] if p == "ANY" or
- p in session["project_id"]), None)
+ can_write = next(
+ (
+ p
+ for p in db_content["_admin"]["projects_write"]
+ if p == "ANY" or p in session["project_id"]
+ ),
+ None,
+ )
if not can_write:
- raise EngineException("You have not write permission to delete it",
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise EngineException(
+ "You have not write permission to delete it",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
# It must be deleted
if session["force"]:
self.db.del_one(self.topic, {"_id": _id})
op_id = None
- self._send_msg("deleted", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg)
+ self._send_msg(
+ "deleted", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg
+ )
else:
update_dict = {"_admin.to_delete": True}
- self.db.set_one(self.topic, {"_id": _id},
- update_dict=update_dict,
- push={"_admin.operations": self._create_operation("delete")}
- )
+ self.db.set_one(
+ self.topic,
+ {"_id": _id},
+ update_dict=update_dict,
+ push={"_admin.operations": self._create_operation("delete")},
+ )
# the number of operations is the operation_id. db_content does not contains the new operation inserted,
# so the -1 is not needed
- op_id = "{}:{}".format(db_content["_id"], len(db_content["_admin"]["operations"]))
- self._send_msg("delete", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg)
+ op_id = "{}:{}".format(
+ db_content["_id"], len(db_content["_admin"]["operations"])
+ )
+ self._send_msg(
+ "delete", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg
+ )
return op_id
schema_edit = vim_account_edit_schema
multiproject = True
password_to_encrypt = "vim_password"
- config_to_encrypt = {"1.1": ("admin_password", "nsx_password", "vcenter_password"),
- "default": ("admin_password", "nsx_password", "vcenter_password", "vrops_password")}
+ config_to_encrypt = {
+ "1.1": ("admin_password", "nsx_password", "vcenter_password"),
+ "default": (
+ "admin_password",
+ "nsx_password",
+ "vcenter_password",
+ "vrops_password",
+ ),
+ }
def check_conflict_on_del(self, session, _id, db_content):
"""
return
# check if used by VNF
if self.db.get_list("vnfrs", {"vim-account-id": _id}):
- raise EngineException("There is at least one VNF using this VIM account", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one VNF using this VIM account",
+ http_code=HTTPStatus.CONFLICT,
+ )
super().check_conflict_on_del(session, _id, db_content)
def _obtain_url(self, input, create):
if input.get("ip") or input.get("port"):
- if not input.get("ip") or not input.get("port") or input.get('url'):
- raise ValidationError("You must provide both 'ip' and 'port' (deprecated); or just 'url' (prefered)")
- input['url'] = "http://{}:{}/".format(input["ip"], input["port"])
+ if not input.get("ip") or not input.get("port") or input.get("url"):
+ raise ValidationError(
+ "You must provide both 'ip' and 'port' (deprecated); or just 'url' (prefered)"
+ )
+ input["url"] = "http://{}:{}/".format(input["ip"], input["port"])
del input["ip"]
del input["port"]
- elif create and not input.get('url'):
+ elif create and not input.get("url"):
raise ValidationError("You must provide 'url'")
return input
def format_on_new(self, content, project_id=None, make_public=False):
oid = super().format_on_new(content, project_id, make_public)
- self.db.encrypt_decrypt_fields(content["credentials"], 'encrypt', ['password', 'secret'],
- schema_version=content["schema_version"], salt=content["_id"])
+ self.db.encrypt_decrypt_fields(
+ content["credentials"],
+ "encrypt",
+ ["password", "secret"],
+ schema_version=content["schema_version"],
+ salt=content["_id"],
+ )
# Add Helm/Juju Repo lists
repos = {"helm-chart": [], "juju-bundle": []}
for proj in content["_admin"]["projects_read"]:
- if proj != 'ANY':
- for repo in self.db.get_list("k8srepos", {"_admin.projects_read": proj}):
+ if proj != "ANY":
+ for repo in self.db.get_list(
+ "k8srepos", {"_admin.projects_read": proj}
+ ):
if repo["_id"] not in repos[repo["type"]]:
repos[repo["type"]].append(repo["_id"])
for k in repos:
- content["_admin"][k.replace('-', '_')+"_repos"] = repos[k]
+ content["_admin"][k.replace("-", "_") + "_repos"] = repos[k]
return oid
def format_on_edit(self, final_content, edit_content):
if final_content.get("schema_version") and edit_content.get("credentials"):
- self.db.encrypt_decrypt_fields(edit_content["credentials"], 'encrypt', ['password', 'secret'],
- schema_version=final_content["schema_version"], salt=final_content["_id"])
- deep_update_rfc7396(final_content["credentials"], edit_content["credentials"])
+ self.db.encrypt_decrypt_fields(
+ edit_content["credentials"],
+ "encrypt",
+ ["password", "secret"],
+ schema_version=final_content["schema_version"],
+ salt=final_content["_id"],
+ )
+ deep_update_rfc7396(
+ final_content["credentials"], edit_content["credentials"]
+ )
oid = super().format_on_edit(final_content, edit_content)
return oid
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
- final_content = super(CommonVimWimSdn, self).check_conflict_on_edit(session, final_content, edit_content, _id)
- final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+ final_content = super(CommonVimWimSdn, self).check_conflict_on_edit(
+ session, final_content, edit_content, _id
+ )
+ final_content = super().check_conflict_on_edit(
+ session, final_content, edit_content, _id
+ )
# Update Helm/Juju Repo lists
repos = {"helm-chart": [], "juju-bundle": []}
for proj in session.get("set_project", []):
- if proj != 'ANY':
- for repo in self.db.get_list("k8srepos", {"_admin.projects_read": proj}):
+ if proj != "ANY":
+ for repo in self.db.get_list(
+ "k8srepos", {"_admin.projects_read": proj}
+ ):
if repo["_id"] not in repos[repo["type"]]:
repos[repo["type"]].append(repo["_id"])
for k in repos:
- rlist = k.replace('-', '_') + "_repos"
+ rlist = k.replace("-", "_") + "_repos"
if rlist not in final_content["_admin"]:
final_content["_admin"][rlist] = []
final_content["_admin"][rlist] += repos[k]
if session["project_id"]:
filter_q["_admin.projects_read.cont"] = session["project_id"]
if self.db.get_list("vnfrs", filter_q):
- raise EngineException("There is at least one VNF using this k8scluster", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one VNF using this k8scluster",
+ http_code=HTTPStatus.CONFLICT,
+ )
super().check_conflict_on_del(session, _id, db_content)
content["schema_version"] = schema_version = "1.11"
for key in ["secret", "cacert"]:
content[key] = self.db.encrypt(
- content[key],
- schema_version=schema_version,
- salt=content["_id"]
+ content[key], schema_version=schema_version, salt=content["_id"]
)
return oid
final_content[key] = self.db.encrypt(
edit_content[key],
schema_version=schema_version,
- salt=final_content["_id"]
+ salt=final_content["_id"],
)
return oid
if session["project_id"]:
filter_q["_admin.projects_read.cont"] = session["project_id"]
if self.db.get_list("vim_accounts", filter_q):
- raise EngineException("There is at least one VIM account using this vca", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one VIM account using this vca",
+ http_code=HTTPStatus.CONFLICT,
+ )
super().check_conflict_on_del(session, _id, db_content)
def format_on_new(self, content, project_id=None, make_public=False):
oid = super().format_on_new(content, project_id, make_public)
# Update Helm/Juju Repo lists
- repo_list = content["type"].replace('-', '_')+"_repos"
+ repo_list = content["type"].replace("-", "_") + "_repos"
for proj in content["_admin"]["projects_read"]:
- if proj != 'ANY':
- self.db.set_list("k8sclusters",
- {"_admin.projects_read": proj, "_admin."+repo_list+".ne": content["_id"]}, {},
- push={"_admin."+repo_list: content["_id"]})
+ if proj != "ANY":
+ self.db.set_list(
+ "k8sclusters",
+ {
+ "_admin.projects_read": proj,
+ "_admin." + repo_list + ".ne": content["_id"],
+ },
+ {},
+ push={"_admin." + repo_list: content["_id"]},
+ )
return oid
def delete(self, session, _id, dry_run=False, not_send_msg=None):
oid = super().delete(session, _id, dry_run, not_send_msg)
if oid:
# Remove from Helm/Juju Repo lists
- repo_list = type.replace('-', '_') + "_repos"
- self.db.set_list("k8sclusters", {"_admin."+repo_list: _id}, {}, pull={"_admin."+repo_list: _id})
+ repo_list = type.replace("-", "_") + "_repos"
+ self.db.set_list(
+ "k8sclusters",
+ {"_admin." + repo_list: _id},
+ {},
+ pull={"_admin." + repo_list: _id},
+ )
return oid
"""
username = indata.get("username")
if is_valid_uuid(username):
- raise EngineException("username '{}' cannot have a uuid format".format(username),
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "username '{}' cannot have a uuid format".format(username),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
# Check that username is not used, regardless keystone already checks this
if self.auth.get_user_list(filter_q={"name": username}):
- raise EngineException("username '{}' is already used".format(username), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "username '{}' is already used".format(username), HTTPStatus.CONFLICT
+ )
if "projects" in indata.keys():
# convert to new format project_role_mappings
if not role:
role = self.auth.get_role_list()
if not role:
- raise AuthconnNotFoundException("Can't find default role for user '{}'".format(username))
+ raise AuthconnNotFoundException(
+ "Can't find default role for user '{}'".format(username)
+ )
rid = role[0]["_id"]
if not indata.get("project_role_mappings"):
indata["project_role_mappings"] = []
if "username" in edit_content:
username = edit_content.get("username")
if is_valid_uuid(username):
- raise EngineException("username '{}' cannot have an uuid format".format(username),
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "username '{}' cannot have an uuid format".format(username),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
# Check that username is not used, regardless keystone already checks this
if self.auth.get_user_list(filter_q={"name": username}):
- raise EngineException("username '{}' is already used".format(username), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "username '{}' is already used".format(username),
+ HTTPStatus.CONFLICT,
+ )
if final_content["username"] == "admin":
for mapping in edit_content.get("remove_project_role_mappings", ()):
- if mapping["project"] == "admin" and mapping.get("role") in (None, "system_admin"):
+ if mapping["project"] == "admin" and mapping.get("role") in (
+ None,
+ "system_admin",
+ ):
# TODO make this also available for project id and role id
- raise EngineException("You cannot remove system_admin role from admin user",
- http_code=HTTPStatus.FORBIDDEN)
+ raise EngineException(
+ "You cannot remove system_admin role from admin user",
+ http_code=HTTPStatus.FORBIDDEN,
+ )
return final_content
:return: None if ok or raises EngineException with the conflict
"""
if db_content["username"] == session["username"]:
- raise EngineException("You cannot delete your own login user ", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "You cannot delete your own login user ", http_code=HTTPStatus.CONFLICT
+ )
# TODO: Check that user is not logged in ? How? (Would require listing current tokens)
@staticmethod
if "projects" in content:
for project in content["projects"]:
for role in project["roles"]:
- project_role_mappings.append({"project": project["_id"],
- "project_name": project["name"],
- "role": role["_id"],
- "role_name": role["name"]})
+ project_role_mappings.append(
+ {
+ "project": project["_id"],
+ "project_name": project["name"],
+ "role": role["_id"],
+ "role_name": role["name"],
+ }
+ )
del content["projects"]
content["project_role_mappings"] = project_role_mappings
# Allow _id to be a name or uuid
filter_q = {"username": _id}
# users = self.auth.get_user_list(filter_q)
- users = self.list(session, filter_q) # To allow default filtering (Bug 853)
+ users = self.list(session, filter_q) # To allow default filtering (Bug 853)
if len(users) == 1:
return users[0]
elif len(users) > 1:
- raise EngineException("Too many users found for '{}'".format(_id), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Too many users found for '{}'".format(_id), HTTPStatus.CONFLICT
+ )
else:
- raise EngineException("User '{}' not found".format(_id), HTTPStatus.NOT_FOUND)
+ raise EngineException(
+ "User '{}' not found".format(_id), HTTPStatus.NOT_FOUND
+ )
def edit(self, session, _id, indata=None, kwargs=None, content=None):
"""
content = self.check_conflict_on_edit(session, content, indata, _id=_id)
# self.format_on_edit(content, indata)
- if not ("password" in indata or "username" in indata or indata.get("remove_project_role_mappings") or
- indata.get("add_project_role_mappings") or indata.get("project_role_mappings") or
- indata.get("projects") or indata.get("add_projects")):
+ if not (
+ "password" in indata
+ or "username" in indata
+ or indata.get("remove_project_role_mappings")
+ or indata.get("add_project_role_mappings")
+ or indata.get("project_role_mappings")
+ or indata.get("projects")
+ or indata.get("add_projects")
+ ):
return _id
- if indata.get("project_role_mappings") \
- and (indata.get("remove_project_role_mappings") or indata.get("add_project_role_mappings")):
- raise EngineException("Option 'project_role_mappings' is incompatible with 'add_project_role_mappings"
- "' or 'remove_project_role_mappings'", http_code=HTTPStatus.BAD_REQUEST)
+ if indata.get("project_role_mappings") and (
+ indata.get("remove_project_role_mappings")
+ or indata.get("add_project_role_mappings")
+ ):
+ raise EngineException(
+ "Option 'project_role_mappings' is incompatible with 'add_project_role_mappings"
+ "' or 'remove_project_role_mappings'",
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
if indata.get("projects") or indata.get("add_projects"):
role = self.auth.get_role_list({"name": "project_admin"})
if not role:
role = self.auth.get_role_list()
if not role:
- raise AuthconnNotFoundException("Can't find a default role for user '{}'"
- .format(content["username"]))
+ raise AuthconnNotFoundException(
+ "Can't find a default role for user '{}'".format(
+ content["username"]
+ )
+ )
rid = role[0]["_id"]
if "add_project_role_mappings" not in indata:
indata["add_project_role_mappings"] = []
# backward compatible
for k, v in indata["projects"].items():
if k.startswith("$") and v is None:
- indata["remove_project_role_mappings"].append({"project": k[1:]})
+ indata["remove_project_role_mappings"].append(
+ {"project": k[1:]}
+ )
elif k.startswith("$+"):
- indata["add_project_role_mappings"].append({"project": v, "role": rid})
+ indata["add_project_role_mappings"].append(
+ {"project": v, "role": rid}
+ )
del indata["projects"]
for proj in indata.get("projects", []) + indata.get("add_projects", []):
- indata["add_project_role_mappings"].append({"project": proj, "role": rid})
+ indata["add_project_role_mappings"].append(
+ {"project": proj, "role": rid}
+ )
# user = self.show(session, _id) # Already in 'content'
original_mapping = content["project_role_mappings"]
# remove
for to_remove in indata.get("remove_project_role_mappings", ()):
for mapping in original_mapping:
- if to_remove["project"] in (mapping["project"], mapping["project_name"]):
- if not to_remove.get("role") or to_remove["role"] in (mapping["role"], mapping["role_name"]):
+ if to_remove["project"] in (
+ mapping["project"],
+ mapping["project_name"],
+ ):
+ if not to_remove.get("role") or to_remove["role"] in (
+ mapping["role"],
+ mapping["role_name"],
+ ):
mappings_to_remove.append(mapping)
# add
for to_add in indata.get("add_project_role_mappings", ()):
for mapping in original_mapping:
- if to_add["project"] in (mapping["project"], mapping["project_name"]) and \
- to_add["role"] in (mapping["role"], mapping["role_name"]):
-
- if mapping in mappings_to_remove: # do not remove
+ if to_add["project"] in (
+ mapping["project"],
+ mapping["project_name"],
+ ) and to_add["role"] in (
+ mapping["role"],
+ mapping["role_name"],
+ ):
+
+ if mapping in mappings_to_remove: # do not remove
mappings_to_remove.remove(mapping)
break # do not add, it is already at user
else:
if indata.get("project_role_mappings"):
for to_set in indata["project_role_mappings"]:
for mapping in original_mapping:
- if to_set["project"] in (mapping["project"], mapping["project_name"]) and \
- to_set["role"] in (mapping["role"], mapping["role_name"]):
- if mapping in mappings_to_remove: # do not remove
+ if to_set["project"] in (
+ mapping["project"],
+ mapping["project_name"],
+ ) and to_set["role"] in (
+ mapping["role"],
+ mapping["role_name"],
+ ):
+ if mapping in mappings_to_remove: # do not remove
mappings_to_remove.remove(mapping)
break # do not add, it is already at user
else:
mappings_to_add.append({"project": pid, "role": rid})
for mapping in original_mapping:
for to_set in indata["project_role_mappings"]:
- if to_set["project"] in (mapping["project"], mapping["project_name"]) and \
- to_set["role"] in (mapping["role"], mapping["role_name"]):
+ if to_set["project"] in (
+ mapping["project"],
+ mapping["project_name"],
+ ) and to_set["role"] in (
+ mapping["role"],
+ mapping["role_name"],
+ ):
break
else:
# delete
- if mapping not in mappings_to_remove: # do not remove
+ if mapping not in mappings_to_remove: # do not remove
mappings_to_remove.append(mapping)
- self.auth.update_user({"_id": _id, "username": indata.get("username"), "password": indata.get("password"),
- "add_project_role_mappings": mappings_to_add,
- "remove_project_role_mappings": mappings_to_remove
- })
- data_to_send = {'_id': _id, "changes": indata}
+ self.auth.update_user(
+ {
+ "_id": _id,
+ "username": indata.get("username"),
+ "password": indata.get("password"),
+ "add_project_role_mappings": mappings_to_add,
+ "remove_project_role_mappings": mappings_to_remove,
+ }
+ )
+ data_to_send = {"_id": _id, "changes": indata}
self._send_msg("edited", data_to_send, not_send_msg=None)
# return _id
user_list = self.auth.get_user_list(filter_q)
if not session["allow_show_user_project_role"]:
# Bug 853 - Default filtering
- user_list = [usr for usr in user_list if usr["username"] == session["username"]]
+ user_list = [
+ usr for usr in user_list if usr["username"] == session["username"]
+ ]
return user_list
def delete(self, session, _id, dry_run=False, not_send_msg=None):
"""
project_name = indata.get("name")
if is_valid_uuid(project_name):
- raise EngineException("project name '{}' cannot have an uuid format".format(project_name),
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "project name '{}' cannot have an uuid format".format(project_name),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
project_list = self.auth.get_project_list(filter_q={"name": project_name})
if project_list:
- raise EngineException("project '{}' exists".format(project_name), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "project '{}' exists".format(project_name), HTTPStatus.CONFLICT
+ )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
"""
project_name = edit_content.get("name")
if project_name != final_content["name"]: # It is a true renaming
if is_valid_uuid(project_name):
- raise EngineException("project name '{}' cannot have an uuid format".format(project_name),
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "project name '{}' cannot have an uuid format".format(project_name),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
if final_content["name"] == "admin":
- raise EngineException("You cannot rename project 'admin'", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "You cannot rename project 'admin'", http_code=HTTPStatus.CONFLICT
+ )
# Check that project name is not used, regardless keystone already checks this
- if project_name and self.auth.get_project_list(filter_q={"name": project_name}):
- raise EngineException("project '{}' is already used".format(project_name), HTTPStatus.CONFLICT)
+ if project_name and self.auth.get_project_list(
+ filter_q={"name": project_name}
+ ):
+ raise EngineException(
+ "project '{}' is already used".format(project_name),
+ HTTPStatus.CONFLICT,
+ )
return final_content
def check_conflict_on_del(self, session, _id, db_content):
def check_rw_projects(topic, title, id_field):
for desc in self.db.get_list(topic):
- if _id in desc["_admin"]["projects_read"] + desc["_admin"]["projects_write"]:
- raise EngineException("Project '{}' ({}) is being used by {} '{}'"
- .format(db_content["name"], _id, title, desc[id_field]), HTTPStatus.CONFLICT)
+ if (
+ _id
+ in desc["_admin"]["projects_read"]
+ + desc["_admin"]["projects_write"]
+ ):
+ raise EngineException(
+ "Project '{}' ({}) is being used by {} '{}'".format(
+ db_content["name"], _id, title, desc[id_field]
+ ),
+ HTTPStatus.CONFLICT,
+ )
if _id in session["project_id"]:
- raise EngineException("You cannot delete your own project", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "You cannot delete your own project", http_code=HTTPStatus.CONFLICT
+ )
if db_content["name"] == "admin":
- raise EngineException("You cannot delete project 'admin'", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "You cannot delete project 'admin'", http_code=HTTPStatus.CONFLICT
+ )
# If any user is using this project, raise CONFLICT exception
if not session["force"]:
for user in self.auth.get_user_list():
for prm in user.get("project_role_mappings"):
if prm["project"] == _id:
- raise EngineException("Project '{}' ({}) is being used by user '{}'"
- .format(db_content["name"], _id, user["username"]), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Project '{}' ({}) is being used by user '{}'".format(
+ db_content["name"], _id, user["username"]
+ ),
+ HTTPStatus.CONFLICT,
+ )
# If any VNFD, NSD, NST, PDU, etc. is using this project, raise CONFLICT exception
if not session["force"]:
BaseTopic._update_input_with_kwargs(content, kwargs)
content = self._validate_input_new(content, session["force"])
self.check_conflict_on_new(session, content)
- self.format_on_new(content, project_id=session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ content, project_id=session["project_id"], make_public=session["public"]
+ )
_id = self.auth.create_project(content)
rollback.append({"topic": self.topic, "_id": _id})
self._send_msg("created", content, not_send_msg=None)
# Allow _id to be a name or uuid
filter_q = {self.id_field(self.topic, _id): _id}
# projects = self.auth.get_project_list(filter_q=filter_q)
- projects = self.list(session, filter_q) # To allow default filtering (Bug 853)
+ projects = self.list(session, filter_q) # To allow default filtering (Bug 853)
if len(projects) == 1:
return projects[0]
elif len(projects) > 1:
class RoleTopicAuth(BaseTopic):
topic = "roles"
- topic_msg = None # "roles"
+ topic_msg = None # "roles"
schema_new = roles_new_schema
schema_edit = roles_edit_schema
multiproject = False
if role_def[-1] == ":":
raise ValidationError("Operation cannot end with ':'")
- match = next((op for op in operations if op == role_def or op.startswith(role_def + ":")), None)
+ match = next(
+ (
+ op
+ for op in operations
+ if op == role_def or op.startswith(role_def + ":")
+ ),
+ None,
+ )
if not match:
raise ValidationError("Invalid permission '{}'".format(role_def))
# check name is not uuid
role_name = indata.get("name")
if is_valid_uuid(role_name):
- raise EngineException("role name '{}' cannot have an uuid format".format(role_name),
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "role name '{}' cannot have an uuid format".format(role_name),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
# check name not exists
name = indata["name"]
# if self.db.get_one(self.topic, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False):
if self.auth.get_role_list({"name": name}):
- raise EngineException("role name '{}' exists".format(name), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "role name '{}' exists".format(name), HTTPStatus.CONFLICT
+ )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
"""
# check name is not uuid
role_name = edit_content.get("name")
if is_valid_uuid(role_name):
- raise EngineException("role name '{}' cannot have an uuid format".format(role_name),
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "role name '{}' cannot have an uuid format".format(role_name),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
# Check renaming of admin roles
role = self.auth.get_role(_id)
if role["name"] in ["system_admin", "project_admin"]:
- raise EngineException("You cannot rename role '{}'".format(role["name"]), http_code=HTTPStatus.FORBIDDEN)
+ raise EngineException(
+ "You cannot rename role '{}'".format(role["name"]),
+ http_code=HTTPStatus.FORBIDDEN,
+ )
# check name not exists
if "name" in edit_content:
# if self.db.get_one(self.topic, {"name":role_name,"_id.ne":_id}, fail_on_empty=False, fail_on_more=False):
roles = self.auth.get_role_list({"name": role_name})
if roles and roles[0][BaseTopic.id_field("roles", _id)] != _id:
- raise EngineException("role name '{}' exists".format(role_name), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "role name '{}' exists".format(role_name), HTTPStatus.CONFLICT
+ )
return final_content
"""
role = self.auth.get_role(_id)
if role["name"] in ["system_admin", "project_admin"]:
- raise EngineException("You cannot delete role '{}'".format(role["name"]), http_code=HTTPStatus.FORBIDDEN)
+ raise EngineException(
+ "You cannot delete role '{}'".format(role["name"]),
+ http_code=HTTPStatus.FORBIDDEN,
+ )
# If any user is using this role, raise CONFLICT exception
if not session["force"]:
for user in self.auth.get_user_list():
for prm in user.get("project_role_mappings"):
if prm["role"] == _id:
- raise EngineException("Role '{}' ({}) is being used by user '{}'"
- .format(role["name"], _id, user["username"]), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Role '{}' ({}) is being used by user '{}'".format(
+ role["name"], _id, user["username"]
+ ),
+ HTTPStatus.CONFLICT,
+ )
@staticmethod
- def format_on_new(content, project_id=None, make_public=False): # TO BE REMOVED ?
+ def format_on_new(content, project_id=None, make_public=False): # TO BE REMOVED ?
"""
Modifies content descriptor to include _admin
"""
filter_q = {BaseTopic.id_field(self.topic, _id): _id}
# roles = self.auth.get_role_list(filter_q)
- roles = self.list(session, filter_q) # To allow default filtering (Bug 853)
+ roles = self.list(session, filter_q) # To allow default filtering (Bug 853)
if not roles:
- raise AuthconnNotFoundException("Not found any role with filter {}".format(filter_q))
+ raise AuthconnNotFoundException(
+ "Not found any role with filter {}".format(filter_q)
+ )
elif len(roles) > 1:
- raise AuthconnConflictException("Found more than one role with filter {}".format(filter_q))
+ raise AuthconnConflictException(
+ "Found more than one role with filter {}".format(filter_q)
+ )
return roles[0]
def list(self, session, filter_q=None, api_req=False):
self._update_input_with_kwargs(content, kwargs)
content = self._validate_input_new(content, session["force"])
self.check_conflict_on_new(session, content)
- self.format_on_new(content, project_id=session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ content, project_id=session["project_id"], make_public=session["public"]
+ )
# role_name = content["name"]
rid = self.auth.create_role(content)
content["_id"] = rid
filter_q = {BaseTopic.id_field(self.topic, _id): _id}
roles = self.auth.get_role_list(filter_q)
if not roles:
- raise AuthconnNotFoundException("Not found any role with filter {}".format(filter_q))
+ raise AuthconnNotFoundException(
+ "Not found any role with filter {}".format(filter_q)
+ )
elif len(roles) > 1:
- raise AuthconnConflictException("Found more than one role with filter {}".format(filter_q))
+ raise AuthconnConflictException(
+ "Found more than one role with filter {}".format(filter_q)
+ )
rid = roles[0]["_id"]
self.check_conflict_on_del(session, rid, None)
# filter_q = {"_id": _id}
import yaml
from base64 import standard_b64decode
from copy import deepcopy
+
# from functools import reduce
from http import HTTPStatus
from time import time
This class must be threading safe
"""
- periodin_db_pruning = 60 * 30 # for the internal backend only. every 30 minutes expired tokens will be pruned
- token_limit = 500 # when reached, the token cache will be cleared
+ periodin_db_pruning = (
+ 60 * 30
+ ) # for the internal backend only. every 30 minutes expired tokens will be pruned
+ token_limit = 500 # when reached, the token cache will be cleared
def __init__(self, valid_methods, valid_query_string):
"""
self.db = None
self.msg = None
self.tokens_cache = dict()
- self.next_db_prune_time = 0 # time when next cleaning of expired tokens must be done
+ self.next_db_prune_time = (
+ 0 # time when next cleaning of expired tokens must be done
+ )
self.roles_to_operations_file = None
# self.roles_to_operations_table = None
self.resources_to_operations_mapping = {}
self.role_permissions = []
self.valid_methods = valid_methods
self.valid_query_string = valid_query_string
- self.system_admin_role_id = None # system_role id
+ self.system_admin_role_id = None # system_role id
self.test_project_id = None # test_project_id
def start(self, config):
self.db = dbmemory.DbMemory()
self.db.db_connect(config["database"])
else:
- raise AuthException("Invalid configuration param '{}' at '[database]':'driver'"
- .format(config["database"]["driver"]))
+ raise AuthException(
+ "Invalid configuration param '{}' at '[database]':'driver'".format(
+ config["database"]["driver"]
+ )
+ )
if not self.msg:
if config["message"]["driver"] == "local":
self.msg = msglocal.MsgLocal()
self.msg = msgkafka.MsgKafka()
self.msg.connect(config["message"])
else:
- raise AuthException("Invalid configuration param '{}' at '[message]':'driver'"
- .format(config["message"]["driver"]))
+ raise AuthException(
+ "Invalid configuration param '{}' at '[message]':'driver'".format(
+ config["message"]["driver"]
+ )
+ )
if not self.backend:
if config["authentication"]["backend"] == "keystone":
- self.backend = AuthconnKeystone(self.config["authentication"], self.db, self.role_permissions)
+ self.backend = AuthconnKeystone(
+ self.config["authentication"], self.db, self.role_permissions
+ )
elif config["authentication"]["backend"] == "internal":
- self.backend = AuthconnInternal(self.config["authentication"], self.db, self.role_permissions)
+ self.backend = AuthconnInternal(
+ self.config["authentication"], self.db, self.role_permissions
+ )
self._internal_tokens_prune("tokens")
elif config["authentication"]["backend"] == "tacacs":
- self.backend = AuthconnTacacs(self.config["authentication"], self.db, self.role_permissions)
+ self.backend = AuthconnTacacs(
+ self.config["authentication"], self.db, self.role_permissions
+ )
self._internal_tokens_prune("tokens_tacacs")
else:
- raise AuthException("Unknown authentication backend: {}"
- .format(config["authentication"]["backend"]))
+ raise AuthException(
+ "Unknown authentication backend: {}".format(
+ config["authentication"]["backend"]
+ )
+ )
if not self.roles_to_operations_file:
if "roles_to_operations" in config["rbac"]:
- self.roles_to_operations_file = config["rbac"]["roles_to_operations"]
+ self.roles_to_operations_file = config["rbac"][
+ "roles_to_operations"
+ ]
else:
possible_paths = (
- __file__[:__file__.rfind("auth.py")] + "roles_to_operations.yml",
- "./roles_to_operations.yml"
+ __file__[: __file__.rfind("auth.py")]
+ + "roles_to_operations.yml",
+ "./roles_to_operations.yml",
)
for config_file in possible_paths:
if path.isfile(config_file):
self.roles_to_operations_file = config_file
break
if not self.roles_to_operations_file:
- raise AuthException("Invalid permission configuration: roles_to_operations file missing")
+ raise AuthException(
+ "Invalid permission configuration: roles_to_operations file missing"
+ )
# load role_permissions
def load_role_permissions(method_dict):
for k in method_dict:
if k == "ROLE_PERMISSION":
- for method in chain(method_dict.get("METHODS", ()), method_dict.get("TODO", ())):
+ for method in chain(
+ method_dict.get("METHODS", ()), method_dict.get("TODO", ())
+ ):
permission = method_dict["ROLE_PERMISSION"] + method.lower()
if permission not in self.role_permissions:
self.role_permissions.append(permission)
self.role_permissions.append(permission)
# get ids of role system_admin and test project
- role_system_admin = self.db.get_one("roles", {"name": "system_admin"}, fail_on_empty=False)
+ role_system_admin = self.db.get_one(
+ "roles", {"name": "system_admin"}, fail_on_empty=False
+ )
if role_system_admin:
self.system_admin_role_id = role_system_admin["_id"]
- test_project_name = self.config["authentication"].get("project_not_authorized", "admin")
- test_project = self.db.get_one("projects", {"name": test_project_name}, fail_on_empty=False)
+ test_project_name = self.config["authentication"].get(
+ "project_not_authorized", "admin"
+ )
+ test_project = self.db.get_one(
+ "projects", {"name": test_project_name}, fail_on_empty=False
+ )
if test_project:
self.test_project_id = test_project["_id"]
project_desc["_id"] = str(uuid4())
project_desc["_admin"] = {"created": now, "modified": now}
pid = self.backend.create_project(project_desc)
- self.logger.info("Project '{}' created at database".format(project_desc["name"]))
+ self.logger.info(
+ "Project '{}' created at database".format(project_desc["name"])
+ )
return pid
def create_admin_user(self, project_id):
return None
# user_desc = {"username": "admin", "password": "admin", "projects": [project_id]}
now = time()
- user_desc = {"username": "admin", "password": "admin", "_admin": {"created": now, "modified": now}}
+ user_desc = {
+ "username": "admin",
+ "password": "admin",
+ "_admin": {"created": now, "modified": now},
+ }
if project_id:
pid = project_id
else:
# role = self.db.get_one("roles", {"name": "system_admin"}, fail_on_empty=False, fail_on_more=False)
roles = self.backend.get_role_list({"name": "system_admin"})
if pid and roles:
- user_desc["project_role_mappings"] = [{"project": pid, "role": roles[0]["_id"]}]
+ user_desc["project_role_mappings"] = [
+ {"project": pid, "role": roles[0]["_id"]}
+ ]
uid = self.backend.create_user(user_desc)
self.logger.info("User '{}' created at database".format(user_desc["username"]))
return uid
- def init_db(self, target_version='1.0'):
+ def init_db(self, target_version="1.0"):
"""
Check if the database has been initialized, with at least one user. If not, create the required tables
and insert the predefined mappings between roles and permissions.
records = self.backend.get_role_list()
# Loading permissions to AUTH. At lease system_admin must be present.
- if not records or not next((r for r in records if r["name"] == "system_admin"), None):
+ if not records or not next(
+ (r for r in records if r["name"] == "system_admin"), None
+ ):
with open(self.roles_to_operations_file, "r") as stream:
roles_to_operations_yaml = yaml.load(stream, Loader=yaml.Loader)
if role_with_operations["name"] not in role_names:
role_names.append(role_with_operations["name"])
else:
- raise AuthException("Duplicated role name '{}' at file '{}''"
- .format(role_with_operations["name"], self.roles_to_operations_file))
+ raise AuthException(
+ "Duplicated role name '{}' at file '{}''".format(
+ role_with_operations["name"], self.roles_to_operations_file
+ )
+ )
if not role_with_operations["permissions"]:
continue
- for permission, is_allowed in role_with_operations["permissions"].items():
+ for permission, is_allowed in role_with_operations[
+ "permissions"
+ ].items():
if not isinstance(is_allowed, bool):
- raise AuthException("Invalid value for permission '{}' at role '{}'; at file '{}'"
- .format(permission, role_with_operations["name"],
- self.roles_to_operations_file))
+ raise AuthException(
+ "Invalid value for permission '{}' at role '{}'; at file '{}'".format(
+ permission,
+ role_with_operations["name"],
+ self.roles_to_operations_file,
+ )
+ )
# TODO check permission is ok
if permission[-1] == ":":
- raise AuthException("Invalid permission '{}' terminated in ':' for role '{}'; at file {}"
- .format(permission, role_with_operations["name"],
- self.roles_to_operations_file))
+ raise AuthException(
+ "Invalid permission '{}' terminated in ':' for role '{}'; at file {}".format(
+ permission,
+ role_with_operations["name"],
+ self.roles_to_operations_file,
+ )
+ )
if "default" not in role_with_operations["permissions"]:
role_with_operations["permissions"]["default"] = False
# self.db.create(self.roles_to_operations_table, role_with_operations)
try:
self.backend.create_role(role_with_operations)
- self.logger.info("Role '{}' created".format(role_with_operations["name"]))
+ self.logger.info(
+ "Role '{}' created".format(role_with_operations["name"])
+ )
except (AuthException, AuthconnException) as e:
if role_with_operations["name"] == "system_admin":
raise
- self.logger.error("Role '{}' cannot be created: {}".format(role_with_operations["name"], e))
+ self.logger.error(
+ "Role '{}' cannot be created: {}".format(
+ role_with_operations["name"], e
+ )
+ )
# Create admin project&user if required
pid = self.create_admin_project()
if user_with_system_admin:
break
if not user_with_system_admin:
- self.backend.update_user({"_id": user_admin_id,
- "add_project_role_mappings": [{"project": pid, "role": role_id}]})
- self.logger.info("Added role system admin to user='{}' project=admin".format(user_admin_id))
+ self.backend.update_user(
+ {
+ "_id": user_admin_id,
+ "add_project_role_mappings": [
+ {"project": pid, "role": role_id}
+ ],
+ }
+ )
+ self.logger.info(
+ "Added role system admin to user='{}' project=admin".format(
+ user_admin_id
+ )
+ )
except Exception as e:
- self.logger.error("Error in Authorization DataBase initialization: {}: {}".format(type(e).__name__, e))
+ self.logger.error(
+ "Error in Authorization DataBase initialization: {}: {}".format(
+ type(e).__name__, e
+ )
+ )
self.load_operation_to_allowed_roles()
for record in records:
if not record.get("permissions"):
continue
- record_permissions = {oper: record["permissions"].get("default", False) for oper in self.role_permissions}
- operations_joined = [(oper, value) for oper, value in record["permissions"].items()
- if oper not in ignore_fields]
+ record_permissions = {
+ oper: record["permissions"].get("default", False)
+ for oper in self.role_permissions
+ }
+ operations_joined = [
+ (oper, value)
+ for oper, value in record["permissions"].items()
+ if oper not in ignore_fields
+ ]
operations_joined.sort(key=lambda x: x[0].count(":"))
for oper in operations_joined:
- match = list(filter(lambda x: x.find(oper[0]) == 0, record_permissions.keys()))
+ match = list(
+ filter(lambda x: x.find(oper[0]) == 0, record_permissions.keys())
+ )
for m in match:
record_permissions[m] = oper[1]
self.operation_to_allowed_roles = permissions
- def authorize(self, role_permission=None, query_string_operations=None, item_id=None):
+ def authorize(
+ self, role_permission=None, query_string_operations=None, item_id=None
+ ):
token = None
user_passwd64 = None
try:
token = cherrypy.session.get("Authorization")
if token == "logout":
token = None # force Unauthorized response to insert user password again
- elif user_passwd64 and cherrypy.request.config.get("auth.allow_basic_authentication"):
+ elif user_passwd64 and cherrypy.request.config.get(
+ "auth.allow_basic_authentication"
+ ):
# 3. Get new token from user password
user = None
passwd = None
user, _, passwd = user_passwd.partition(":")
except Exception:
pass
- outdata = self.new_token(None, {"username": user, "password": passwd})
+ outdata = self.new_token(
+ None, {"username": user, "password": passwd}
+ )
token = outdata["_id"]
- cherrypy.session['Authorization'] = token
+ cherrypy.session["Authorization"] = token
if not token:
- raise AuthException("Needed a token or Authorization http header",
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Needed a token or Authorization http header",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
# try to get from cache first
now = time()
# TODO add to token info remote host, port
if role_permission:
- RBAC_auth = self.check_permissions(token_info, cherrypy.request.method, role_permission,
- query_string_operations, item_id)
+ RBAC_auth = self.check_permissions(
+ token_info,
+ cherrypy.request.method,
+ role_permission,
+ query_string_operations,
+ item_id,
+ )
token_info["allow_show_user_project_role"] = RBAC_auth
return token_info
except AuthException as e:
if not isinstance(e, AuthExceptionUnauthorized):
- if cherrypy.session.get('Authorization'):
- del cherrypy.session['Authorization']
- cherrypy.response.headers["WWW-Authenticate"] = 'Bearer realm="{}"'.format(e)
+ if cherrypy.session.get("Authorization"):
+ del cherrypy.session["Authorization"]
+ cherrypy.response.headers[
+ "WWW-Authenticate"
+ ] = 'Bearer realm="{}"'.format(e)
if self.config["authentication"].get("user_not_authorized"):
- return {"id": "testing-token", "_id": "testing-token",
- "project_id": self.test_project_id,
- "username": self.config["authentication"]["user_not_authorized"],
- "roles": [self.system_admin_role_id],
- "admin": True, "allow_show_user_project_role": True}
+ return {
+ "id": "testing-token",
+ "_id": "testing-token",
+ "project_id": self.test_project_id,
+ "username": self.config["authentication"]["user_not_authorized"],
+ "roles": [self.system_admin_role_id],
+ "admin": True,
+ "allow_show_user_project_role": True,
+ }
raise
def new_token(self, token_info, indata, remote):
if not new_token_info.get("expires"):
new_token_info["expires"] = time() + 3600
if not new_token_info.get("admin"):
- new_token_info["admin"] = True if new_token_info.get("project_name") == "admin" else False
+ new_token_info["admin"] = (
+ True if new_token_info.get("project_name") == "admin" else False
+ )
# TODO put admin in RBAC
if remote.name:
return self._internal_get_token_list(token_info)
else:
# TODO: check if this can be avoided. Backend may provide enough information
- return [deepcopy(token) for token in self.tokens_cache.values()
- if token["username"] == token_info["username"]]
+ return [
+ deepcopy(token)
+ for token in self.tokens_cache.values()
+ if token["username"] == token_info["username"]
+ ]
def get_token(self, token_info, token):
if self.config["authentication"]["backend"] == "internal":
token_value = self.tokens_cache.get(token)
if not token_value:
raise AuthException("token not found", http_code=HTTPStatus.NOT_FOUND)
- if token_value["username"] != token_info["username"] and not token_info["admin"]:
- raise AuthException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ if (
+ token_value["username"] != token_info["username"]
+ and not token_info["admin"]
+ ):
+ raise AuthException(
+ "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+ )
return token_value
def del_token(self, token):
self.remove_token_from_cache(token)
return "token '{}' deleted".format(token)
except KeyError:
- raise AuthException("Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND)
-
- def check_permissions(self, token_info, method, role_permission=None, query_string_operations=None, item_id=None):
+ raise AuthException(
+ "Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND
+ )
+
+ def check_permissions(
+ self,
+ token_info,
+ method,
+ role_permission=None,
+ query_string_operations=None,
+ item_id=None,
+ ):
"""
Checks that operation has permissions to be done, base on the assigned roles to this user project
:param token_info: Dictionary that contains "roles" with a list of assigned roles.
if not query_string_operations:
return True
for query_string_operation in query_string_operations:
- if role not in self.operation_to_allowed_roles[query_string_operation]:
+ if (
+ role
+ not in self.operation_to_allowed_roles[query_string_operation]
+ ):
break
else:
return True
# User/Project/Role whole listings are filtered elsewhere
# uid, pid, rid = ("user_id", "project_id", "id") if is_valid_uuid(id) else ("username", "project_name", "name")
uid = "user_id" if is_valid_uuid(item_id) else "username"
- if (role_permission in ["projects:get", "projects:id:get", "roles:get", "roles:id:get", "users:get"]) \
- or (role_permission == "users:id:get" and item_id == token_info[uid]):
+ if (
+ role_permission
+ in [
+ "projects:get",
+ "projects:id:get",
+ "roles:get",
+ "roles:id:get",
+ "users:get",
+ ]
+ ) or (role_permission == "users:id:get" and item_id == token_info[uid]):
# or (role_permission == "projects:id:get" and item_id == token_info[pid]) \
# or (role_permission == "roles:id:get" and item_id in [role[rid] for role in token_info["roles"]]):
return False
if not operation_allowed:
raise AuthExceptionUnauthorized("Access denied: lack of permissions.")
else:
- raise AuthExceptionUnauthorized("Access denied: You have not permissions to use these admin query string")
+ raise AuthExceptionUnauthorized(
+ "Access denied: You have not permissions to use these admin query string"
+ )
def get_user_list(self):
return self.backend.get_user_list()
def _normalize_url(self, url, method):
# DEPRECATED !!!
# Removing query strings
- normalized_url = url if '?' not in url else url[:url.find("?")]
+ normalized_url = url if "?" not in url else url[: url.find("?")]
normalized_url_splitted = normalized_url.split("/")
parameters = {}
- filtered_keys = [key for key in self.resources_to_operations_mapping.keys()
- if method in key.split()[0]]
+ filtered_keys = [
+ key
+ for key in self.resources_to_operations_mapping.keys()
+ if method in key.split()[0]
+ ]
for idx, path_part in enumerate(normalized_url_splitted):
tmp_keys = []
if splitted[idx] == "<artifactPath>":
tmp_keys.append(tmp_key)
continue
- elif idx == len(normalized_url_splitted) - 1 and \
- len(normalized_url_splitted) != len(splitted):
+ elif idx == len(normalized_url_splitted) - 1 and len(
+ normalized_url_splitted
+ ) != len(splitted):
continue
else:
tmp_keys.append(tmp_key)
elif splitted[idx] == path_part:
- if idx == len(normalized_url_splitted) - 1 and \
- len(normalized_url_splitted) != len(splitted):
+ if idx == len(normalized_url_splitted) - 1 and len(
+ normalized_url_splitted
+ ) != len(splitted):
continue
else:
tmp_keys.append(tmp_key)
filtered_keys = tmp_keys
- if len(filtered_keys) == 1 and \
- filtered_keys[0].split("/")[-1] == "<artifactPath>":
+ if (
+ len(filtered_keys) == 1
+ and filtered_keys[0].split("/")[-1] == "<artifactPath>"
+ ):
break
if len(filtered_keys) == 0:
- raise AuthException("Cannot make an authorization decision. URL not found. URL: {0}".format(url))
+ raise AuthException(
+ "Cannot make an authorization decision. URL not found. URL: {0}".format(
+ url
+ )
+ )
elif len(filtered_keys) > 1:
- raise AuthException("Cannot make an authorization decision. Multiple URLs found. URL: {0}".format(url))
+ raise AuthException(
+ "Cannot make an authorization decision. Multiple URLs found. URL: {0}".format(
+ url
+ )
+ )
filtered_key = filtered_keys[0]
for idx, path_part in enumerate(filtered_key.split()[1].split("/")):
if "<" in path_part and ">" in path_part:
if path_part == "<artifactPath>":
- parameters[path_part[1:-1]] = "/".join(normalized_url_splitted[idx:])
+ parameters[path_part[1:-1]] = "/".join(
+ normalized_url_splitted[idx:]
+ )
else:
parameters[path_part[1:-1]] = normalized_url_splitted[idx]
def _internal_get_token_list(self, token_info):
now = time()
- token_list = self.db.get_list("tokens", {"username": token_info["username"], "expires.gt": now})
+ token_list = self.db.get_list(
+ "tokens", {"username": token_info["username"], "expires.gt": now}
+ )
return token_list
def _internal_get_token(self, token_info, token_id):
token_value = self.db.get_one("tokens", {"_id": token_id}, fail_on_empty=False)
if not token_value:
raise AuthException("token not found", http_code=HTTPStatus.NOT_FOUND)
- if token_value["username"] != token_info["username"] and not token_info["admin"]:
- raise AuthException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ if (
+ token_value["username"] != token_info["username"]
+ and not token_info["admin"]
+ ):
+ raise AuthException(
+ "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+ )
return token_value
def _internal_tokens_prune(self, token_collection, now=None):
plugins with the definition of the methods to be implemented.
"""
-__author__ = "Eduardo Sousa <esousa@whitestack.com>, " \
- "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+__author__ = (
+ "Eduardo Sousa <esousa@whitestack.com>, "
+ "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+)
__date__ = "$27-jul-2018 23:59:59$"
from http import HTTPStatus
"""
Authentication error, because token, user password not recognized
"""
+
def __init__(self, message, http_code=HTTPStatus.UNAUTHORIZED):
super(AuthException, self).__init__(message)
self.http_code = http_code
"""
Authentication error, because not having rights to make this operation
"""
+
pass
"""
Common and base class Exception for all authconn exceptions.
"""
+
def __init__(self, message, http_code=HTTPStatus.UNAUTHORIZED):
super(AuthconnException, self).__init__(message)
self.http_code = http_code
"""
Connectivity error with Auth backend.
"""
+
def __init__(self, message, http_code=HTTPStatus.BAD_GATEWAY):
super(AuthconnConnectionException, self).__init__(message, http_code)
"""
The request is not supported by the Auth backend.
"""
+
def __init__(self, message, http_code=HTTPStatus.NOT_IMPLEMENTED):
super(AuthconnNotSupportedException, self).__init__(message, http_code)
"""
The method is not implemented by the Auth backend.
"""
+
def __init__(self, message, http_code=HTTPStatus.NOT_IMPLEMENTED):
super(AuthconnNotImplementedException, self).__init__(message, http_code)
"""
The operation executed failed.
"""
+
def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR):
super(AuthconnOperationException, self).__init__(message, http_code)
"""
The operation executed failed because element not found.
"""
+
def __init__(self, message, http_code=HTTPStatus.NOT_FOUND):
super().__init__(message, http_code)
"""
The operation has conflicts.
"""
+
def __init__(self, message, http_code=HTTPStatus.CONFLICT):
super().__init__(message, http_code)
Each Auth backend connector plugin must be a subclass of
Authconn class.
"""
+
def __init__(self, config, db, role_permissions):
"""
Constructor of the Authconn class.
users = self.get_user_list(filt)
if not users:
if fail:
- raise AuthconnNotFoundException("User with {} not found".format(filt), http_code=HTTPStatus.NOT_FOUND)
+ raise AuthconnNotFoundException(
+ "User with {} not found".format(filt),
+ http_code=HTTPStatus.NOT_FOUND,
+ )
else:
return None
return users[0]
projs = self.get_project_list(filt)
if not projs:
if fail:
- raise AuthconnNotFoundException("project with {} not found".format(filt))
+ raise AuthconnNotFoundException(
+ "project with {} not found".format(filt)
+ )
else:
return None
return projs[0]
OSM Internal Authentication Backend and leverages the RBAC model
"""
-__author__ = "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>, " \
- "Alfonso Tierno <alfonso.tiernosepulveda@telefoncia.com"
+__author__ = (
+ "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>, "
+ "Alfonso Tierno <alfonso.tiernosepulveda@telefoncia.com"
+)
__date__ = "$06-jun-2019 11:16:08$"
import logging
import re
-from osm_nbi.authconn import Authconn, AuthException # , AuthconnOperationException
+from osm_nbi.authconn import Authconn, AuthException # , AuthconnOperationException
from osm_common.dbbase import DbException
from osm_nbi.base_topic import BaseTopic
from osm_nbi.validation import is_valid_uuid
class AuthconnInternal(Authconn):
- token_time_window = 2 # seconds
- token_delay = 1 # seconds to wait upon second request within time window
+ token_time_window = 2 # seconds
+ token_delay = 1 # seconds to wait upon second request within time window
users_collection = "users"
roles_collection = "roles"
try:
if not token:
- raise AuthException("Needed a token or Authorization HTTP header", http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Needed a token or Authorization HTTP header",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
now = time()
# if not token_info:
token_info = self.db.get_one(self.tokens_collection, {"_id": token})
if token_info["expires"] < now:
- raise AuthException("Expired Token or Authorization HTTP header", http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Expired Token or Authorization HTTP header",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
return token_info
except DbException as e:
if e.http_code == HTTPStatus.NOT_FOUND:
- raise AuthException("Invalid Token or Authorization HTTP header", http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Invalid Token or Authorization HTTP header",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
else:
raise
except AuthException:
raise
except Exception:
- self.logger.exception("Error during token validation using internal backend")
- raise AuthException("Error during token validation using internal backend",
- http_code=HTTPStatus.UNAUTHORIZED)
+ self.logger.exception(
+ "Error during token validation using internal backend"
+ )
+ raise AuthException(
+ "Error during token validation using internal backend",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
def revoke_token(self, token):
"""
return True
except DbException as e:
if e.http_code == HTTPStatus.NOT_FOUND:
- raise AuthException("Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND)
+ raise AuthException(
+ "Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND
+ )
else:
# raise
exmsg = "Error during token revocation using internal backend"
:param user: username of the user.
:param password: password to be validated.
"""
- user_rows = self.db.get_list(self.users_collection, {BaseTopic.id_field("users", user): user})
+ user_rows = self.db.get_list(
+ self.users_collection, {BaseTopic.id_field("users", user): user}
+ )
user_content = None
if user_rows:
user_content = user_rows[0]
salt = user_content["_admin"]["salt"]
- shadow_password = sha256(password.encode('utf-8') + salt.encode('utf-8')).hexdigest()
+ shadow_password = sha256(
+ password.encode("utf-8") + salt.encode("utf-8")
+ ).hexdigest()
if shadow_password != user_content["password"]:
user_content = None
return user_content
if user:
user_content = self.validate_user(user, password)
if not user_content:
- raise AuthException("Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED
+ )
if not user_content.get("_admin", None):
- raise AuthException("No default project for this user.", http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "No default project for this user.",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
elif token_info:
- user_rows = self.db.get_list(self.users_collection, {"username": token_info["username"]})
+ user_rows = self.db.get_list(
+ self.users_collection, {"username": token_info["username"]}
+ )
if user_rows:
user_content = user_rows[0]
else:
raise AuthException("Invalid token", http_code=HTTPStatus.UNAUTHORIZED)
else:
- raise AuthException("Provide credentials: username/password or Authorization Bearer token",
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Provide credentials: username/password or Authorization Bearer token",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
# Delay upon second request within time window
- if now - user_content["_admin"].get("last_token_time", 0) < self.token_time_window:
+ if (
+ now - user_content["_admin"].get("last_token_time", 0)
+ < self.token_time_window
+ ):
sleep(self.token_delay)
# user_content["_admin"]["last_token_time"] = now
# self.db.replace("users", user_content["_id"], user_content) # might cause race conditions
- self.db.set_one(self.users_collection,
- {"_id": user_content["_id"]}, {"_admin.last_token_time": now})
-
- token_id = ''.join(random_choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
- for _ in range(0, 32))
+ self.db.set_one(
+ self.users_collection,
+ {"_id": user_content["_id"]},
+ {"_admin.last_token_time": now},
+ )
+
+ token_id = "".join(
+ random_choice(
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
+ )
+ for _ in range(0, 32)
+ )
# projects = user_content.get("projects", [])
prm_list = user_content.get("project_role_mappings", [])
if not project:
project = prm_list[0]["project"] if prm_list else None
if not project:
- raise AuthException("can't find a default project for this user", http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "can't find a default project for this user",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
projects = [prm["project"] for prm in prm_list]
- proj = self.db.get_one(self.projects_collection,
- {BaseTopic.id_field("projects", project): project})
+ proj = self.db.get_one(
+ self.projects_collection, {BaseTopic.id_field("projects", project): project}
+ )
project_name = proj["name"]
project_id = proj["_id"]
if project_name not in projects and project_id not in projects:
- raise AuthException("project {} not allowed for this user".format(project),
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "project {} not allowed for this user".format(project),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
# TODO remove admin, this vill be used by roles RBAC
if project_name == "admin":
roles_list = []
for prm in prm_list:
if prm["project"] in [project_id, project_name]:
- role = self.db.get_one(self.roles_collection,
- {BaseTopic.id_field("roles", prm["role"]): prm["role"]})
+ role = self.db.get_one(
+ self.roles_collection,
+ {BaseTopic.id_field("roles", prm["role"]): prm["role"]},
+ )
rid = role["_id"]
if rid not in roles:
rnm = role["name"]
roles.append(rid)
roles_list.append({"name": rnm, "id": rid})
if not roles_list:
- rid = self.db.get_one(self.roles_collection, {"name": "project_admin"})["_id"]
+ rid = self.db.get_one(self.roles_collection, {"name": "project_admin"})[
+ "_id"
+ ]
roles_list = [{"name": "project_admin", "id": rid}]
- new_token = {"issued_at": now,
- "expires": now + 3600,
- "_id": token_id,
- "id": token_id,
- "project_id": proj["_id"],
- "project_name": proj["name"],
- "username": user_content["username"],
- "user_id": user_content["_id"],
- "admin": token_admin,
- "roles": roles_list,
- }
+ new_token = {
+ "issued_at": now,
+ "expires": now + 3600,
+ "_id": token_id,
+ "id": token_id,
+ "project_id": proj["_id"],
+ "project_name": proj["name"],
+ "username": user_content["username"],
+ "user_id": user_content["_id"],
+ "admin": token_admin,
+ "roles": roles_list,
+ }
self.db.create(self.tokens_collection, new_token)
return deepcopy(new_token)
salt = uuid4().hex
user_info["_admin"]["salt"] = salt
if "password" in user_info:
- user_info["password"] = sha256(user_info["password"].encode('utf-8') + salt.encode('utf-8')).hexdigest()
+ user_info["password"] = sha256(
+ user_info["password"].encode("utf-8") + salt.encode("utf-8")
+ ).hexdigest()
# "projects" are not stored any more
if "projects" in user_info:
del user_info["projects"]
:param user_info: user info modifications
"""
uid = user_info["_id"]
- user_data = self.db.get_one(self.users_collection, {BaseTopic.id_field("users", uid): uid})
+ user_data = self.db.get_one(
+ self.users_collection, {BaseTopic.id_field("users", uid): uid}
+ )
BaseTopic.format_on_edit(user_data, user_info)
# User Name
usnm = user_info.get("username")
user_data["username"] = usnm
# If password is given and is not already encripted
pswd = user_info.get("password")
- if pswd and (len(pswd) != 64 or not re.match('[a-fA-F0-9]*', pswd)): # TODO: Improve check?
+ if pswd and (
+ len(pswd) != 64 or not re.match("[a-fA-F0-9]*", pswd)
+ ): # TODO: Improve check?
salt = uuid4().hex
if "_admin" not in user_data:
user_data["_admin"] = {}
user_data["_admin"]["salt"] = salt
- user_data["password"] = sha256(pswd.encode('utf-8') + salt.encode('utf-8')).hexdigest()
+ user_data["password"] = sha256(
+ pswd.encode("utf-8") + salt.encode("utf-8")
+ ).hexdigest()
# Project-Role Mappings
# TODO: Check that user_info NEVER includes "project_role_mappings"
if "project_role_mappings" not in user_data:
for pidf in ["project", "project_name"]:
for ridf in ["role", "role_name"]:
try:
- user_data["project_role_mappings"].remove({"role": prm[ridf], "project": prm[pidf]})
+ user_data["project_role_mappings"].remove(
+ {"role": prm[ridf], "project": prm[pidf]}
+ )
except KeyError:
pass
except ValueError:
for prm in prms:
project_id = prm["project"]
if project_id not in project_id_name:
- pr = self.db.get_one(self.projects_collection,
- {BaseTopic.id_field("projects", project_id): project_id},
- fail_on_empty=False)
+ pr = self.db.get_one(
+ self.projects_collection,
+ {BaseTopic.id_field("projects", project_id): project_id},
+ fail_on_empty=False,
+ )
project_id_name[project_id] = pr["name"] if pr else None
prm["project_name"] = project_id_name[project_id]
if prm["project_name"] not in projects:
role_id = prm["role"]
if role_id not in role_id_name:
- role = self.db.get_one(self.roles_collection,
- {BaseTopic.id_field("roles", role_id): role_id},
- fail_on_empty=False)
+ role = self.db.get_one(
+ self.roles_collection,
+ {BaseTopic.id_field("roles", role_id): role_id},
+ fail_on_empty=False,
+ )
role_id_name[role_id] = role["name"] if role else None
prm["role_name"] = role_id_name[role_id]
user["projects"] = projects # for backward compatibility
elif projects:
# user created with an old version. Create a project_role mapping with role project_admin
user["project_role_mappings"] = []
- role = self.db.get_one(self.roles_collection,
- {BaseTopic.id_field("roles", "project_admin"): "project_admin"})
+ role = self.db.get_one(
+ self.roles_collection,
+ {BaseTopic.id_field("roles", "project_admin"): "project_admin"},
+ )
for p_id_name in projects:
- pr = self.db.get_one(self.projects_collection,
- {BaseTopic.id_field("projects", p_id_name): p_id_name})
- prm = {"project": pr["_id"],
- "project_name": pr["name"],
- "role_name": "project_admin",
- "role": role["_id"]
- }
+ pr = self.db.get_one(
+ self.projects_collection,
+ {BaseTopic.id_field("projects", p_id_name): p_id_name},
+ )
+ prm = {
+ "project": pr["_id"],
+ "project_name": pr["name"],
+ "role_name": "project_admin",
+ "role": role["_id"],
+ }
user["project_role_mappings"].append(prm)
else:
user["projects"] = []
:return: None
:raises AuthconnOperationException: if project update failed.
"""
- self.db.set_one(self.projects_collection, {BaseTopic.id_field("projects", project_id): project_id},
- project_info)
+ self.db.set_one(
+ self.projects_collection,
+ {BaseTopic.id_field("projects", project_id): project_id},
+ project_info,
+ )
"""
-__author__ = "Eduardo Sousa <esousa@whitestack.com>, " \
- "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+__author__ = (
+ "Eduardo Sousa <esousa@whitestack.com>, "
+ "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+)
__date__ = "$27-jul-2018 23:59:59$"
-from osm_nbi.authconn import Authconn, AuthException, AuthconnOperationException, AuthconnNotFoundException, \
- AuthconnConflictException
+from osm_nbi.authconn import (
+ Authconn,
+ AuthException,
+ AuthconnOperationException,
+ AuthconnNotFoundException,
+ AuthconnConflictException,
+)
import logging
import requests
if config.get("auth_url"):
validate_input(self.auth_url, http_schema)
else:
- self.auth_url = "http://{0}:{1}/v3".format(config.get("auth_host", "keystone"),
- config.get("auth_port", "5000"))
+ self.auth_url = "http://{0}:{1}/v3".format(
+ config.get("auth_host", "keystone"), config.get("auth_port", "5000")
+ )
self.user_domain_name_list = config.get("user_domain_name", "default")
self.user_domain_name_list = self.user_domain_name_list.split(",")
# read only domain list
- self.user_domain_ro_list = [x[:-3] for x in self.user_domain_name_list if x.endswith(":ro")]
+ self.user_domain_ro_list = [
+ x[:-3] for x in self.user_domain_name_list if x.endswith(":ro")
+ ]
# remove the ":ro"
- self.user_domain_name_list = [x if not x.endswith(":ro") else x[:-3] for x in self.user_domain_name_list]
+ self.user_domain_name_list = [
+ x if not x.endswith(":ro") else x[:-3] for x in self.user_domain_name_list
+ ]
self.admin_project = config.get("service_project", "service")
self.admin_username = config.get("service_username", "nbi")
self.project_domain_name_list = config.get("project_domain_name", "default")
self.project_domain_name_list = self.project_domain_name_list.split(",")
if len(self.user_domain_name_list) != len(self.project_domain_name_list):
- raise ValueError("Invalid configuration parameter fo authenticate. 'project_domain_name' and "
- "'user_domain_name' must be a comma-separated list with the same size. Revise "
- "configuration or/and 'OSMNBI_AUTHENTICATION_PROJECT_DOMAIN_NAME', "
- "'OSMNBI_AUTHENTICATION_USER_DOMAIN_NAME' Variables")
+ raise ValueError(
+ "Invalid configuration parameter fo authenticate. 'project_domain_name' and "
+ "'user_domain_name' must be a comma-separated list with the same size. Revise "
+ "configuration or/and 'OSMNBI_AUTHENTICATION_PROJECT_DOMAIN_NAME', "
+ "'OSMNBI_AUTHENTICATION_USER_DOMAIN_NAME' Variables"
+ )
# Waiting for Keystone to be up
available = None
if counter == 0:
raise AuthException("Keystone not available after 300s timeout")
- self.auth = v3.Password(user_domain_name=self.user_domain_name_list[0],
- username=self.admin_username,
- password=self.admin_password,
- project_domain_name=self.project_domain_name_list[0],
- project_name=self.admin_project,
- auth_url=self.auth_url)
+ self.auth = v3.Password(
+ user_domain_name=self.user_domain_name_list[0],
+ username=self.admin_username,
+ password=self.admin_password,
+ project_domain_name=self.project_domain_name_list[0],
+ project_name=self.admin_project,
+ auth_url=self.auth_url,
+ )
self.sess = session.Session(auth=self.auth)
- self.keystone = client.Client(session=self.sess, endpoint_override=self.auth_url)
+ self.keystone = client.Client(
+ session=self.sess, endpoint_override=self.auth_url
+ )
def authenticate(self, credentials, token_info=None):
"""
project_id = None
project_name = None
if credentials.get("project_domain_name"):
- project_domain_name_list = (credentials["project_domain_name"], )
+ project_domain_name_list = (credentials["project_domain_name"],)
else:
project_domain_name_list = self.project_domain_name_list
if credentials.get("user_domain_name"):
- user_domain_name_list = (credentials["user_domain_name"], )
+ user_domain_name_list = (credentials["user_domain_name"],)
else:
user_domain_name_list = self.user_domain_name_list
username=username,
password=credentials.get("password"),
user_domain_name=user_domain_name,
- project_domain_name=project_domain_name)
+ project_domain_name=project_domain_name,
+ )
elif token_info:
- unscoped_token = self.keystone.tokens.validate(token=token_info.get("_id"))
+ unscoped_token = self.keystone.tokens.validate(
+ token=token_info.get("_id")
+ )
else:
- raise AuthException("Provide credentials: username/password or Authorization Bearer token",
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Provide credentials: username/password or Authorization Bearer token",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
if not credentials.get("project_id"):
# get first project for the user
- project_list = self.keystone.projects.list(user=unscoped_token["user"]["id"])
+ project_list = self.keystone.projects.list(
+ user=unscoped_token["user"]["id"]
+ )
if not project_list:
- raise AuthException("The user {} has not any project and cannot be used for authentication".
- format(credentials.get("username")), http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "The user {} has not any project and cannot be used for authentication".format(
+ credentials.get("username")
+ ),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
project_id = project_list[0].id
else:
if is_valid_uuid(credentials["project_id"]):
project_id=project_id,
user_domain_name=user_domain_name,
project_domain_name=project_domain_name,
- token=unscoped_token["auth_token"])
+ token=unscoped_token["auth_token"],
+ )
auth_token = {
"_id": scoped_token.auth_token,
"project_domain_name": scoped_token.project_domain_name,
"user_domain_name": scoped_token.user_domain_name,
"expires": scoped_token.expires.timestamp(),
- "issued_at": scoped_token.issued.timestamp()
+ "issued_at": scoped_token.issued.timestamp(),
}
return auth_token
except ClientException as e:
- if index >= len(user_domain_name_list)-1 or index >= len(project_domain_name_list)-1:
+ if (
+ index >= len(user_domain_name_list) - 1
+ or index >= len(project_domain_name_list) - 1
+ ):
# if last try, launch exception
# self.logger.exception("Error during user authentication using keystone: {}".format(e))
- raise AuthException("Error during user authentication using Keystone: {}".format(e),
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Error during user authentication using Keystone: {}".format(e),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
def validate_token(self, token):
"""
"username": token_info["user"]["name"],
"roles": token_info["roles"],
"expires": token_info.expires.timestamp(),
- "issued_at": token_info.issued.timestamp()
+ "issued_at": token_info.issued.timestamp(),
}
return ses
except ClientException as e:
# self.logger.exception("Error during token validation using keystone: {}".format(e))
- raise AuthException("Error during token validation using Keystone: {}".format(e),
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Error during token validation using Keystone: {}".format(e),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
def revoke_token(self, token):
"""
return True
except ClientException as e:
# self.logger.exception("Error during token revocation using keystone: {}".format(e))
- raise AuthException("Error during token revocation using Keystone: {}".format(e),
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Error during token revocation using Keystone: {}".format(e),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
def _get_domain_id(self, domain_name, fail_if_not_found=True):
"""
# domain_name is already an id
return domain_name
if not domain_id and fail_if_not_found:
- raise AuthconnNotFoundException("Domain {} cannot be found".format(domain_name))
+ raise AuthconnNotFoundException(
+ "Domain {} cannot be found".format(domain_name)
+ )
return domain_id
def _get_domains(self):
"""
try:
- if user_info.get("domain_name") and user_info["domain_name"] in self.user_domain_ro_list:
- raise AuthconnConflictException("Cannot create a user in the read only domain {}".
- format(user_info["domain_name"]))
+ if (
+ user_info.get("domain_name")
+ and user_info["domain_name"] in self.user_domain_ro_list
+ ):
+ raise AuthconnConflictException(
+ "Cannot create a user in the read only domain {}".format(
+ user_info["domain_name"]
+ )
+ )
new_user = self.keystone.users.create(
- user_info["username"], password=user_info["password"],
- domain=self._get_domain_id(user_info.get("domain_name", self.user_domain_name_list[0])),
- _admin=user_info["_admin"])
+ user_info["username"],
+ password=user_info["password"],
+ domain=self._get_domain_id(
+ user_info.get("domain_name", self.user_domain_name_list[0])
+ ),
+ _admin=user_info["_admin"],
+ )
if "project_role_mappings" in user_info.keys():
for mapping in user_info["project_role_mappings"]:
- self.assign_role_to_user(new_user, mapping["project"], mapping["role"])
+ self.assign_role_to_user(
+ new_user, mapping["project"], mapping["role"]
+ )
return {"username": new_user.name, "_id": new_user.id}
except Conflict as e:
# self.logger.exception("Error during user creation using keystone: {}".format(e))
raise AuthconnOperationException(e, http_code=HTTPStatus.CONFLICT)
except ClientException as e:
# self.logger.exception("Error during user creation using keystone: {}".format(e))
- raise AuthconnOperationException("Error during user creation using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during user creation using Keystone: {}".format(e)
+ )
def update_user(self, user_info):
"""
user_obj = None
if not user_obj:
for user_domain in self.user_domain_name_list:
- domain_id = self._get_domain_id(user_domain, fail_if_not_found=False)
+ domain_id = self._get_domain_id(
+ user_domain, fail_if_not_found=False
+ )
if not domain_id:
continue
- user_obj_list = self.keystone.users.list(name=user, domain=domain_id)
+ user_obj_list = self.keystone.users.list(
+ name=user, domain=domain_id
+ )
if user_obj_list:
user_obj = user_obj_list[0]
break
- else: # user not found
+ else: # user not found
raise AuthconnNotFoundException("User '{}' not found".format(user))
user_id = user_obj.id
if domain_name in self.user_domain_ro_list:
if user_info.get("password") or user_info.get("username"):
- raise AuthconnConflictException("Cannot update the user {} belonging to a read only domain {}".
- format(user, domain_name))
-
- elif user_info.get("password") or user_info.get("username") \
- or user_info.get("add_project_role_mappings") or user_info.get("remove_project_role_mappings"):
+ raise AuthconnConflictException(
+ "Cannot update the user {} belonging to a read only domain {}".format(
+ user, domain_name
+ )
+ )
+
+ elif (
+ user_info.get("password")
+ or user_info.get("username")
+ or user_info.get("add_project_role_mappings")
+ or user_info.get("remove_project_role_mappings")
+ ):
# if user_index>0, it is an external domain, that should not be updated
- ctime = user_obj._admin.get("created", 0) if hasattr(user_obj, "_admin") else 0
+ ctime = (
+ user_obj._admin.get("created", 0)
+ if hasattr(user_obj, "_admin")
+ else 0
+ )
try:
- self.keystone.users.update(user_id, password=user_info.get("password"),
- name=user_info.get("username"),
- _admin={"created": ctime, "modified": time.time()})
+ self.keystone.users.update(
+ user_id,
+ password=user_info.get("password"),
+ name=user_info.get("username"),
+ _admin={"created": ctime, "modified": time.time()},
+ )
except Exception as e:
if user_info.get("username") or user_info.get("password"):
- raise AuthconnOperationException("Error during username/password change: {}".format(str(e)))
- self.logger.error("Error during updating user profile: {}".format(str(e)))
+ raise AuthconnOperationException(
+ "Error during username/password change: {}".format(str(e))
+ )
+ self.logger.error(
+ "Error during updating user profile: {}".format(str(e))
+ )
for mapping in user_info.get("remove_project_role_mappings", []):
- self.remove_role_from_user(user_obj, mapping["project"], mapping["role"])
+ self.remove_role_from_user(
+ user_obj, mapping["project"], mapping["role"]
+ )
for mapping in user_info.get("add_project_role_mappings", []):
self.assign_role_to_user(user_obj, mapping["project"], mapping["role"])
except ClientException as e:
# self.logger.exception("Error during user password/name update using keystone: {}".format(e))
- raise AuthconnOperationException("Error during user update using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during user update using Keystone: {}".format(e)
+ )
def delete_user(self, user_id):
"""
domain_id = user_obj.domain_id
domain_name = self.domains_id2name.get(domain_id)
if domain_name in self.user_domain_ro_list:
- raise AuthconnConflictException("Cannot delete user {} belonging to a read only domain {}".
- format(user_id, domain_name))
+ raise AuthconnConflictException(
+ "Cannot delete user {} belonging to a read only domain {}".format(
+ user_id, domain_name
+ )
+ )
result, detail = self.keystone.users.delete(user_id)
if result.status_code != 204:
return True
except ClientException as e:
# self.logger.exception("Error during user deletion using keystone: {}".format(e))
- raise AuthconnOperationException("Error during user deletion using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during user deletion using Keystone: {}".format(e)
+ )
def get_user_list(self, filter_q=None):
"""
if filter_q:
filter_name = filter_q.get("name") or filter_q.get("username")
if filter_q.get("domain_name"):
- filter_domain = self._get_domain_id(filter_q["domain_name"], fail_if_not_found=False)
+ filter_domain = self._get_domain_id(
+ filter_q["domain_name"], fail_if_not_found=False
+ )
# If domain is not found, use the same name to obtain an empty list
filter_domain = filter_domain or filter_q["domain_name"]
if filter_q.get("domain_id"):
# get users from user_domain_name_list[1:], because it will not be provided in case of LDAP
if filter_domain is None and len(self.user_domain_name_list) > 1:
for user_domain in self.user_domain_name_list[1:]:
- domain_id = self._get_domain_id(user_domain, fail_if_not_found=False)
+ domain_id = self._get_domain_id(
+ user_domain, fail_if_not_found=False
+ )
if not domain_id:
continue
# find if users of this domain are already provided. In this case ignore
if u.domain_id == domain_id:
break
else:
- users += self.keystone.users.list(name=filter_name, domain=domain_id)
+ users += self.keystone.users.list(
+ name=filter_name, domain=domain_id
+ )
# if filter name matches a user id, provide it also
if filter_name:
except Exception:
pass
- users = [{
- "username": user.name,
- "_id": user.id,
- "id": user.id,
- "_admin": user.to_dict().get("_admin", {}), # TODO: REVISE
- "domain_name": self.domains_id2name.get(user.domain_id)
- } for user in users if user.name != self.admin_username]
+ users = [
+ {
+ "username": user.name,
+ "_id": user.id,
+ "id": user.id,
+ "_admin": user.to_dict().get("_admin", {}), # TODO: REVISE
+ "domain_name": self.domains_id2name.get(user.domain_id),
+ }
+ for user in users
+ if user.name != self.admin_username
+ ]
if filter_q and filter_q.get("_id"):
users = [user for user in users if filter_q["_id"] == user["_id"]]
for project in projects:
user["projects"].append(project.name)
- roles = self.keystone.roles.list(user=user["_id"], project=project.id)
+ roles = self.keystone.roles.list(
+ user=user["_id"], project=project.id
+ )
for role in roles:
prm = {
"project": project.id,
return users
except ClientException as e:
# self.logger.exception("Error during user listing using keystone: {}".format(e))
- raise AuthconnOperationException("Error during user listing using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during user listing using Keystone: {}".format(e)
+ )
def get_role_list(self, filter_q=None):
"""
filter_name = filter_q.get("name")
roles_list = self.keystone.roles.list(name=filter_name)
- roles = [{
- "name": role.name,
- "_id": role.id,
- "_admin": role.to_dict().get("_admin", {}),
- "permissions": role.to_dict().get("permissions", {})
- } for role in roles_list if role.name != "service"]
+ roles = [
+ {
+ "name": role.name,
+ "_id": role.id,
+ "_admin": role.to_dict().get("_admin", {}),
+ "permissions": role.to_dict().get("permissions", {}),
+ }
+ for role in roles_list
+ if role.name != "service"
+ ]
if filter_q and filter_q.get("_id"):
roles = [role for role in roles if filter_q["_id"] == role["_id"]]
return roles
except ClientException as e:
# self.logger.exception("Error during user role listing using keystone: {}".format(e))
- raise AuthException("Error during user role listing using Keystone: {}".format(e),
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Error during user role listing using Keystone: {}".format(e),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
def create_role(self, role_info):
"""
:raises AuthconnOperationException: if role creation failed.
"""
try:
- result = self.keystone.roles.create(role_info["name"], permissions=role_info.get("permissions"),
- _admin=role_info.get("_admin"))
+ result = self.keystone.roles.create(
+ role_info["name"],
+ permissions=role_info.get("permissions"),
+ _admin=role_info.get("_admin"),
+ )
return result.id
except Conflict as ex:
raise AuthconnConflictException(str(ex))
except ClientException as e:
# self.logger.exception("Error during role creation using keystone: {}".format(e))
- raise AuthconnOperationException("Error during role creation using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during role creation using Keystone: {}".format(e)
+ )
def delete_role(self, role_id):
"""
return True
except ClientException as e:
# self.logger.exception("Error during role deletion using keystone: {}".format(e))
- raise AuthconnOperationException("Error during role deletion using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during role deletion using Keystone: {}".format(e)
+ )
def update_role(self, role_info):
"""
"""
try:
rid = role_info["_id"]
- if not is_valid_uuid(rid): # Is this required?
+ if not is_valid_uuid(rid): # Is this required?
role_obj_list = self.keystone.roles.list(name=rid)
if not role_obj_list:
raise AuthconnNotFoundException("Role '{}' not found".format(rid))
rid = role_obj_list[0].id
- self.keystone.roles.update(rid, name=role_info["name"], permissions=role_info.get("permissions"),
- _admin=role_info.get("_admin"))
+ self.keystone.roles.update(
+ rid,
+ name=role_info["name"],
+ permissions=role_info.get("permissions"),
+ _admin=role_info.get("_admin"),
+ )
except ClientException as e:
# self.logger.exception("Error during role update using keystone: {}".format(e))
- raise AuthconnOperationException("Error during role updating using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during role updating using Keystone: {}".format(e)
+ )
def get_project_list(self, filter_q=None):
"""
if filter_q.get("domain_id"):
filter_domain = filter_q["domain_id"]
- projects = self.keystone.projects.list(name=filter_name, domain=filter_domain)
+ projects = self.keystone.projects.list(
+ name=filter_name, domain=filter_domain
+ )
- projects = [{
- "name": project.name,
- "_id": project.id,
- "_admin": project.to_dict().get("_admin", {}), # TODO: REVISE
- "quotas": project.to_dict().get("quotas", {}), # TODO: REVISE
- "domain_name": self.domains_id2name.get(project.domain_id)
- } for project in projects]
+ projects = [
+ {
+ "name": project.name,
+ "_id": project.id,
+ "_admin": project.to_dict().get("_admin", {}), # TODO: REVISE
+ "quotas": project.to_dict().get("quotas", {}), # TODO: REVISE
+ "domain_name": self.domains_id2name.get(project.domain_id),
+ }
+ for project in projects
+ ]
if filter_q and filter_q.get("_id"):
- projects = [project for project in projects
- if filter_q["_id"] == project["_id"]]
+ projects = [
+ project for project in projects if filter_q["_id"] == project["_id"]
+ ]
return projects
except ClientException as e:
# self.logger.exception("Error during user project listing using keystone: {}".format(e))
- raise AuthException("Error during user project listing using Keystone: {}".format(e),
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "Error during user project listing using Keystone: {}".format(e),
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
def create_project(self, project_info):
"""
try:
result = self.keystone.projects.create(
project_info["name"],
- domain=self._get_domain_id(project_info.get("domain_name", self.project_domain_name_list[0])),
+ domain=self._get_domain_id(
+ project_info.get("domain_name", self.project_domain_name_list[0])
+ ),
_admin=project_info["_admin"],
- quotas=project_info.get("quotas", {})
+ quotas=project_info.get("quotas", {}),
)
return result.id
except ClientException as e:
# self.logger.exception("Error during project creation using keystone: {}".format(e))
- raise AuthconnOperationException("Error during project creation using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during project creation using Keystone: {}".format(e)
+ )
def delete_project(self, project_id):
"""
return True
except ClientException as e:
# self.logger.exception("Error during project deletion using keystone: {}".format(e))
- raise AuthconnOperationException("Error during project deletion using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during project deletion using Keystone: {}".format(e)
+ )
def update_project(self, project_id, project_info):
"""
:return: None
"""
try:
- self.keystone.projects.update(project_id, name=project_info["name"],
- _admin=project_info["_admin"],
- quotas=project_info.get("quotas", {})
- )
+ self.keystone.projects.update(
+ project_id,
+ name=project_info["name"],
+ _admin=project_info["_admin"],
+ quotas=project_info.get("quotas", {}),
+ )
except ClientException as e:
# self.logger.exception("Error during project update using keystone: {}".format(e))
- raise AuthconnOperationException("Error during project update using Keystone: {}".format(e))
+ raise AuthconnOperationException(
+ "Error during project update using Keystone: {}".format(e)
+ )
def assign_role_to_user(self, user_obj, project, role):
"""
except Exception:
project_obj_list = self.keystone.projects.list(name=project)
if not project_obj_list:
- raise AuthconnNotFoundException("Project '{}' not found".format(project))
+ raise AuthconnNotFoundException(
+ "Project '{}' not found".format(project)
+ )
project_obj = project_obj_list[0]
try:
self.keystone.roles.grant(role_obj, user=user_obj, project=project_obj)
except ClientException as e:
# self.logger.exception("Error during user role assignment using keystone: {}".format(e))
- raise AuthconnOperationException("Error during role '{}' assignment to user '{}' and project '{}' using "
- "Keystone: {}".format(role, user_obj.name, project, e))
+ raise AuthconnOperationException(
+ "Error during role '{}' assignment to user '{}' and project '{}' using "
+ "Keystone: {}".format(role, user_obj.name, project, e)
+ )
def remove_role_from_user(self, user_obj, project, role):
"""
except Exception:
project_obj_list = self.keystone.projects.list(name=project)
if not project_obj_list:
- raise AuthconnNotFoundException("Project '{}' not found".format(project))
+ raise AuthconnNotFoundException(
+ "Project '{}' not found".format(project)
+ )
project_obj = project_obj_list[0]
try:
self.keystone.roles.revoke(role_obj, user=user_obj, project=project_obj)
except ClientException as e:
# self.logger.exception("Error during user role revocation using keystone: {}".format(e))
- raise AuthconnOperationException("Error during role '{}' revocation to user '{}' and project '{}' using "
- "Keystone: {}".format(role, user_obj.name, project, e))
+ raise AuthconnOperationException(
+ "Error during role '{}' revocation to user '{}' and project '{}' using "
+ "Keystone: {}".format(role, user_obj.name, project, e)
+ )
##
-"""
+"""
AuthconnTacacs implements implements the connector for TACACS.
Leverages AuthconnInternal for token lifecycle management and the RBAC model.
When NBI bootstraps, it tries to create admin user with admin role associated to admin project.
Hence, the TACACS server should contain admin user.
-"""
+"""
__author__ = "K Sai Kiran <saikiran.k@tataelxsi.co.in>"
__date__ = "$11-Nov-2020 11:04:00$"
-from osm_nbi.authconn import Authconn, AuthException
+from osm_nbi.authconn import Authconn, AuthException
from osm_nbi.authconn_internal import AuthconnInternal
from osm_nbi.base_topic import BaseTopic
self.db = db
self.tacacs_host = config["tacacs_host"]
self.tacacs_secret = config["tacacs_secret"]
- self.tacacs_port = config["tacacs_port"] if config.get("tacacs_port") else self.tacacs_def_port
- self.tacacs_timeout = config["tacacs_timeout"] if config.get("tacacs_timeout") else self.tacacs_def_timeout
- self.tacacs_cli = TACACSClient(self.tacacs_host, self.tacacs_port, self.tacacs_secret,
- self.tacacs_timeout)
+ self.tacacs_port = (
+ config["tacacs_port"] if config.get("tacacs_port") else self.tacacs_def_port
+ )
+ self.tacacs_timeout = (
+ config["tacacs_timeout"]
+ if config.get("tacacs_timeout")
+ else self.tacacs_def_timeout
+ )
+ self.tacacs_cli = TACACSClient(
+ self.tacacs_host, self.tacacs_port, self.tacacs_secret, self.tacacs_timeout
+ )
def validate_user(self, user, password):
- """
- """
+ """"""
now = time()
try:
tacacs_authen = self.tacacs_cli.authenticate(user, password)
except Exception as e:
- raise AuthException("TACACS server error: {}".format(e), http_code=HTTPStatus.UNAUTHORIZED)
+ raise AuthException(
+ "TACACS server error: {}".format(e), http_code=HTTPStatus.UNAUTHORIZED
+ )
user_content = None
- user_rows = self.db.get_list(self.users_collection, {BaseTopic.id_field("users", user): user})
+ user_rows = self.db.get_list(
+ self.users_collection, {BaseTopic.id_field("users", user): user}
+ )
if not tacacs_authen.valid:
if user_rows:
# To remove TACACS stale user from system.
if user_rows:
user_content = user_rows[0]
else:
- new_user = {'username': user,
- 'password': password,
- '_admin': {
- 'created': now,
- 'modified': now
- },
- 'project_role_mappings': []
- }
+ new_user = {
+ "username": user,
+ "password": password,
+ "_admin": {"created": now, "modified": now},
+ "project_role_mappings": [],
+ }
user_content = self.create_user(new_user)
return user_content
"""
BaseTopic.format_on_new(user_info, make_public=False)
try:
- authen = self.tacacs_cli.authenticate(user_info["username"], user_info["password"])
+ authen = self.tacacs_cli.authenticate(
+ user_info["username"], user_info["password"]
+ )
if authen.valid:
user_info.pop("password")
self.db.create(self.users_collection, user_info)
else:
- raise AuthException("TACACS server error: Invalid credentials", http_code=HTTPStatus.FORBIDDEN)
+ raise AuthException(
+ "TACACS server error: Invalid credentials",
+ http_code=HTTPStatus.FORBIDDEN,
+ )
except Exception as e:
- raise AuthException("TACACS server error: {}".format(e), http_code=HTTPStatus.BAD_REQUEST)
+ raise AuthException(
+ "TACACS server error: {}".format(e), http_code=HTTPStatus.BAD_REQUEST
+ )
return {"username": user_info["username"], "_id": user_info["_id"]}
def update_user(self, user_info):
:param user_info: Full user information in dict.
:return: returns None for successful add/remove of project and role map.
"""
- if(user_info.get("username")):
- raise AuthException("Can not update username of this user", http_code=HTTPStatus.FORBIDDEN)
- if(user_info.get("password")):
- raise AuthException("Can not update password of this user", http_code=HTTPStatus.FORBIDDEN)
+ if user_info.get("username"):
+ raise AuthException(
+ "Can not update username of this user", http_code=HTTPStatus.FORBIDDEN
+ )
+ if user_info.get("password"):
+ raise AuthException(
+ "Can not update password of this user", http_code=HTTPStatus.FORBIDDEN
+ )
super(AuthconnTacacs, self).update_user(user_info)
class EngineException(Exception):
-
def __init__(self, message, http_code=HTTPStatus.BAD_REQUEST):
self.http_code = http_code
super(Exception, self).__init__(message)
if i > 0:
i += 1
# format in hex, len can be 2 for mac or 4 for ipv6
- return ("{}{:0" + str(len(ip_mac) - i) + "x}").format(ip_mac[:i], int(ip_mac[i:], 16) + vm_index)
+ return ("{}{:0" + str(len(ip_mac) - i) + "x}").format(
+ ip_mac[:i], int(ip_mac[i:], 16) + vm_index
+ )
except Exception:
pass
return None
class BaseTopic:
# static variables for all instance classes
- topic = None # to_override
- topic_msg = None # to_override
- quota_name = None # to_override. If not provided topic will be used for quota_name
- schema_new = None # to_override
+ topic = None # to_override
+ topic_msg = None # to_override
+ quota_name = None # to_override. If not provided topic will be used for quota_name
+ schema_new = None # to_override
schema_edit = None # to_override
multiproject = True # True if this Topic can be shared by several projects. Then it contains _admin.projects_read
default_quota = 500
# Alternative ID Fields for some Topics
- alt_id_field = {
- "projects": "name",
- "users": "username",
- "roles": "name"
- }
+ alt_id_field = {"projects": "name", "users": "username", "roles": "name"}
def __init__(self, db, fs, msg, auth):
self.db = db
count = self.db.count(self.topic, {"_admin.projects_read": pid})
if count >= quota:
name = proj["name"]
- raise ValidationError("quota ({}={}) exceeded for project {} ({})".format(quota_name, quota, name, pid),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise ValidationError(
+ "quota ({}={}) exceeded for project {} ({})".format(
+ quota_name, quota, name, pid
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
def _validate_input_new(self, input, force=False):
"""
not present or contains ANY mean public.
:param session: contains:
project_id: project list this session has rights to access. Can be empty, one or several
- set_project: items created will contain this project list
+ set_project: items created will contain this project list
force: True or False
public: True, False or None
method: "list", "show", "write", "delete"
project_filter_n.append(session["PROJECT.ne"])
if project_filter:
- if session["method"] in ("list", "show", "delete") or session.get("set_project"):
+ if session["method"] in ("list", "show", "delete") or session.get(
+ "set_project"
+ ):
p_filter["_admin.projects_read.cont"] = project_filter
else:
p_filter["_admin.projects_write.cont"] = project_filter
if project_filter_n:
- if session["method"] in ("list", "show", "delete") or session.get("set_project"):
+ if session["method"] in ("list", "show", "delete") or session.get(
+ "set_project"
+ ):
p_filter["_admin.projects_read.ncont"] = project_filter_n
else:
p_filter["_admin.projects_write.ncont"] = project_filter_n
return final_content
# Change public status
if session["public"] is not None:
- if session["public"] and "ANY" not in final_content["_admin"]["projects_read"]:
+ if (
+ session["public"]
+ and "ANY" not in final_content["_admin"]["projects_read"]
+ ):
final_content["_admin"]["projects_read"].append("ANY")
final_content["_admin"]["projects_write"].clear()
- if not session["public"] and "ANY" in final_content["_admin"]["projects_read"]:
+ if (
+ not session["public"]
+ and "ANY" in final_content["_admin"]["projects_read"]
+ ):
final_content["_admin"]["projects_read"].remove("ANY")
# Change project status
_filter["name"] = name
if _id:
_filter["_id.neq"] = _id
- if self.db.get_one(self.topic, _filter, fail_on_empty=False, fail_on_more=False):
- raise EngineException("name '{}' already exists for {}".format(name, self.topic), HTTPStatus.CONFLICT)
+ if self.db.get_one(
+ self.topic, _filter, fail_on_empty=False, fail_on_more=False
+ ):
+ raise EngineException(
+ "name '{}' already exists for {}".format(name, self.topic),
+ HTTPStatus.CONFLICT,
+ )
@staticmethod
def format_on_new(content, project_id=None, make_public=False):
kitem_old = int(kitem)
# if index greater than list, extend the list
if kitem_old >= len(update_content):
- update_content += [None] * (kitem_old - len(update_content) + 1)
+ update_content += [None] * (
+ kitem_old - len(update_content) + 1
+ )
if not isinstance(update_content[kitem_old], (dict, list)):
update_content[kitem_old] = {}
else:
raise EngineException(
- "Invalid query string '{}'. Descriptor is not a list nor dict at '{}'".format(k, kitem))
+ "Invalid query string '{}'. Descriptor is not a list nor dict at '{}'".format(
+ k, kitem
+ )
+ )
if v is None:
del update_content[kitem_old]
else:
update_content[kitem_old] = v if not yaml_format else safe_load(v)
except KeyError:
raise EngineException(
- "Invalid query string '{}'. Descriptor does not contain '{}'".format(k, kitem_old))
+ "Invalid query string '{}'. Descriptor does not contain '{}'".format(
+ k, kitem_old
+ )
+ )
except ValueError:
- raise EngineException("Invalid query string '{}'. Expected integer index list instead of '{}'".format(
- k, kitem))
+ raise EngineException(
+ "Invalid query string '{}'. Expected integer index list instead of '{}'".format(
+ k, kitem
+ )
+ )
except IndexError:
raise EngineException(
- "Invalid query string '{}'. Index '{}' out of range".format(k, kitem_old))
+ "Invalid query string '{}'. Index '{}' out of range".format(
+ k, kitem_old
+ )
+ )
except YAMLError:
raise EngineException("Invalid query string '{}' yaml format".format(k))
self.sol005_projection(data)
return data
-
+
# TODO transform data for SOL005 URL requests
# TODO remove _admin if not admin
:param accept_header: Content of Accept header. Must contain applition/zip or/and text/plain
:return: opened file or raises an exception
"""
- raise EngineException("Method get_file not valid for this topic", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method get_file not valid for this topic", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
def list(self, session, filter_q=None, api_req=False):
"""
# Only perform SOL005 projection if we are serving an external request
if api_req:
data = [self.sol005_projection(inst) for inst in data]
-
+
return data
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
self._update_input_with_kwargs(content, kwargs)
content = self._validate_input_new(content, force=session["force"])
self.check_conflict_on_new(session, content)
- op_id = self.format_on_new(content, project_id=session["project_id"], make_public=session["public"])
+ op_id = self.format_on_new(
+ content, project_id=session["project_id"], make_public=session["public"]
+ )
_id = self.db.create(self.topic, content)
rollback.append({"topic": self.topic, "_id": _id})
if op_id:
:return: True package has is completely uploaded or False if partial content has been uplodaed.
Raise exception on error
"""
- raise EngineException("Method upload_content not valid for this topic", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method upload_content not valid for this topic",
+ HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
def delete_list(self, session, filter_q=None):
"""
self.check_conflict_on_del(session, _id, item_content)
if dry_run:
return None
-
+
if self.multiproject and session["project_id"]:
# remove reference from project_read if there are more projects referencing it. If it last one,
# do not remove reference, but delete
- other_projects_referencing = next((p for p in item_content["_admin"]["projects_read"]
- if p not in session["project_id"] and p != "ANY"), None)
+ other_projects_referencing = next(
+ (
+ p
+ for p in item_content["_admin"]["projects_read"]
+ if p not in session["project_id"] and p != "ANY"
+ ),
+ None,
+ )
# check if there are projects referencing it (apart from ANY, that means, public)....
if other_projects_referencing:
# remove references but not delete
- update_dict_pull = {"_admin.projects_read": session["project_id"],
- "_admin.projects_write": session["project_id"]}
- self.db.set_one(self.topic, filter_q, update_dict=None, pull_list=update_dict_pull)
+ update_dict_pull = {
+ "_admin.projects_read": session["project_id"],
+ "_admin.projects_write": session["project_id"],
+ }
+ self.db.set_one(
+ self.topic, filter_q, update_dict=None, pull_list=update_dict_pull
+ )
return None
else:
- can_write = next((p for p in item_content["_admin"]["projects_write"] if p == "ANY" or
- p in session["project_id"]), None)
+ can_write = next(
+ (
+ p
+ for p in item_content["_admin"]["projects_write"]
+ if p == "ANY" or p in session["project_id"]
+ ),
+ None,
+ )
if not can_write:
- raise EngineException("You have not write permission to delete it",
- http_code=HTTPStatus.UNAUTHORIZED)
+ raise EngineException(
+ "You have not write permission to delete it",
+ http_code=HTTPStatus.UNAUTHORIZED,
+ )
# delete
self.db.del_one(self.topic, filter_q)
self._update_input_with_kwargs(indata, kwargs)
try:
if indata and session.get("set_project"):
- raise EngineException("Cannot edit content and set to project (query string SET_PROJECT) at same time",
- HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Cannot edit content and set to project (query string SET_PROJECT) at same time",
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
# TODO self._check_edition(session, indata, _id, force)
if not content:
content = self.show(session, _id)
import json
import importlib
import copy
+
# import logging
from hashlib import md5
from osm_common.dbbase import DbException, deep_update_rfc7396
from time import time
from uuid import uuid4
from re import fullmatch
-from osm_nbi.validation import ValidationError, pdu_new_schema, pdu_edit_schema, \
- validate_input, vnfpkgop_new_schema
+from osm_nbi.validation import (
+ ValidationError,
+ pdu_new_schema,
+ pdu_edit_schema,
+ validate_input,
+ vnfpkgop_new_schema,
+)
from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
+
etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd")
etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd")
from osm_im.nst import nst as nst_im
class DescriptorTopic(BaseTopic):
-
def __init__(self, db, fs, msg, auth):
BaseTopic.__init__(self, db, fs, msg, auth)
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
- final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+ final_content = super().check_conflict_on_edit(
+ session, final_content, edit_content, _id
+ )
def _check_unique_id_name(descriptor, position=""):
for desc_key, desc_item in descriptor.items():
desc_item_id = None
for index, list_item in enumerate(desc_item):
if isinstance(list_item, dict):
- _check_unique_id_name(list_item, "{}.{}[{}]"
- .format(position, desc_key, index))
+ _check_unique_id_name(
+ list_item, "{}.{}[{}]".format(position, desc_key, index)
+ )
# Base case
- if index == 0 and (list_item.get("id") or list_item.get("name")):
+ if index == 0 and (
+ list_item.get("id") or list_item.get("name")
+ ):
desc_item_id = "id" if list_item.get("id") else "name"
if desc_item_id and list_item.get(desc_item_id):
if list_item[desc_item_id] in used_ids:
- position = "{}.{}[{}]".format(position, desc_key, index)
- raise EngineException("Error: identifier {} '{}' is not unique and repeats at '{}'"
- .format(desc_item_id, list_item[desc_item_id],
- position), HTTPStatus.UNPROCESSABLE_ENTITY)
+ position = "{}.{}[{}]".format(
+ position, desc_key, index
+ )
+ raise EngineException(
+ "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
+ desc_item_id,
+ list_item[desc_item_id],
+ position,
+ ),
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
used_ids.append(list_item[desc_item_id])
_check_unique_id_name(final_content)
if k in final_content:
internal_keys[k] = final_content.pop(k)
storage_params = internal_keys["_admin"].get("storage")
- serialized = self._validate_input_new(final_content, storage_params, session["force"])
+ serialized = self._validate_input_new(
+ final_content, storage_params, session["force"]
+ )
# 1.2. modify final_content with a serialized version
final_content = copy.deepcopy(serialized)
_filter["_id.neq"] = _id
if self.db.get_one(self.topic, _filter, fail_on_empty=False):
- raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
- final_content["id"]),
- HTTPStatus.CONFLICT)
+ raise EngineException(
+ "{} with id '{}' already exists for this project".format(
+ self.topic[:-1], final_content["id"]
+ ),
+ HTTPStatus.CONFLICT,
+ )
return final_content
if len(desc_list) == 1:
return desc_list[0]
elif len(desc_list) > 1:
- raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
- HTTPStatus.CONFLICT)
+ raise DbException(
+ "Found more than one {} with id='{}' belonging to this project".format(
+ topic[:-1], id
+ ),
+ HTTPStatus.CONFLICT,
+ )
# not found any: try to find public
_filter = BaseTopic._get_project_filter(session)
_filter["id"] = id
desc_list = db.get_list(topic, _filter)
if not desc_list:
- raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found any {} with id='{}'".format(topic[:-1], id),
+ HTTPStatus.NOT_FOUND,
+ )
elif len(desc_list) == 1:
return desc_list[0]
else:
- raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
- topic[:-1], id), HTTPStatus.CONFLICT)
+ raise DbException(
+ "Found more than one public {} with id='{}'; and no one belonging to this project".format(
+ topic[:-1], id
+ ),
+ HTTPStatus.CONFLICT,
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
"""
# _remove_envelop
if indata:
if "userDefinedData" in indata:
- indata = indata['userDefinedData']
+ indata = indata["userDefinedData"]
# Override descriptor with query string kwargs
self._update_input_with_kwargs(indata, kwargs)
# indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
content = {"_admin": {"userDefinedData": indata}}
- self.format_on_new(content, session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ content, session["project_id"], make_public=session["public"]
+ )
_id = self.db.create(self.topic, content)
rollback.append({"topic": self.topic, "_id": _id})
self._send_msg("created", {"_id": _id})
expected_md5 = headers.get("Content-File-MD5")
compressed = None
content_type = headers.get("Content-Type")
- if content_type and "application/gzip" in content_type or "application/x-gzip" in content_type or \
- "application/zip" in content_type:
+ if (
+ content_type
+ and "application/gzip" in content_type
+ or "application/x-gzip" in content_type
+ or "application/zip" in content_type
+ ):
compressed = "gzip"
filename = headers.get("Content-Filename")
if not filename:
error_text = ""
try:
if content_range_text:
- content_range = content_range_text.replace("-", " ").replace("/", " ").split()
- if content_range[0] != "bytes": # TODO check x<y not negative < total....
+ content_range = (
+ content_range_text.replace("-", " ").replace("/", " ").split()
+ )
+ if (
+ content_range[0] != "bytes"
+ ): # TODO check x<y not negative < total....
raise IndexError()
start = int(content_range[1])
end = int(content_range[2]) + 1
total = int(content_range[3])
else:
start = 0
- temp_folder = _id + "_" # all the content is upload here and if ok, it is rename from id_ to is folder
+        temp_folder = (
+            _id + "_"
+        )  # all the content is uploaded here and, if ok, it is renamed from "id_" to the "id" folder
if start:
- if not self.fs.file_exists(temp_folder, 'dir'):
- raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
+ if not self.fs.file_exists(temp_folder, "dir"):
+ raise EngineException(
+ "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
+ )
else:
self.fs.file_delete(temp_folder, ignore_non_exist=True)
self.fs.mkdir(temp_folder)
storage["folder"] = _id
file_path = (temp_folder, filename)
- if self.fs.file_exists(file_path, 'file'):
+ if self.fs.file_exists(file_path, "file"):
file_size = self.fs.file_size(file_path)
else:
file_size = 0
if file_size != start:
- raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
- file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
- file_pkg = self.fs.file_open(file_path, 'a+b')
+ raise EngineException(
+ "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
+ file_size, start
+ ),
+ HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+ )
+ file_pkg = self.fs.file_open(file_path, "a+b")
if isinstance(indata, dict):
indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
file_pkg.write(indata_text.encode(encoding="utf-8"))
file_pkg.write(indata_text)
if content_range_text:
if indata_len != end - start:
- raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
- start, end - 1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+ raise EngineException(
+ "Mismatch between Content-Range header {}-{} and body length of {}".format(
+ start, end - 1, indata_len
+ ),
+ HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+ )
if end != total:
# TODO update to UPLOADING
return False
raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
file_pkg.seek(0, 0)
if compressed == "gzip":
- tar = tarfile.open(mode='r', fileobj=file_pkg)
+ tar = tarfile.open(mode="r", fileobj=file_pkg)
descriptor_file_name = None
for tarinfo in tar:
tarname = tarinfo.name
tarname_path = tarname.split("/")
- if not tarname_path[0] or ".." in tarname_path: # if start with "/" means absolute path
- raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
+                if (
+                    not tarname_path[0] or ".." in tarname_path
+                ):  # an empty first component means the path started with "/" (absolute); ".." escapes the package dir
+ raise EngineException(
+ "Absolute path or '..' are not allowed for package descriptor tar.gz"
+ )
if len(tarname_path) == 1 and not tarinfo.isdir():
- raise EngineException("All files must be inside a dir for package descriptor tar.gz")
- if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
+ raise EngineException(
+ "All files must be inside a dir for package descriptor tar.gz"
+ )
+ if (
+ tarname.endswith(".yaml")
+ or tarname.endswith(".json")
+ or tarname.endswith(".yml")
+ ):
storage["pkg-dir"] = tarname_path[0]
if len(tarname_path) == 2:
if descriptor_file_name:
raise EngineException(
- "Found more than one descriptor file at package descriptor tar.gz")
+ "Found more than one descriptor file at package descriptor tar.gz"
+ )
descriptor_file_name = tarname
if not descriptor_file_name:
- raise EngineException("Not found any descriptor file at package descriptor tar.gz")
+ raise EngineException(
+ "Not found any descriptor file at package descriptor tar.gz"
+ )
storage["descriptor"] = descriptor_file_name
storage["zipfile"] = filename
self.fs.file_extract(tar, temp_folder)
- with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
+ with self.fs.file_open(
+ (temp_folder, descriptor_file_name), "r"
+ ) as descriptor_file:
content = descriptor_file.read()
else:
content = file_pkg.read()
self._update_input_with_kwargs(indata, kwargs)
deep_update_rfc7396(current_desc, indata)
- current_desc = self.check_conflict_on_edit(session, current_desc, indata, _id=_id)
+ current_desc = self.check_conflict_on_edit(
+ session, current_desc, indata, _id=_id
+ )
current_desc["_admin"]["modified"] = time()
self.db.replace(self.topic, _id, current_desc)
self.fs.dir_rename(temp_folder, _id)
except EngineException:
raise
except IndexError:
- raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
- HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+ raise EngineException(
+ "invalid Content-Range header format. Expected 'bytes start-end/total'",
+ HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+ )
except IOError as e:
- raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
+ raise EngineException(
+ "invalid upload transaction sequence: '{}'".format(e),
+ HTTPStatus.BAD_REQUEST,
+ )
except tarfile.ReadError as e:
- raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
+ raise EngineException(
+ "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
+ )
except (ValueError, yaml.YAMLError) as e:
raise EngineException(error_text + str(e))
except ValidationError as e:
"""
accept_text = accept_zip = False
if accept_header:
- if 'text/plain' in accept_header or '*/*' in accept_header:
+ if "text/plain" in accept_header or "*/*" in accept_header:
accept_text = True
- if 'application/zip' in accept_header or '*/*' in accept_header:
- accept_zip = 'application/zip'
- elif 'application/gzip' in accept_header:
- accept_zip = 'application/gzip'
+ if "application/zip" in accept_header or "*/*" in accept_header:
+ accept_zip = "application/zip"
+ elif "application/gzip" in accept_header:
+ accept_zip = "application/gzip"
if not accept_text and not accept_zip:
- raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
- http_code=HTTPStatus.NOT_ACCEPTABLE)
+ raise EngineException(
+ "provide request header 'Accept' with 'application/zip' or 'text/plain'",
+ http_code=HTTPStatus.NOT_ACCEPTABLE,
+ )
content = self.show(session, _id)
if content["_admin"]["onboardingState"] != "ONBOARDED":
- raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
- "onboardingState is {}".format(content["_admin"]["onboardingState"]),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Cannot get content because this resource is not at 'ONBOARDED' state. "
+ "onboardingState is {}".format(content["_admin"]["onboardingState"]),
+ http_code=HTTPStatus.CONFLICT,
+ )
storage = content["_admin"]["storage"]
if path is not None and path != "$DESCRIPTOR": # artifacts
- if not storage.get('pkg-dir'):
- raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
- if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
- folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
+ if not storage.get("pkg-dir"):
+ raise EngineException(
+ "Packages does not contains artifacts",
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
+ if self.fs.file_exists(
+ (storage["folder"], storage["pkg-dir"], *path), "dir"
+ ):
+ folder_content = self.fs.dir_ls(
+ (storage["folder"], storage["pkg-dir"], *path)
+ )
return folder_content, "text/plain"
# TODO manage folders in http
else:
- return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"), \
- "application/octet-stream"
+ return (
+ self.fs.file_open(
+ (storage["folder"], storage["pkg-dir"], *path), "rb"
+ ),
+ "application/octet-stream",
+ )
# pkgtype accept ZIP TEXT -> result
# manyfiles yes X -> zip
# onefile yes no -> zip
# X yes -> text
contain_many_files = False
- if storage.get('pkg-dir'):
+ if storage.get("pkg-dir"):
# check if there are more than one file in the package, ignoring checksums.txt.
- pkg_files = self.fs.dir_ls((storage['folder'], storage['pkg-dir']))
- if len(pkg_files) >= 3 or (len(pkg_files) == 2 and 'checksums.txt' not in pkg_files):
+ pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
+ if len(pkg_files) >= 3 or (
+ len(pkg_files) == 2 and "checksums.txt" not in pkg_files
+ ):
contain_many_files = True
if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
- return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
+ return (
+ self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
+ "text/plain",
+ )
elif contain_many_files and not accept_zip:
- raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
- "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
+ raise EngineException(
+ "Packages that contains several files need to be retrieved with 'application/zip'"
+ "Accept header",
+ http_code=HTTPStatus.NOT_ACCEPTABLE,
+ )
else:
- if not storage.get('zipfile'):
+ if not storage.get("zipfile"):
# TODO generate zipfile if not present
- raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
- "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
- return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
+ raise EngineException(
+ "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
+ "future versions",
+ http_code=HTTPStatus.NOT_ACCEPTABLE,
+ )
+ return (
+ self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
+ accept_zip,
+ )
def _remove_yang_prefixes_from_descriptor(self, descriptor):
new_descriptor = {}
new_v.append(self._remove_yang_prefixes_from_descriptor(x))
else:
new_v.append(x)
- new_descriptor[k.split(':')[-1]] = new_v
+ new_descriptor[k.split(":")[-1]] = new_v
return new_descriptor
def pyangbind_validation(self, item, data, force=False):
- raise EngineException("Not possible to validate '{}' item".format(item),
- http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Not possible to validate '{}' item".format(item),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
def _validate_input_edit(self, indata, content, force=False):
# not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
if indata["operationalState"] in ("ENABLED", "DISABLED"):
indata["_admin"]["operationalState"] = indata.pop("operationalState")
else:
- raise EngineException("State '{}' is not a valid operational state"
- .format(indata["operationalState"]),
- http_code=HTTPStatus.BAD_REQUEST)
-
- # In the case of user defined data, we need to put the data in the root of the object
+ raise EngineException(
+ "State '{}' is not a valid operational state".format(
+ indata["operationalState"]
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
+
+ # In the case of user defined data, we need to put the data in the root of the object
# to preserve current expected behaviour
if "userDefinedData" in indata:
data = indata.pop("userDefinedData")
if type(data) == dict:
indata["_admin"]["userDefinedData"] = data
else:
- raise EngineException("userDefinedData should be an object, but is '{}' instead"
- .format(type(data)),
- http_code=HTTPStatus.BAD_REQUEST)
-
- if ("operationalState" in indata["_admin"] and
- content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
- raise EngineException("operationalState already {}".format(content["_admin"]["operationalState"]),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "userDefinedData should be an object, but is '{}' instead".format(
+ type(data)
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
+
+ if (
+ "operationalState" in indata["_admin"]
+ and content["_admin"]["operationalState"]
+ == indata["_admin"]["operationalState"]
+ ):
+ raise EngineException(
+ "operationalState already {}".format(
+ content["_admin"]["operationalState"]
+ ),
+ http_code=HTTPStatus.CONFLICT,
+ )
return indata
def pyangbind_validation(self, item, data, force=False):
if self._descriptor_data_is_in_old_format(data):
- raise EngineException("ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
try:
myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
- pybindJSONDecoder.load_ietf_json({'etsi-nfv-vnfd:vnfd': data}, None, None, obj=myvnfd,
- path_helper=True, skip_unknown=force)
+ pybindJSONDecoder.load_ietf_json(
+ {"etsi-nfv-vnfd:vnfd": data},
+ None,
+ None,
+ obj=myvnfd,
+ path_helper=True,
+ skip_unknown=force,
+ )
out = pybindJSON.dumps(myvnfd, mode="ietf")
desc_out = self._remove_envelop(yaml.safe_load(out))
desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
return utils.deep_update_dict(data, desc_out)
except Exception as e:
- raise EngineException("Error in pyangbind validation: {}".format(str(e)),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Error in pyangbind validation: {}".format(str(e)),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
@staticmethod
def _descriptor_data_is_in_old_format(data):
- return ('vnfd-catalog' in data) or ('vnfd:vnfd-catalog' in data)
+ return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
@staticmethod
def _remove_envelop(indata=None):
return {}
clean_indata = indata
- if clean_indata.get('etsi-nfv-vnfd:vnfd'):
- if not isinstance(clean_indata['etsi-nfv-vnfd:vnfd'], dict):
+ if clean_indata.get("etsi-nfv-vnfd:vnfd"):
+ if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
- clean_indata = clean_indata['etsi-nfv-vnfd:vnfd']
- elif clean_indata.get('vnfd'):
- if not isinstance(clean_indata['vnfd'], dict):
+ clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
+ elif clean_indata.get("vnfd"):
+ if not isinstance(clean_indata["vnfd"], dict):
raise EngineException("'vnfd' must be dict")
- clean_indata = clean_indata['vnfd']
+ clean_indata = clean_indata["vnfd"]
return clean_indata
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
- final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+ final_content = super().check_conflict_on_edit(
+ session, final_content, edit_content, _id
+ )
# set type of vnfd
contains_pdu = False
# check vnfrs using this vnfd
_filter["vnfd-id"] = _id
if self.db.get_list("vnfrs", _filter):
- raise EngineException("There is at least one VNF instance using this descriptor",
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one VNF instance using this descriptor",
+ http_code=HTTPStatus.CONFLICT,
+ )
# check NSD referencing this VNFD
del _filter["vnfd-id"]
_filter["vnfd-id"] = descriptor_id
if self.db.get_list("nsds", _filter):
- raise EngineException("There is at least one NS package referencing this descriptor",
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one NS package referencing this descriptor",
+ http_code=HTTPStatus.CONFLICT,
+ )
def _validate_input_new(self, indata, storage_params, force=False):
indata.pop("onboardingState", None)
if not indata.get("vdu"):
return
if not indata.get("mgmt-cp"):
- raise EngineException("'mgmt-cp' is a mandatory field and it is not defined",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "'mgmt-cp' is a mandatory field and it is not defined",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
for cp in get_iterable(indata.get("ext-cpd")):
if cp["id"] == indata["mgmt-cp"]:
break
else:
- raise EngineException("mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
@staticmethod
def validate_vdu_internal_connection_points(vdu):
for cpd in get_iterable(vdu.get("int-cpd")):
cpd_id = cpd.get("id")
if cpd_id and cpd_id in int_cpds:
- raise EngineException("vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd"
- .format(vdu["id"], cpd_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
+ vdu["id"], cpd_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
int_cpds.add(cpd_id)
@staticmethod
for cpd in get_iterable(indata.get("ext-cpd")):
cpd_id = cpd.get("id")
if cpd_id and cpd_id in ext_cpds:
- raise EngineException("ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
ext_cpds.add(cpd_id)
int_cpd = cpd.get("int-cpd")
if int_cpd:
if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
- raise EngineException("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(cpd_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+ cpd_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
# TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
def _validate_vdu_charms_in_package(self, storage_params, indata):
for df in indata["df"]:
- if "lcm-operations-configuration" in df and "operate-vnf-op-config" in df["lcm-operations-configuration"]:
- configs = df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", [])
+ if (
+ "lcm-operations-configuration" in df
+ and "operate-vnf-op-config" in df["lcm-operations-configuration"]
+ ):
+ configs = df["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ].get("day1-2", [])
vdus = df.get("vdu-profile", [])
for vdu in vdus:
for config in configs:
if config["id"] == vdu["id"] and utils.find_in_list(
config.get("execution-environment-list", []),
- lambda ee: "juju" in ee
+ lambda ee: "juju" in ee,
):
- if not self._validate_package_folders(storage_params, 'charms'):
- raise EngineException("Charm defined in vnf[id={}] but not present in "
- "package".format(indata["id"]))
+ if not self._validate_package_folders(
+ storage_params, "charms"
+ ):
+ raise EngineException(
+ "Charm defined in vnf[id={}] but not present in "
+ "package".format(indata["id"])
+ )
def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
if not vdu.get("cloud-init-file"):
return
- if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
- raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
- "package".format(indata["id"], vdu["id"]))
+ if not self._validate_package_folders(
+ storage_params, "cloud_init", vdu["cloud-init-file"]
+ ):
+ raise EngineException(
+ "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
+ "package".format(indata["id"], vdu["id"])
+ )
def _validate_vnf_charms_in_package(self, storage_params, indata):
# Get VNF configuration through new container
- for deployment_flavor in indata.get('df', []):
+ for deployment_flavor in indata.get("df", []):
if "lcm-operations-configuration" not in deployment_flavor:
return
- if "operate-vnf-op-config" not in deployment_flavor["lcm-operations-configuration"]:
+ if (
+ "operate-vnf-op-config"
+ not in deployment_flavor["lcm-operations-configuration"]
+ ):
return
- for day_1_2_config in deployment_flavor["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]:
+ for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"]:
if day_1_2_config["id"] == indata["id"]:
if utils.find_in_list(
day_1_2_config.get("execution-environment-list", []),
- lambda ee: "juju" in ee
+ lambda ee: "juju" in ee,
):
- if not self._validate_package_folders(storage_params, 'charms'):
- raise EngineException("Charm defined in vnf[id={}] but not present in "
- "package".format(indata["id"]))
+ if not self._validate_package_folders(storage_params, "charms"):
+ raise EngineException(
+ "Charm defined in vnf[id={}] but not present in "
+ "package".format(indata["id"])
+ )
def _validate_package_folders(self, storage_params, folder, file=None):
if not storage_params or not storage_params.get("pkg-dir"):
return False
else:
- if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
- f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+ if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
+ f = "{}_/{}/{}".format(
+ storage_params["folder"], storage_params["pkg-dir"], folder
+ )
else:
- f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+ f = "{}/{}/{}".format(
+ storage_params["folder"], storage_params["pkg-dir"], folder
+ )
if file:
- return self.fs.file_exists("{}/{}".format(f, file), 'file')
+ return self.fs.file_exists("{}/{}".format(f, file), "file")
else:
- if self.fs.file_exists(f, 'dir'):
+ if self.fs.file_exists(f, "dir"):
if self.fs.dir_ls(f):
return True
return False
for ivld in get_iterable(indata.get("int-virtual-link-desc")):
ivld_id = ivld.get("id")
if ivld_id and ivld_id in all_ivld_ids:
- raise EngineException("Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
else:
all_ivld_ids.add(ivld_id)
if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
raise EngineException(
"vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
- "int-virtual-link-desc".format(vdu["id"], int_cpd["id"], int_cpd_ivld_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ "int-virtual-link-desc".format(
+ vdu["id"], int_cpd["id"], int_cpd_ivld_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
for df in get_iterable(indata.get("df")):
for vlp in get_iterable(df.get("virtual-link-profile")):
vlp_ivld_id = vlp.get("id")
if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
- raise EngineException("df[id='{}']:virtual-link-profile='{}' must match an existing "
- "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "df[id='{}']:virtual-link-profile='{}' must match an existing "
+ "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
@staticmethod
def validate_monitoring_params(indata):
for mp in get_iterable(ivld.get("monitoring-parameters")):
mp_id = mp.get("id")
if mp_id and mp_id in all_monitoring_params:
- raise EngineException("Duplicated monitoring-parameter id in "
- "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']"
- .format(ivld["id"], mp_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Duplicated monitoring-parameter id in "
+ "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
+ ivld["id"], mp_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
else:
all_monitoring_params.add(mp_id)
for mp in get_iterable(vdu.get("monitoring-parameter")):
mp_id = mp.get("id")
if mp_id and mp_id in all_monitoring_params:
- raise EngineException("Duplicated monitoring-parameter id in "
- "vdu[id='{}']:monitoring-parameter[id='{}']"
- .format(vdu["id"], mp_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Duplicated monitoring-parameter id in "
+ "vdu[id='{}']:monitoring-parameter[id='{}']".format(
+ vdu["id"], mp_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
else:
all_monitoring_params.add(mp_id)
for mp in get_iterable(df.get("monitoring-parameter")):
mp_id = mp.get("id")
if mp_id and mp_id in all_monitoring_params:
- raise EngineException("Duplicated monitoring-parameter id in "
- "df[id='{}']:monitoring-parameter[id='{}']"
- .format(df["id"], mp_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Duplicated monitoring-parameter id in "
+ "df[id='{}']:monitoring-parameter[id='{}']".format(
+ df["id"], mp_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
else:
all_monitoring_params.add(mp_id)
for sp in get_iterable(sa.get("scaling-policy")):
for sc in get_iterable(sp.get("scaling-criteria")):
sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
- if sc_monitoring_param and sc_monitoring_param not in all_monitoring_params:
- raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
- "[name='{}']:scaling-criteria[name='{}']: "
- "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
- .format(df["id"], sa["id"], sp["name"], sc["name"],
- sc_monitoring_param),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ if (
+ sc_monitoring_param
+ and sc_monitoring_param not in all_monitoring_params
+ ):
+ raise EngineException(
+ "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+ "[name='{}']:scaling-criteria[name='{}']: "
+ "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+ df["id"],
+ sa["id"],
+ sp["name"],
+ sc["name"],
+ sc_monitoring_param,
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
for sca in get_iterable(sa.get("scaling-config-action")):
- if "lcm-operations-configuration" not in df \
- or "operate-vnf-op-config" not in df["lcm-operations-configuration"] \
+ if (
+ "lcm-operations-configuration" not in df
+ or "operate-vnf-op-config"
+ not in df["lcm-operations-configuration"]
or not utils.find_in_list(
- df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", []),
- lambda config: config["id"] == indata["id"]):
- raise EngineException("'day1-2 configuration' not defined in the descriptor but it is "
- "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
- .format(df["id"], sa["id"]),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ df["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ].get("day1-2", []),
+ lambda config: config["id"] == indata["id"],
+ )
+ ):
+ raise EngineException(
+ "'day1-2 configuration' not defined in the descriptor but it is "
+ "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+ df["id"], sa["id"]
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
for configuration in get_iterable(
- df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", [])
+ df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
+ "day1-2", []
+ )
):
- for primitive in get_iterable(configuration.get("config-primitive")):
- if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
+ for primitive in get_iterable(
+ configuration.get("config-primitive")
+ ):
+ if (
+ primitive["name"]
+ == sca["vnf-config-primitive-name-ref"]
+ ):
break
else:
- raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
- "config-primitive-name-ref='{}' does not match any "
- "day1-2 configuration:config-primitive:name"
- .format(df["id"], sa["id"], sca["vnf-config-primitive-name-ref"]),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+ "config-primitive-name-ref='{}' does not match any "
+ "day1-2 configuration:config-primitive:name".format(
+ df["id"],
+ sa["id"],
+ sca["vnf-config-primitive-name-ref"],
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
"""
links = {}
links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
- links["packageContent"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])}
+ links["packageContent"] = {
+ "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
+ }
data["_links"] = links
return super().sol005_projection(data)
def pyangbind_validation(self, item, data, force=False):
if self._descriptor_data_is_in_old_format(data):
- raise EngineException("ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
try:
- nsd_vnf_profiles = data.get('df', [{}])[0].get('vnf-profile', [])
+ nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
- pybindJSONDecoder.load_ietf_json({'nsd': {'nsd': [data]}}, None, None, obj=mynsd,
- path_helper=True, skip_unknown=force)
+ pybindJSONDecoder.load_ietf_json(
+ {"nsd": {"nsd": [data]}},
+ None,
+ None,
+ obj=mynsd,
+ path_helper=True,
+ skip_unknown=force,
+ )
out = pybindJSON.dumps(mynsd, mode="ietf")
desc_out = self._remove_envelop(yaml.safe_load(out))
desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
if nsd_vnf_profiles:
- desc_out['df'][0]['vnf-profile'] = nsd_vnf_profiles
+ desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
return desc_out
except Exception as e:
- raise EngineException("Error in pyangbind validation: {}".format(str(e)),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Error in pyangbind validation: {}".format(str(e)),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
@staticmethod
def _descriptor_data_is_in_old_format(data):
- return ('nsd-catalog' in data) or ('nsd:nsd-catalog' in data)
+ return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
@staticmethod
def _remove_envelop(indata=None):
return {}
clean_indata = indata
- if clean_indata.get('nsd'):
- clean_indata = clean_indata['nsd']
- elif clean_indata.get('etsi-nfv-nsd:nsd'):
- clean_indata = clean_indata['etsi-nfv-nsd:nsd']
- if clean_indata.get('nsd'):
- if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
+ if clean_indata.get("nsd"):
+ clean_indata = clean_indata["nsd"]
+ elif clean_indata.get("etsi-nfv-nsd:nsd"):
+ clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
+ if clean_indata.get("nsd"):
+ if (
+ not isinstance(clean_indata["nsd"], list)
+ or len(clean_indata["nsd"]) != 1
+ ):
raise EngineException("'nsd' must be a list of only one element")
- clean_indata = clean_indata['nsd'][0]
+ clean_indata = clean_indata["nsd"][0]
return clean_indata
def _validate_input_new(self, indata, storage_params, force=False):
for vlp in get_iterable(df.get("virtual-link-profile")):
if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
if vlp.get("virtual-link-protocol-data"):
- raise EngineException("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
- "protocol-data You cannot set a virtual-link-protocol-data "
- "when mgmt-network is True"
- .format(df["id"], vlp["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
+ "protocol-data You cannot set a virtual-link-protocol-data "
+ "when mgmt-network is True".format(df["id"], vlp["id"]),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
@staticmethod
def validate_vnf_profiles_vnfd_id(indata):
for vnf_profile in get_iterable(df.get("vnf-profile")):
vnfd_id = vnf_profile.get("vnfd-id")
if vnfd_id and vnfd_id not in all_vnfd_ids:
- raise EngineException("Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
- "does not match any vnfd-id".format(df["id"], vnf_profile["id"], vnfd_id),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
+ "does not match any vnfd-id".format(
+ df["id"], vnf_profile["id"], vnfd_id
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
def _validate_input_edit(self, indata, content, force=False):
# not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
"""
indata looks as follows:
- - In the new case (conformant)
- {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
+ - In the new case (conformant)
+ {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
'_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
- In the old case (backwards-compatible)
{'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
else:
- raise EngineException("State '{}' is not a valid operational state"
- .format(indata["nsdOperationalState"]),
- http_code=HTTPStatus.BAD_REQUEST)
-
- # In the case of user defined data, we need to put the data in the root of the object
+ raise EngineException(
+ "State '{}' is not a valid operational state".format(
+ indata["nsdOperationalState"]
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
+
+ # In the case of user defined data, we need to put the data in the root of the object
# to preserve current expected behaviour
if "userDefinedData" in indata:
data = indata.pop("userDefinedData")
if type(data) == dict:
indata["_admin"]["userDefinedData"] = data
else:
- raise EngineException("userDefinedData should be an object, but is '{}' instead"
- .format(type(data)),
- http_code=HTTPStatus.BAD_REQUEST)
- if ("operationalState" in indata["_admin"] and
- content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
- raise EngineException("nsdOperationalState already {}".format(content["_admin"]["operationalState"]),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "userDefinedData should be an object, but is '{}' instead".format(
+ type(data)
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
+ if (
+ "operationalState" in indata["_admin"]
+ and content["_admin"]["operationalState"]
+ == indata["_admin"]["operationalState"]
+ ):
+ raise EngineException(
+ "nsdOperationalState already {}".format(
+ content["_admin"]["operationalState"]
+ ),
+ http_code=HTTPStatus.CONFLICT,
+ )
return indata
def _check_descriptor_dependencies(self, session, descriptor):
query_filter["id"] = vnfd_id
vnf_list = self.db.get_list("vnfds", query_filter)
if not vnf_list:
- raise EngineException("Descriptor error at 'vnfd-id'='{}' references a non "
- "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Descriptor error at 'vnfd-id'='{}' references a non "
+ "existing vnfd".format(vnfd_id),
+ http_code=HTTPStatus.CONFLICT,
+ )
vnfds_index[vnfd_id] = vnf_list[0]
return vnfds_index
vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
all_vnfd_ext_cpds = set()
for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
- if ext_cpd.get('id'):
- all_vnfd_ext_cpds.add(ext_cpd.get('id'))
+ if ext_cpd.get("id"):
+ all_vnfd_ext_cpds.add(ext_cpd.get("id"))
- for virtual_link in get_iterable(vnf_profile.get("virtual-link-connectivity")):
+ for virtual_link in get_iterable(
+ vnf_profile.get("virtual-link-connectivity")
+ ):
for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
- vl_cpd_id = vl_cpd.get('constituent-cpd-id')
+ vl_cpd_id = vl_cpd.get("constituent-cpd-id")
if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
- raise EngineException("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
- "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
- "non existing ext-cpd:id inside vnfd '{}'"
- .format(df["id"], vnf_profile["id"],
- virtual_link["virtual-link-profile-id"], vl_cpd_id, vnfd["id"]),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+ "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+ "non existing ext-cpd:id inside vnfd '{}'".format(
+ df["id"],
+ vnf_profile["id"],
+ virtual_link["virtual-link-profile-id"],
+ vl_cpd_id,
+ vnfd["id"],
+ ),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
- final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+ final_content = super().check_conflict_on_edit(
+ session, final_content, edit_content, _id
+ )
self._check_descriptor_dependencies(session, final_content)
_filter = self._get_project_filter(session)
_filter["nsd-id"] = _id
if self.db.get_list("nsrs", _filter):
- raise EngineException("There is at least one NS instance using this descriptor",
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one NS instance using this descriptor",
+ http_code=HTTPStatus.CONFLICT,
+ )
# check NSD referenced by NST
del _filter["nsd-id"]
_filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
if self.db.get_list("nsts", _filter):
- raise EngineException("There is at least one NetSlice Template referencing this descriptor",
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one NetSlice Template referencing this descriptor",
+ http_code=HTTPStatus.CONFLICT,
+ )
def sol005_projection(self, data):
data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
links = {}
links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
- links["nsd_content"] = {"href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])}
+ links["nsd_content"] = {
+ "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
+ }
data["_links"] = links
return super().sol005_projection(data)
def pyangbind_validation(self, item, data, force=False):
try:
mynst = nst_im()
- pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
- path_helper=True, skip_unknown=force)
+ pybindJSONDecoder.load_ietf_json(
+ {"nst": [data]},
+ None,
+ None,
+ obj=mynst,
+ path_helper=True,
+ skip_unknown=force,
+ )
out = pybindJSON.dumps(mynst, mode="ietf")
desc_out = self._remove_envelop(yaml.safe_load(out))
return desc_out
except Exception as e:
- raise EngineException("Error in pyangbind validation: {}".format(str(e)),
- http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+ raise EngineException(
+ "Error in pyangbind validation: {}".format(str(e)),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
@staticmethod
def _remove_envelop(indata=None):
return {}
clean_indata = indata
- if clean_indata.get('nst'):
- if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
+ if clean_indata.get("nst"):
+ if (
+ not isinstance(clean_indata["nst"], list)
+ or len(clean_indata["nst"]) != 1
+ ):
raise EngineException("'nst' must be a list only one element")
- clean_indata = clean_indata['nst'][0]
- elif clean_indata.get('nst:nst'):
- if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
+ clean_indata = clean_indata["nst"][0]
+ elif clean_indata.get("nst:nst"):
+ if (
+ not isinstance(clean_indata["nst:nst"], list)
+ or len(clean_indata["nst:nst"]) != 1
+ ):
raise EngineException("'nst:nst' must be a list only one element")
- clean_indata = clean_indata['nst:nst'][0]
+ clean_indata = clean_indata["nst:nst"][0]
return clean_indata
def _validate_input_new(self, indata, storage_params, force=False):
filter_q = self._get_project_filter(session)
filter_q["id"] = nsd_id
if not self.db.get_list("nsds", filter_q):
- raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
- "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
+ "existing nsd".format(nsd_id),
+ http_code=HTTPStatus.CONFLICT,
+ )
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
- final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+ final_content = super().check_conflict_on_edit(
+ session, final_content, edit_content, _id
+ )
self._check_descriptor_dependencies(session, final_content)
return final_content
_filter = self._get_project_filter(session)
_filter["_admin.nst-id"] = _id
if self.db.get_list("nsis", _filter):
- raise EngineException("there is at least one Netslice Instance using this descriptor",
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "there is at least one Netslice Instance using this descriptor",
+ http_code=HTTPStatus.CONFLICT,
+ )
def sol005_projection(self, data):
data["onboardingState"] = data["_admin"]["onboardingState"]
_filter = self._get_project_filter(session)
_filter["vdur.pdu-id"] = _id
if self.db.get_list("vnfrs", _filter):
- raise EngineException("There is at least one VNF instance using this PDU", http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "There is at least one VNF instance using this PDU",
+ http_code=HTTPStatus.CONFLICT,
+ )
class VnfPkgOpTopic(BaseTopic):
BaseTopic.__init__(self, db, fs, msg, auth)
def edit(self, session, _id, indata=None, kwargs=None, content=None):
- raise EngineException("Method 'edit' not allowed for topic '{}'".format(self.topic),
- HTTPStatus.METHOD_NOT_ALLOWED)
+ raise EngineException(
+ "Method 'edit' not allowed for topic '{}'".format(self.topic),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
def delete(self, session, _id, dry_run=False):
- raise EngineException("Method 'delete' not allowed for topic '{}'".format(self.topic),
- HTTPStatus.METHOD_NOT_ALLOWED)
+ raise EngineException(
+ "Method 'delete' not allowed for topic '{}'".format(self.topic),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
def delete_list(self, session, filter_q=None):
- raise EngineException("Method 'delete_list' not allowed for topic '{}'".format(self.topic),
- HTTPStatus.METHOD_NOT_ALLOWED)
+ raise EngineException(
+ "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
"""
juju_bundle = kdu.get("juju-bundle")
break
else:
- raise EngineException("Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name))
+ raise EngineException(
+ "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
+ )
if helm_chart:
indata["helm-chart"] = helm_chart
match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
repo_name = match.group(1) if match else None
else:
- raise EngineException("Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']"
- .format(vnfpkg_id, kdu_name))
+ raise EngineException(
+ "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
+ vnfpkg_id, kdu_name
+ )
+ )
if repo_name:
del filter_q["_id"]
filter_q["name"] = repo_name
"links": {
"self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
"vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
- }
+ },
}
- self.format_on_new(vnfpkgop_desc, session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ vnfpkgop_desc, session["project_id"], make_public=session["public"]
+ )
ctime = vnfpkgop_desc["_admin"]["created"]
vnfpkgop_desc["statusEnteredTime"] = ctime
vnfpkgop_desc["startTime"] = ctime
# limitations under the License.
import logging
+
# import yaml
-from osm_common import dbmongo, dbmemory, fslocal, fsmongo, msglocal, msgkafka, version as common_version
+from osm_common import (
+ dbmongo,
+ dbmemory,
+ fslocal,
+ fsmongo,
+ msglocal,
+ msgkafka,
+ version as common_version,
+)
from osm_common.dbbase import DbException
from osm_common.fsbase import FsException
from osm_common.msgbase import MsgException
from osm_nbi.admin_topics import K8sClusterTopic, K8sRepoTopic, OsmRepoTopic
from osm_nbi.admin_topics import VcaTopic
from osm_nbi.admin_topics import UserTopicAuth, ProjectTopicAuth, RoleTopicAuth
-from osm_nbi.descriptor_topics import VnfdTopic, NsdTopic, PduTopic, NstTopic, VnfPkgOpTopic
-from osm_nbi.instance_topics import NsrTopic, VnfrTopic, NsLcmOpTopic, NsiTopic, NsiLcmOpTopic
+from osm_nbi.descriptor_topics import (
+ VnfdTopic,
+ NsdTopic,
+ PduTopic,
+ NstTopic,
+ VnfPkgOpTopic,
+)
+from osm_nbi.instance_topics import (
+ NsrTopic,
+ VnfrTopic,
+ NsLcmOpTopic,
+ NsiTopic,
+ NsiLcmOpTopic,
+)
from osm_nbi.pmjobs_topics import PmJobsTopic
from osm_nbi.subscription_topics import NslcmSubscriptionsTopic
from base64 import b64encode
-from os import urandom # , path
+from os import urandom # , path
from threading import Lock
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
"vca": VcaTopic,
"k8srepos": K8sRepoTopic,
"osmrepos": OsmRepoTopic,
- "users": UserTopicAuth, # Valid for both internal and keystone authentication backends
- "projects": ProjectTopicAuth, # Valid for both internal and keystone authentication backends
- "roles": RoleTopicAuth, # Valid for both internal and keystone authentication backends
+ "users": UserTopicAuth, # Valid for both internal and keystone authentication backends
+ "projects": ProjectTopicAuth, # Valid for both internal and keystone authentication backends
+ "roles": RoleTopicAuth, # Valid for both internal and keystone authentication backends
"nsis": NsiTopic,
"nsilcmops": NsiLcmOpTopic,
"vnfpkgops": VnfPkgOpTopic,
self.config = config
# check right version of common
if versiontuple(common_version) < versiontuple(min_common_version):
- raise EngineException("Not compatible osm/common version '{}'. Needed '{}' or higher".format(
- common_version, min_common_version))
+ raise EngineException(
+ "Not compatible osm/common version '{}'. Needed '{}' or higher".format(
+ common_version, min_common_version
+ )
+ )
try:
if not self.db:
self.db = dbmemory.DbMemory()
self.db.db_connect(config["database"])
else:
- raise EngineException("Invalid configuration param '{}' at '[database]':'driver'".format(
- config["database"]["driver"]))
+ raise EngineException(
+ "Invalid configuration param '{}' at '[database]':'driver'".format(
+ config["database"]["driver"]
+ )
+ )
if not self.fs:
if config["storage"]["driver"] == "local":
self.fs = fslocal.FsLocal()
self.fs = fsmongo.FsMongo()
self.fs.fs_connect(config["storage"])
else:
- raise EngineException("Invalid configuration param '{}' at '[storage]':'driver'".format(
- config["storage"]["driver"]))
+ raise EngineException(
+ "Invalid configuration param '{}' at '[storage]':'driver'".format(
+ config["storage"]["driver"]
+ )
+ )
if not self.msg:
if config["message"]["driver"] == "local":
self.msg = msglocal.MsgLocal()
self.msg = msgkafka.MsgKafka()
self.msg.connect(config["message"])
else:
- raise EngineException("Invalid configuration param '{}' at '[message]':'driver'".format(
- config["message"]["driver"]))
+ raise EngineException(
+ "Invalid configuration param '{}' at '[message]':'driver'".format(
+ config["message"]["driver"]
+ )
+ )
if not self.authconn:
if config["authentication"]["backend"] == "keystone":
- self.authconn = AuthconnKeystone(config["authentication"], self.db,
- self.authenticator.role_permissions)
+ self.authconn = AuthconnKeystone(
+ config["authentication"],
+ self.db,
+ self.authenticator.role_permissions,
+ )
elif config["authentication"]["backend"] == "tacacs":
- self.authconn = AuthconnTacacs(config["authentication"], self.db,
- self.authenticator.role_permissions)
+ self.authconn = AuthconnTacacs(
+ config["authentication"],
+ self.db,
+ self.authenticator.role_permissions,
+ )
else:
- self.authconn = AuthconnInternal(config["authentication"], self.db,
- self.authenticator.role_permissions)
+ self.authconn = AuthconnInternal(
+ config["authentication"],
+ self.db,
+ self.authenticator.role_permissions,
+ )
# if not self.operations:
# if "resources_to_operations" in config["rbac"]:
# resources_to_operations_file = config["rbac"]["resources_to_operations"]
for topic, topic_class in self.map_from_topic_to_class.items():
# if self.auth and topic_class in (UserTopicAuth, ProjectTopicAuth):
# self.map_topic[topic] = topic_class(self.db, self.fs, self.msg, self.auth)
- self.map_topic[topic] = topic_class(self.db, self.fs, self.msg, self.authconn)
-
- self.map_topic["pm_jobs"] = PmJobsTopic(self.db, config["prometheus"].get("host"),
- config["prometheus"].get("port"))
+ self.map_topic[topic] = topic_class(
+ self.db, self.fs, self.msg, self.authconn
+ )
+
+ self.map_topic["pm_jobs"] = PmJobsTopic(
+ self.db,
+ config["prometheus"].get("host"),
+ config["prometheus"].get("port"),
+ )
except (DbException, FsException, MsgException) as e:
raise EngineException(str(e), http_code=e.http_code)
except (DbException, FsException, MsgException) as e:
raise EngineException(str(e), http_code=e.http_code)
- def new_item(self, rollback, session, topic, indata=None, kwargs=None, headers=None):
+ def new_item(
+ self, rollback, session, topic, indata=None, kwargs=None, headers=None
+ ):
"""
Creates a new entry into database. For nsds and vnfds it creates an almost empty DISABLED entry,
that must be completed with a call to method upload_content
:return: _id: identity of the inserted data.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
with self.write_lock:
return self.map_topic[topic].new(rollback, session, indata, kwargs, headers)
:return: _id: identity of the inserted data.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
with self.write_lock:
- return self.map_topic[topic].upload_content(session, _id, indata, kwargs, headers)
+ return self.map_topic[topic].upload_content(
+ session, _id, indata, kwargs, headers
+ )
def get_item_list(self, session, topic, filter_q=None, api_req=False):
"""
:return: The list, it can be empty if no one match the filter_q.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
return self.map_topic[topic].list(session, filter_q, api_req)
def get_item(self, session, topic, _id, api_req=False):
:return: dictionary, raise exception if not found.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
return self.map_topic[topic].show(session, _id, api_req)
def get_file(self, session, topic, _id, path=None, accept_header=None):
:return: opened file plus Accept format or raises an exception
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
return self.map_topic[topic].get_file(session, _id, path, accept_header)
def del_item_list(self, session, topic, _filter=None):
:return: The deleted list, it can be empty if no one match the _filter.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
with self.write_lock:
return self.map_topic[topic].delete_list(session, _filter)
:return: dictionary with deleted item _id. It raises exception if not found.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
with self.write_lock:
return self.map_topic[topic].delete(session, _id, not_send_msg=not_send_msg)
:return: dictionary with edited item _id, raise exception if not found.
"""
if topic not in self.map_topic:
- raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+ )
with self.write_lock:
return self.map_topic[topic].edit(session, _id, indata, kwargs)
def upgrade_db(self, current_version, target_version):
if target_version not in self.map_target_version_to_int.keys():
- raise EngineException("Cannot upgrade to version '{}' with this version of code".format(target_version),
- http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Cannot upgrade to version '{}' with this version of code".format(
+ target_version
+ ),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
if current_version == target_version:
return
-
+
target_version_int = self.map_target_version_to_int[target_version]
if not current_version:
# create database version
serial = urandom(32)
version_data = {
- "_id": "version", # Always "version"
- "version_int": 1000, # version number
- "version": "1.0", # version text
- "date": "2018-10-25", # version date
+ "_id": "version", # Always "version"
+ "version_int": 1000, # version number
+ "version": "1.0", # version text
+ "date": "2018-10-25", # version date
"description": "added serial", # changes in this version
- 'status': "ENABLED", # ENABLED, DISABLED (migration in process), ERROR,
- 'serial': b64encode(serial)
+ "status": "ENABLED", # ENABLED, DISABLED (migration in process), ERROR,
+ "serial": b64encode(serial),
}
self.db.create("admin", version_data)
self.db.set_secret_key(serial)
current_version = "1.0"
-
- if current_version in ("1.0", "1.1") and target_version_int >= self.map_target_version_to_int["1.2"]:
- if self.config['authentication']['backend'] == "internal":
+
+ if (
+ current_version in ("1.0", "1.1")
+ and target_version_int >= self.map_target_version_to_int["1.2"]
+ ):
+ if self.config["authentication"]["backend"] == "internal":
self.db.del_list("roles")
version_data = {
"version_int": 1002,
"version": "1.2",
"date": "2019-06-11",
- "description": "set new format for roles_operations"
+ "description": "set new format for roles_operations",
}
self.db.set_one("admin", {"_id": "version"}, version_data)
current_version = "1.2"
# TODO add future migrations here
- def init_db(self, target_version='1.0'):
+ def init_db(self, target_version="1.0"):
"""
Init database if empty. If not empty it checks that database version and migrates if needed
If empty, it creates a new user admin/admin at 'users' and a new entry at 'version'
:return: None if ok, exception if error or if the version is different.
"""
- version_data = self.db.get_one("admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True)
+ version_data = self.db.get_one(
+ "admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True
+ )
# check database status is ok
- if version_data and version_data.get("status") != 'ENABLED':
- raise EngineException("Wrong database status '{}'".format(
- version_data["status"]), HTTPStatus.INTERNAL_SERVER_ERROR)
+ if version_data and version_data.get("status") != "ENABLED":
+ raise EngineException(
+ "Wrong database status '{}'".format(version_data["status"]),
+ HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
# check version
db_version = None if not version_data else version_data.get("version")
</form>
"""
-html_vnfpackage_body = """<a href="/osm/vnfpkgm/v1/vnf_packages/{id}/artifacts">Artifacts </a>"""
-html_nspackage_body = """<a href="/osm/nsd/v1/ns_descriptors/{id}/artifacts">Artifacts </a>"""
+html_vnfpackage_body = (
+ """<a href="/osm/vnfpkgm/v1/vnf_packages/{id}/artifacts">Artifacts </a>"""
+)
+html_nspackage_body = (
+ """<a href="/osm/nsd/v1/ns_descriptors/{id}/artifacts">Artifacts </a>"""
+)
def format(data, request, response, toke_info):
:param response: cherrypy response
:return: string with teh html response
"""
- response.headers["Content-Type"] = 'text/html'
+ response.headers["Content-Type"] = "text/html"
if response.status == HTTPStatus.UNAUTHORIZED.value:
- if response.headers.get("WWW-Authenticate") and request.config.get("auth.allow_basic_authentication"):
- response.headers["WWW-Authenticate"] = "Basic" + response.headers["WWW-Authenticate"][6:]
+ if response.headers.get("WWW-Authenticate") and request.config.get(
+ "auth.allow_basic_authentication"
+ ):
+ response.headers["WWW-Authenticate"] = (
+ "Basic" + response.headers["WWW-Authenticate"][6:]
+ )
return
else:
return html_auth2.format(error=data)
if request.path_info in ("/version", "/system"):
- return "<pre>" + yaml.safe_dump(data, explicit_start=False, indent=4, default_flow_style=False) + "</pre>"
+ return (
+ "<pre>"
+ + yaml.safe_dump(
+ data, explicit_start=False, indent=4, default_flow_style=False
+ )
+ + "</pre>"
+ )
body = html_body.format(item=html_escape(request.path_info))
if response.status and response.status > 202:
# input request.path_info (URL) can contain XSS that are translated into output error detail
- body += html_body_error.format(html_escape(
- yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False)))
+ body += html_body_error.format(
+ html_escape(
+ yaml.safe_dump(
+ data, explicit_start=True, indent=4, default_flow_style=False
+ )
+ )
+ )
elif isinstance(data, (list, tuple)):
if request.path_info == "/vnfpkgm/v1/vnf_packages":
body += html_upload_body.format(request.path_info + "_content", "VNFD")
data_id = k.pop("_id", None)
elif isinstance(k, str):
data_id = k
- body += '<p> <a href="/osm/{url}/{id}">{id}</a>: {t} </p>'.format(url=request.path_info, id=data_id,
- t=html_escape(str(k)))
+ body += '<p> <a href="/osm/{url}/{id}">{id}</a>: {t} </p>'.format(
+ url=request.path_info, id=data_id, t=html_escape(str(k))
+ )
elif isinstance(data, dict):
if "Location" in response.headers:
body += '<a href="{}"> show </a>'.format(response.headers["Location"])
else:
- _id = request.path_info[request.path_info.rfind("/")+1:]
- body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'\
- .format(request.path_info)
- if request.path_info.startswith("/nslcm/v1/ns_instances_content/") or \
- request.path_info.startswith("/nslcm/v1/ns_instances/"):
+ _id = request.path_info[request.path_info.rfind("/") + 1 :]
+ body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'.format(
+ request.path_info
+ )
+ if request.path_info.startswith(
+ "/nslcm/v1/ns_instances_content/"
+ ) or request.path_info.startswith("/nslcm/v1/ns_instances/"):
body += html_nslcmop_body.format(id=_id)
- elif request.path_info.startswith("/nsilcm/v1/netslice_instances_content/") or \
- request.path_info.startswith("/nsilcm/v1/netslice_instances/"):
+ elif request.path_info.startswith(
+ "/nsilcm/v1/netslice_instances_content/"
+ ) or request.path_info.startswith("/nsilcm/v1/netslice_instances/"):
body += html_nsilcmop_body.format(id=_id)
- elif request.path_info.startswith("/vnfpkgm/v1/vnf_packages/") or \
- request.path_info.startswith("/vnfpkgm/v1/vnf_packages_content/"):
+ elif request.path_info.startswith(
+ "/vnfpkgm/v1/vnf_packages/"
+ ) or request.path_info.startswith("/vnfpkgm/v1/vnf_packages_content/"):
body += html_vnfpackage_body.format(id=_id)
- elif request.path_info.startswith("/nsd/v1/ns_descriptors/") or \
- request.path_info.startswith("/nsd/v1/ns_descriptors_content/"):
+ elif request.path_info.startswith(
+ "/nsd/v1/ns_descriptors/"
+ ) or request.path_info.startswith("/nsd/v1/ns_descriptors_content/"):
body += html_nspackage_body.format(id=_id)
- body += "<pre>" + html_escape(yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False)) + \
- "</pre>"
+ body += (
+ "<pre>"
+ + html_escape(
+ yaml.safe_dump(
+ data, explicit_start=True, indent=4, default_flow_style=False
+ )
+ )
+ + "</pre>"
+ )
elif data is None:
if request.method == "DELETE" or "METHOD=DELETE" in request.query_string:
body += "<pre> deleted </pre>"
from http import HTTPStatus
from time import time
from copy import copy, deepcopy
-from osm_nbi.validation import validate_input, ValidationError, ns_instantiate, ns_terminate, ns_action, ns_scale,\
- nsi_instantiate
-from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable, deep_get, increment_ip_mac
+from osm_nbi.validation import (
+ validate_input,
+ ValidationError,
+ ns_instantiate,
+ ns_terminate,
+ ns_action,
+ ns_scale,
+ nsi_instantiate,
+)
+from osm_nbi.base_topic import (
+ BaseTopic,
+ EngineException,
+ get_iterable,
+ deep_get,
+ increment_ip_mac,
+)
from yaml import safe_dump
from osm_common.dbbase import DbException
from osm_common.msgbase import MsgException
from osm_common.fsbase import FsException
from osm_nbi import utils
-from re import match # For checking that additional parameter names are valid Jinja2 identifiers
+from re import (
+ match,
+) # For checking that additional parameter names are valid Jinja2 identifiers
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
return
nsd_id = descriptor["nsdId"]
if not self.get_item_list(session, "nsds", {"id": nsd_id}):
- raise EngineException("Descriptor error at nsdId='{}' references a non exist nsd".format(nsd_id),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Descriptor error at nsdId='{}' references a non exist nsd".format(
+ nsd_id
+ ),
+ http_code=HTTPStatus.CONFLICT,
+ )
@staticmethod
def format_on_new(content, project_id=None, make_public=False):
return
nsr = db_content
if nsr["_admin"].get("nsState") == "INSTANTIATED":
- raise EngineException("nsr '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
- "Launch 'terminate' operation first; or force deletion".format(_id),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "nsr '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
+ "Launch 'terminate' operation first; or force deletion".format(_id),
+ http_code=HTTPStatus.CONFLICT,
+ )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
"""
self.db.del_list("vnfrs", {"nsr-id-ref": _id})
# set all used pdus as free
- self.db.set_list("pdus", {"_admin.usage.nsr_id": _id},
- {"_admin.usageState": "NOT_IN_USE", "_admin.usage": None})
+ self.db.set_list(
+ "pdus",
+ {"_admin.usage.nsr_id": _id},
+ {"_admin.usageState": "NOT_IN_USE", "_admin.usage": None},
+ )
# Set NSD usageState
nsr = db_content
used_nsd_id = nsr.get("nsd-id")
if used_nsd_id:
# check if used by another NSR
- nsrs_list = self.db.get_one("nsrs", {"nsd-id": used_nsd_id},
- fail_on_empty=False, fail_on_more=False)
+ nsrs_list = self.db.get_one(
+ "nsrs", {"nsd-id": used_nsd_id}, fail_on_empty=False, fail_on_more=False
+ )
if not nsrs_list:
- self.db.set_one("nsds", {"_id": used_nsd_id}, {"_admin.usageState": "NOT_IN_USE"})
+ self.db.set_one(
+ "nsds", {"_id": used_nsd_id}, {"_admin.usageState": "NOT_IN_USE"}
+ )
# Set VNFD usageState
used_vnfd_id_list = nsr.get("vnfd-id")
if used_vnfd_id_list:
for used_vnfd_id in used_vnfd_id_list:
# check if used by another NSR
- nsrs_list = self.db.get_one("nsrs", {"vnfd-id": used_vnfd_id},
- fail_on_empty=False, fail_on_more=False)
+ nsrs_list = self.db.get_one(
+ "nsrs",
+ {"vnfd-id": used_vnfd_id},
+ fail_on_empty=False,
+ fail_on_more=False,
+ )
if not nsrs_list:
- self.db.set_one("vnfds", {"_id": used_vnfd_id}, {"_admin.usageState": "NOT_IN_USE"})
+ self.db.set_one(
+ "vnfds",
+ {"_id": used_vnfd_id},
+ {"_admin.usageState": "NOT_IN_USE"},
+ )
# delete extra ro_nsrs used for internal RO module
self.db.del_one("ro_nsrs", q_filter={"_id": _id}, fail_on_empty=False)
return formated_request
@staticmethod
- def _format_additional_params(ns_request, member_vnf_index=None, vdu_id=None, kdu_name=None, descriptor=None):
+ def _format_additional_params(
+ ns_request, member_vnf_index=None, vdu_id=None, kdu_name=None, descriptor=None
+ ):
"""
Get and format user additional params for NS or VNF
:param ns_request: User instantiation additional parameters
additional_params = copy(ns_request.get("additionalParamsForNs"))
where_ = "additionalParamsForNs"
elif ns_request.get("additionalParamsForVnf"):
- where_ = "additionalParamsForVnf[member-vnf-index={}]".format(member_vnf_index)
- item = next((x for x in ns_request["additionalParamsForVnf"] if x["member-vnf-index"] == member_vnf_index),
- None)
+ where_ = "additionalParamsForVnf[member-vnf-index={}]".format(
+ member_vnf_index
+ )
+ item = next(
+ (
+ x
+ for x in ns_request["additionalParamsForVnf"]
+ if x["member-vnf-index"] == member_vnf_index
+ ),
+ None,
+ )
if item:
if not vdu_id and not kdu_name:
other_params = item
additional_params = copy(item.get("additionalParams")) or {}
if vdu_id and item.get("additionalParamsForVdu"):
- item_vdu = next((x for x in item["additionalParamsForVdu"] if x["vdu_id"] == vdu_id), None)
+ item_vdu = next(
+ (
+ x
+ for x in item["additionalParamsForVdu"]
+ if x["vdu_id"] == vdu_id
+ ),
+ None,
+ )
other_params = item_vdu
if item_vdu and item_vdu.get("additionalParams"):
where_ += ".additionalParamsForVdu[vdu_id={}]".format(vdu_id)
if kdu_name:
additional_params = {}
if item.get("additionalParamsForKdu"):
- item_kdu = next((x for x in item["additionalParamsForKdu"] if x["kdu_name"] == kdu_name), None)
+ item_kdu = next(
+ (
+ x
+ for x in item["additionalParamsForKdu"]
+ if x["kdu_name"] == kdu_name
+ ),
+ None,
+ )
other_params = item_kdu
if item_kdu and item_kdu.get("additionalParams"):
- where_ += ".additionalParamsForKdu[kdu_name={}]".format(kdu_name)
+ where_ += ".additionalParamsForKdu[kdu_name={}]".format(
+ kdu_name
+ )
additional_params = item_kdu["additionalParams"]
if additional_params:
for k, v in additional_params.items():
# BEGIN Check that additional parameter names are valid Jinja2 identifiers if target is not Kdu
- if not kdu_name and not match('^[a-zA-Z_][a-zA-Z0-9_]*$', k):
- raise EngineException("Invalid param name at {}:{}. Must contain only alphanumeric characters "
- "and underscores, and cannot start with a digit"
- .format(where_, k))
+ if not kdu_name and not match("^[a-zA-Z_][a-zA-Z0-9_]*$", k):
+ raise EngineException(
+ "Invalid param name at {}:{}. Must contain only alphanumeric characters "
+ "and underscores, and cannot start with a digit".format(
+ where_, k
+ )
+ )
# END Check that additional parameter names are valid Jinja2 identifiers
if not isinstance(k, str):
- raise EngineException("Invalid param at {}:{}. Only string keys are allowed".format(where_, k))
+ raise EngineException(
+ "Invalid param at {}:{}. Only string keys are allowed".format(
+ where_, k
+ )
+ )
if "." in k or "$" in k:
- raise EngineException("Invalid param at {}:{}. Keys must not contain dots or $".format(where_, k))
+ raise EngineException(
+ "Invalid param at {}:{}. Keys must not contain dots or $".format(
+ where_, k
+ )
+ )
if isinstance(v, (dict, tuple, list)):
additional_params[k] = "!!yaml " + safe_dump(v)
# TODO: check for cloud-init
if member_vnf_index:
initial_primitives = []
- if "lcm-operations-configuration" in df \
- and "operate-vnf-op-config" in df["lcm-operations-configuration"]:
- for config in df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", []):
- for primitive in get_iterable(config.get("initial-config-primitive")):
+ if (
+ "lcm-operations-configuration" in df
+ and "operate-vnf-op-config"
+ in df["lcm-operations-configuration"]
+ ):
+ for config in df["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ].get("day1-2", []):
+ for primitive in get_iterable(
+ config.get("initial-config-primitive")
+ ):
initial_primitives.append(primitive)
else:
- initial_primitives = deep_get(descriptor, ("ns-configuration", "initial-config-primitive"))
+ initial_primitives = deep_get(
+ descriptor, ("ns-configuration", "initial-config-primitive")
+ )
for initial_primitive in get_iterable(initial_primitives):
for param in get_iterable(initial_primitive.get("parameter")):
- if param["value"].startswith("<") and param["value"].endswith(">"):
- if param["value"] in ("<rw_mgmt_ip>", "<VDU_SCALE_INFO>", "<ns_config_info>"):
+ if param["value"].startswith("<") and param["value"].endswith(
+ ">"
+ ):
+ if param["value"] in (
+ "<rw_mgmt_ip>",
+ "<VDU_SCALE_INFO>",
+ "<ns_config_info>",
+ ):
continue
- if not additional_params or param["value"][1:-1] not in additional_params:
- raise EngineException("Parameter '{}' needed for vnfd[id={}]:day1-2 configuration:"
- "initial-config-primitive[name={}] not supplied".
- format(param["value"], descriptor["id"],
- initial_primitive["name"]))
+ if (
+ not additional_params
+ or param["value"][1:-1] not in additional_params
+ ):
+ raise EngineException(
+ "Parameter '{}' needed for vnfd[id={}]:day1-2 configuration:"
+ "initial-config-primitive[name={}] not supplied".format(
+ param["value"],
+ descriptor["id"],
+ initial_primitive["name"],
+ )
+ )
return additional_params or None, other_params or None
step = "filling nsr from input data"
nsr_id = str(uuid4())
- nsr_descriptor = self._create_nsr_descriptor_from_nsd(nsd, ns_request, nsr_id, session)
+ nsr_descriptor = self._create_nsr_descriptor_from_nsd(
+ nsd, ns_request, nsr_id, session
+ )
# Create VNFRs
needed_vnfds = {}
for vnfp in vnf_profiles:
vnfd_id = vnfp.get("vnfd-id")
vnf_index = vnfp.get("id")
- step = "getting vnfd id='{}' constituent-vnfd='{}' from database".format(vnfd_id, vnf_index)
+ step = (
+ "getting vnfd id='{}' constituent-vnfd='{}' from database".format(
+ vnfd_id, vnf_index
+ )
+ )
if vnfd_id not in needed_vnfds:
vnfd = self._get_vnfd_from_db(vnfd_id, session)
needed_vnfds[vnfd_id] = vnfd
else:
vnfd = needed_vnfds[vnfd_id]
- step = "filling vnfr vnfd-id='{}' constituent-vnfd='{}'".format(vnfd_id, vnf_index)
- vnfr_descriptor = self._create_vnfr_descriptor_from_vnfd(nsd, vnfd, vnfd_id, vnf_index, nsr_descriptor,
- ns_request, ns_k8s_namespace)
+ step = "filling vnfr vnfd-id='{}' constituent-vnfd='{}'".format(
+ vnfd_id, vnf_index
+ )
+ vnfr_descriptor = self._create_vnfr_descriptor_from_vnfd(
+ nsd,
+ vnfd,
+ vnfd_id,
+ vnf_index,
+ nsr_descriptor,
+ ns_request,
+ ns_k8s_namespace,
+ )
- step = "creating vnfr vnfd-id='{}' constituent-vnfd='{}' at database".format(vnfd_id, vnf_index)
+ step = "creating vnfr vnfd-id='{}' constituent-vnfd='{}' at database".format(
+ vnfd_id, vnf_index
+ )
self._add_vnfr_to_db(vnfr_descriptor, rollback, session)
nsr_descriptor["constituent-vnfr-ref"].append(vnfr_descriptor["id"])
self.fs.mkdir(nsr_id)
return nsr_id, None
- except (ValidationError, EngineException, DbException, MsgException, FsException) as e:
+ except (
+ ValidationError,
+ EngineException,
+ DbException,
+ MsgException,
+ FsException,
+ ) as e:
raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
def _get_nsd_from_db(self, nsd_id, session):
return vnfd
def _add_nsr_to_db(self, nsr_descriptor, rollback, session):
- self.format_on_new(nsr_descriptor, session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ nsr_descriptor, session["project_id"], make_public=session["public"]
+ )
self.db.create("nsrs", nsr_descriptor)
rollback.append({"topic": "nsrs", "_id": nsr_descriptor["id"]})
def _add_vnfr_to_db(self, vnfr_descriptor, rollback, session):
- self.format_on_new(vnfr_descriptor, session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ vnfr_descriptor, session["project_id"], make_public=session["public"]
+ )
self.db.create("vnfrs", vnfr_descriptor)
rollback.append({"topic": "vnfrs", "_id": vnfr_descriptor["id"]})
def _check_nsd_operational_state(self, nsd, ns_request):
if nsd["_admin"]["operationalState"] == "DISABLED":
- raise EngineException("nsd with id '{}' is DISABLED, and thus cannot be used to create "
- "a network service".format(ns_request["nsdId"]), http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "nsd with id '{}' is DISABLED, and thus cannot be used to create "
+ "a network service".format(ns_request["nsdId"]),
+ http_code=HTTPStatus.CONFLICT,
+ )
def _get_ns_k8s_namespace(self, nsd, ns_request, session):
- additional_params, _ = self._format_additional_params(ns_request, descriptor=nsd)
+ additional_params, _ = self._format_additional_params(
+ ns_request, descriptor=nsd
+ )
# use for k8s-namespace from ns_request or additionalParamsForNs. By default, the project_id
ns_k8s_namespace = session["project_id"][0] if session["project_id"] else None
if ns_request and ns_request.get("k8s-namespace"):
def _create_nsr_descriptor_from_nsd(self, nsd, ns_request, nsr_id, session):
now = time()
- additional_params, _ = self._format_additional_params(ns_request, descriptor=nsd)
+ additional_params, _ = self._format_additional_params(
+ ns_request, descriptor=nsd
+ )
nsr_descriptor = {
"name": ns_request["nsName"],
for vnf_profile in vnf_profiles:
for vlc in vnf_profile.get("virtual-link-connectivity", ()):
for cpd in vlc.get("constituent-cpd-id", ()):
- all_vld_connection_point_data[vlc.get("virtual-link-profile-id")].append({
- "member-vnf-index-ref": cpd.get("constituent-base-element-id"),
- "vnfd-connection-point-ref": cpd.get("constituent-cpd-id"),
- "vnfd-id-ref": vnf_profile.get("vnfd-id")
- })
+ all_vld_connection_point_data[
+ vlc.get("virtual-link-profile-id")
+ ].append(
+ {
+ "member-vnf-index-ref": cpd.get(
+ "constituent-base-element-id"
+ ),
+ "vnfd-connection-point-ref": cpd.get(
+ "constituent-cpd-id"
+ ),
+ "vnfd-id-ref": vnf_profile.get("vnfd-id"),
+ }
+ )
vnfd = self._get_vnfd_from_db(vnf_profile.get("vnfd-id"), session)
if vsd.get("id") == vdu.get("virtual-storage-desc", [[]])[0]:
vdu_virtual_storage = vsd
# Get this vdu vcpus, memory and storage info for flavor_data
- if vdu_virtual_compute.get("virtual-cpu", {}).get("num-virtual-cpu"):
- flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"]["num-virtual-cpu"]
+ if vdu_virtual_compute.get("virtual-cpu", {}).get(
+ "num-virtual-cpu"
+ ):
+ flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"][
+ "num-virtual-cpu"
+ ]
if vdu_virtual_compute.get("virtual-memory", {}).get("size"):
- flavor_data["memory-mb"] = float(vdu_virtual_compute["virtual-memory"]["size"]) * 1024.0
+ flavor_data["memory-mb"] = (
+ float(vdu_virtual_compute["virtual-memory"]["size"])
+ * 1024.0
+ )
if vdu_virtual_storage.get("size-of-storage"):
- flavor_data["storage-gb"] = vdu_virtual_storage["size-of-storage"]
+ flavor_data["storage-gb"] = vdu_virtual_storage[
+ "size-of-storage"
+ ]
# Get this vdu EPA info for guest_epa
if vdu_virtual_compute.get("virtual-cpu", {}).get("cpu-quota"):
- guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"]["cpu-quota"]
+ guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"][
+ "cpu-quota"
+ ]
if vdu_virtual_compute.get("virtual-cpu", {}).get("pinning"):
vcpu_pinning = vdu_virtual_compute["virtual-cpu"]["pinning"]
if vcpu_pinning.get("thread-policy"):
- guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning["thread-policy"]
+ guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning[
+ "thread-policy"
+ ]
if vcpu_pinning.get("policy"):
- cpu_policy = "SHARED" if vcpu_pinning["policy"] == "dynamic" else "DEDICATED"
+ cpu_policy = (
+ "SHARED"
+ if vcpu_pinning["policy"] == "dynamic"
+ else "DEDICATED"
+ )
guest_epa["cpu-pinning-policy"] = cpu_policy
if vdu_virtual_compute.get("virtual-memory", {}).get("mem-quota"):
- guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"]["mem-quota"]
- if vdu_virtual_compute.get("virtual-memory", {}).get("mempage-size"):
- guest_epa["mempage-size"] = vdu_virtual_compute["virtual-memory"]["mempage-size"]
- if vdu_virtual_compute.get("virtual-memory", {}).get("numa-node-policy"):
- guest_epa["numa-node-policy"] = vdu_virtual_compute["virtual-memory"]["numa-node-policy"]
+ guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"][
+ "mem-quota"
+ ]
+ if vdu_virtual_compute.get("virtual-memory", {}).get(
+ "mempage-size"
+ ):
+ guest_epa["mempage-size"] = vdu_virtual_compute[
+ "virtual-memory"
+ ]["mempage-size"]
+ if vdu_virtual_compute.get("virtual-memory", {}).get(
+ "numa-node-policy"
+ ):
+ guest_epa["numa-node-policy"] = vdu_virtual_compute[
+ "virtual-memory"
+ ]["numa-node-policy"]
if vdu_virtual_storage.get("disk-io-quota"):
- guest_epa["disk-io-quota"] = vdu_virtual_storage["disk-io-quota"]
+ guest_epa["disk-io-quota"] = vdu_virtual_storage[
+ "disk-io-quota"
+ ]
if guest_epa:
flavor_data["guest-epa"] = guest_epa
self._add_image_to_nsr(nsr_descriptor, image_data)
for vld in nsr_vld:
- vld["vnfd-connection-point-ref"] = all_vld_connection_point_data.get(vld.get("id"), [])
+ vld["vnfd-connection-point-ref"] = all_vld_connection_point_data.get(
+ vld.get("id"), []
+ )
vld["name"] = vld["id"]
nsr_descriptor["vld"] = nsr_vld
return nsr_descriptor
def _get_image_data_from_vnfd(self, vnfd, sw_image_id):
- sw_image_desc = utils.find_in_list(vnfd.get("sw-image-desc", ()),
- lambda sw: sw["id"] == sw_image_id)
+ sw_image_desc = utils.find_in_list(
+ vnfd.get("sw-image-desc", ()), lambda sw: sw["id"] == sw_image_id
+ )
image_data = {}
if sw_image_desc.get("image"):
image_data["image"] = sw_image_desc["image"]
"""
Adds image to nsr checking first it is not already added
"""
- img = next((f for f in nsr_descriptor["image"] if
- all(f.get(k) == image_data[k] for k in image_data)), None)
+ img = next(
+ (
+ f
+ for f in nsr_descriptor["image"]
+ if all(f.get(k) == image_data[k] for k in image_data)
+ ),
+ None,
+ )
if not img:
image_data["id"] = str(len(nsr_descriptor["image"]))
nsr_descriptor["image"].append(image_data)
- def _create_vnfr_descriptor_from_vnfd(self, nsd, vnfd, vnfd_id, vnf_index, nsr_descriptor,
- ns_request, ns_k8s_namespace):
+ def _create_vnfr_descriptor_from_vnfd(
+ self,
+ nsd,
+ vnfd,
+ vnfd_id,
+ vnf_index,
+ nsr_descriptor,
+ ns_request,
+ ns_k8s_namespace,
+ ):
vnfr_id = str(uuid4())
nsr_id = nsr_descriptor["id"]
now = time()
- additional_params, vnf_params = self._format_additional_params(ns_request, vnf_index, descriptor=vnfd)
+ additional_params, vnf_params = self._format_additional_params(
+ ns_request, vnf_index, descriptor=vnfd
+ )
vnfr_descriptor = {
"id": vnfr_id,
all_k8s_cluster_nets_cpds = {}
for cpd in get_iterable(vnfd.get("ext-cpd")):
if cpd.get("k8s-cluster-net"):
- all_k8s_cluster_nets_cpds[cpd.get("k8s-cluster-net")] = cpd.get("id")
+ all_k8s_cluster_nets_cpds[cpd.get("k8s-cluster-net")] = cpd.get(
+ "id"
+ )
for net in get_iterable(vnfr_descriptor["k8s-cluster"].get("nets")):
if net.get("id") in all_k8s_cluster_nets_cpds:
- net["external-connection-point-ref"] = all_k8s_cluster_nets_cpds[net.get("id")]
+ net["external-connection-point-ref"] = all_k8s_cluster_nets_cpds[
+ net.get("id")
+ ]
# update kdus
for kdu in get_iterable(vnfd.get("kdu")):
- additional_params, kdu_params = self._format_additional_params(ns_request,
- vnf_index,
- kdu_name=kdu["name"],
- descriptor=vnfd)
+ additional_params, kdu_params = self._format_additional_params(
+ ns_request, vnf_index, kdu_name=kdu["name"], descriptor=vnfd
+ )
kdu_k8s_namespace = vnf_k8s_namespace
kdu_model = kdu_params.get("kdu_model") if kdu_params else None
if kdu_params and kdu_params.get("k8s-namespace"):
for vdu in vnfd.get("vdu", ()):
vdu_mgmt_cp = []
try:
- configs = vnfd.get("df")[0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]
- vdu_config = utils.find_in_list(configs, lambda config: config["id"] == vdu["id"])
+ configs = vnfd.get("df")[0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"]
+ vdu_config = utils.find_in_list(
+ configs, lambda config: config["id"] == vdu["id"]
+ )
except Exception:
vdu_config = None
try:
vdu_instantiation_level = utils.find_in_list(
vnfd.get("df")[0]["instantiation-level"][0]["vdu-level"],
- lambda a_vdu_profile: a_vdu_profile["vdu-id"] == vdu["id"]
+ lambda a_vdu_profile: a_vdu_profile["vdu-id"] == vdu["id"],
)
except Exception:
vdu_instantiation_level = None
if vdu_config:
external_connection_ee = utils.filter_in_list(
vdu_config.get("execution-environment-list", []),
- lambda ee: "external-connection-point-ref" in ee
+ lambda ee: "external-connection-point-ref" in ee,
)
for ee in external_connection_ee:
vdu_mgmt_cp.append(ee["external-connection-point-ref"])
additional_params, vdu_params = self._format_additional_params(
- ns_request, vnf_index, vdu_id=vdu["id"], descriptor=vnfd)
+ ns_request, vnf_index, vdu_id=vdu["id"], descriptor=vnfd
+ )
vdur = {
"vdu-id-ref": vdu["id"],
# TODO "name": "" Name of the VDU in the VIM
"internal-connection-point": [],
"interfaces": [],
"additionalParams": additional_params,
- "vdu-name": vdu["name"]
+ "vdu-name": vdu["name"],
}
if vdu_params and vdu_params.get("config-units"):
vdur["config-units"] = vdu_params["config-units"]
if deep_get(vdu, ("supplemental-boot-data", "boot-data-drive")):
- vdur["boot-data-drive"] = vdu["supplemental-boot-data"]["boot-data-drive"]
+ vdur["boot-data-drive"] = vdu["supplemental-boot-data"][
+ "boot-data-drive"
+ ]
if vdu.get("pdu-type"):
vdur["pdu-type"] = vdu["pdu-type"]
vdur["name"] = vdu["pdu-type"]
for iface in icp.get("virtual-network-interface-requirement", ()):
iface_fields = ("name", "mac-address")
- vdu_iface = {x: iface[x] for x in iface_fields if iface.get(x) is not None}
+ vdu_iface = {
+ x: iface[x] for x in iface_fields if iface.get(x) is not None
+ }
vdu_iface["internal-connection-point-ref"] = vdu_icp["id"]
if "port-security-enabled" in icp:
- vdu_iface["port-security-enabled"] = icp["port-security-enabled"]
+ vdu_iface["port-security-enabled"] = icp[
+ "port-security-enabled"
+ ]
if "port-security-disable-strategy" in icp:
- vdu_iface["port-security-disable-strategy"] = icp["port-security-disable-strategy"]
+ vdu_iface["port-security-disable-strategy"] = icp[
+ "port-security-disable-strategy"
+ ]
for ext_cp in vnfd.get("ext-cpd", ()):
if not ext_cp.get("int-cpd"):
if ext_cp["int-cpd"].get("vdu-id") != vdu["id"]:
continue
if icp["id"] == ext_cp["int-cpd"].get("cpd"):
- vdu_iface["external-connection-point-ref"] = ext_cp.get("id")
+ vdu_iface["external-connection-point-ref"] = ext_cp.get(
+ "id"
+ )
if "port-security-enabled" in ext_cp:
- vdu_iface["port-security-enabled"] = (
- ext_cp["port-security-enabled"]
- )
+ vdu_iface["port-security-enabled"] = ext_cp[
+ "port-security-enabled"
+ ]
if "port-security-disable-strategy" in ext_cp:
- vdu_iface["port-security-disable-strategy"] = (
- ext_cp["port-security-disable-strategy"]
- )
+ vdu_iface["port-security-disable-strategy"] = ext_cp[
+ "port-security-disable-strategy"
+ ]
break
- if vnfd_mgmt_cp and vdu_iface.get("external-connection-point-ref") == vnfd_mgmt_cp:
+ if (
+ vnfd_mgmt_cp
+ and vdu_iface.get("external-connection-point-ref")
+ == vnfd_mgmt_cp
+ ):
vdu_iface["mgmt-vnf"] = True
vdu_iface["mgmt-interface"] = True
# TODO: Change for multiple df support
for df in get_iterable(nsd.get("df")):
for vnf_profile in get_iterable(df.get("vnf-profile")):
- for vlc_index, vlc in \
- enumerate(get_iterable(vnf_profile.get("virtual-link-connectivity"))):
- for cpd in get_iterable(vlc.get("constituent-cpd-id")):
- if cpd.get("constituent-cpd-id") == iface_ext_cp:
- vdu_iface["ns-vld-id"] = vlc.get("virtual-link-profile-id")
+ for vlc_index, vlc in enumerate(
+ get_iterable(
+ vnf_profile.get("virtual-link-connectivity")
+ )
+ ):
+ for cpd in get_iterable(
+ vlc.get("constituent-cpd-id")
+ ):
+ if (
+ cpd.get("constituent-cpd-id")
+ == iface_ext_cp
+ ):
+ vdu_iface["ns-vld-id"] = vlc.get(
+ "virtual-link-profile-id"
+ )
# if iface type is SRIOV or PASSTHROUGH, set pci-interfaces flag to True
- if vdu_iface.get("type") in ("SR-IOV", "PCI-PASSTHROUGH"):
- nsr_descriptor["vld"][vlc_index]["pci-interfaces"] = True
+ if vdu_iface.get("type") in (
+ "SR-IOV",
+ "PCI-PASSTHROUGH",
+ ):
+ nsr_descriptor["vld"][vlc_index][
+ "pci-interfaces"
+ ] = True
break
elif vdu_iface.get("internal-connection-point-ref"):
vdu_iface["vnf-vld-id"] = icp.get("int-virtual-link-desc")
# TODO: store fixed IP address in the record (if it exists in the ICP)
# if iface type is SRIOV or PASSTHROUGH, set pci-interfaces flag to True
if vdu_iface.get("type") in ("SR-IOV", "PCI-PASSTHROUGH"):
- ivld_index = utils.find_index_in_list(vnfd.get("int-virtual-link-desc", ()),
- lambda ivld:
- ivld["id"] == icp.get("int-virtual-link-desc")
- )
+ ivld_index = utils.find_index_in_list(
+ vnfd.get("int-virtual-link-desc", ()),
+ lambda ivld: ivld["id"]
+ == icp.get("int-virtual-link-desc"),
+ )
vnfr_descriptor["vld"][ivld_index]["pci-interfaces"] = True
vdur["interfaces"].append(vdu_iface)
if vdu.get("sw-image-desc"):
sw_image = utils.find_in_list(
vnfd.get("sw-image-desc", ()),
- lambda image: image["id"] == vdu.get("sw-image-desc"))
+ lambda image: image["id"] == vdu.get("sw-image-desc"),
+ )
nsr_sw_image_data = utils.find_in_list(
nsr_descriptor["image"],
- lambda nsr_image: (nsr_image.get("image") == sw_image.get("image"))
+ lambda nsr_image: (nsr_image.get("image") == sw_image.get("image")),
)
vdur["ns-image-id"] = nsr_sw_image_data["id"]
for alt_image_id in vdu.get("alternative-sw-image-desc", ()):
sw_image = utils.find_in_list(
vnfd.get("sw-image-desc", ()),
- lambda image: image["id"] == alt_image_id)
+ lambda image: image["id"] == alt_image_id,
+ )
nsr_sw_image_data = utils.find_in_list(
nsr_descriptor["image"],
- lambda nsr_image: (nsr_image.get("image") == sw_image.get("image"))
+ lambda nsr_image: (
+ nsr_image.get("image") == sw_image.get("image")
+ ),
)
alt_image_ids.append(nsr_sw_image_data["id"])
vdur["alt-image-ids"] = alt_image_ids
flavor_data_name = vdu["id"][:56] + "-flv"
nsr_flavor_desc = utils.find_in_list(
nsr_descriptor["flavor"],
- lambda flavor: flavor["name"] == flavor_data_name)
+ lambda flavor: flavor["name"] == flavor_data_name,
+ )
if nsr_flavor_desc:
vdur["ns-flavor-id"] = nsr_flavor_desc["id"]
return vnfr_descriptor
def edit(self, session, _id, indata=None, kwargs=None, content=None):
- raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
class VnfrTopic(BaseTopic):
BaseTopic.__init__(self, db, fs, msg, auth)
def delete(self, session, _id, dry_run=False, not_send_msg=None):
- raise EngineException("Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
def edit(self, session, _id, indata=None, kwargs=None, content=None):
- raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
# Not used because vnfrs are created and deleted by NsrTopic class directly
- raise EngineException("Method new called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method new called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
class NsLcmOpTopic(BaseTopic):
topic = "nslcmops"
topic_msg = "ns"
- operation_schema = { # mapping between operation and jsonschema to validate
+ operation_schema = { # mapping between operation and jsonschema to validate
"instantiate": ns_instantiate,
"action": ns_action,
"scale": ns_scale,
nsd = nsr["nsd"]
# check vnf_member_index
if indata.get("vnf_member_index"):
- indata["member_vnf_index"] = indata.pop("vnf_member_index") # for backward compatibility
+ indata["member_vnf_index"] = indata.pop(
+ "vnf_member_index"
+ ) # for backward compatibility
if indata.get("member_vnf_index"):
- vnfd = self._get_vnfd_from_vnf_member_index(indata["member_vnf_index"], nsr["_id"])
+ vnfd = self._get_vnfd_from_vnf_member_index(
+ indata["member_vnf_index"], nsr["_id"]
+ )
try:
- configs = vnfd.get("df")[0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]
+ configs = vnfd.get("df")[0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"]
except Exception:
configs = []
if indata.get("vdu_id"):
self._check_valid_vdu(vnfd, indata["vdu_id"])
descriptor_configuration = utils.find_in_list(
- configs,
- lambda config: config["id"] == indata["vdu_id"]
+ configs, lambda config: config["id"] == indata["vdu_id"]
)
elif indata.get("kdu_name"):
self._check_valid_kdu(vnfd, indata["kdu_name"])
descriptor_configuration = utils.find_in_list(
- configs,
- lambda config: config["id"] == indata.get("kdu_name")
+ configs, lambda config: config["id"] == indata.get("kdu_name")
)
else:
descriptor_configuration = utils.find_in_list(
- configs,
- lambda config: config["id"] == vnfd["id"]
+ configs, lambda config: config["id"] == vnfd["id"]
)
if descriptor_configuration is not None:
- descriptor_configuration = descriptor_configuration.get("config-primitive")
+ descriptor_configuration = descriptor_configuration.get(
+ "config-primitive"
+ )
else: # use a NSD
- descriptor_configuration = nsd.get("ns-configuration", {}).get("config-primitive")
+ descriptor_configuration = nsd.get("ns-configuration", {}).get(
+ "config-primitive"
+ )
# For k8s, default primitives are allowed without validating the parameters
- if indata.get("kdu_name") and indata["primitive"] in ("upgrade", "rollback", "status", "inspect", "readme"):
+ if indata.get("kdu_name") and indata["primitive"] in (
+ "upgrade",
+ "rollback",
+ "status",
+ "inspect",
+ "readme",
+ ):
# TODO: check that a rollback request can only contain revision_number
if not indata.get("member_vnf_index"):
- raise EngineException("Missing action parameter 'member_vnf_index' for default KDU primitive '{}'"
- .format(indata["primitive"]))
+ raise EngineException(
+ "Missing action parameter 'member_vnf_index' for default KDU primitive '{}'".format(
+ indata["primitive"]
+ )
+ )
return
# if not, check primitive
for config_primitive in get_iterable(descriptor_configuration):
if paramd["name"] in in_primitive_params_copy:
del in_primitive_params_copy[paramd["name"]]
elif not paramd.get("default-value"):
- raise EngineException("Needed parameter {} not provided for primitive '{}'".format(
- paramd["name"], indata["primitive"]))
+ raise EngineException(
+ "Needed parameter {} not provided for primitive '{}'".format(
+ paramd["name"], indata["primitive"]
+ )
+ )
# check no extra primitive params are provided
if in_primitive_params_copy:
- raise EngineException("parameter/s '{}' not present at vnfd /nsd for primitive '{}'".format(
- list(in_primitive_params_copy.keys()), indata["primitive"]))
+ raise EngineException(
+ "parameter/s '{}' not present at vnfd /nsd for primitive '{}'".format(
+ list(in_primitive_params_copy.keys()), indata["primitive"]
+ )
+ )
break
else:
- raise EngineException("Invalid primitive '{}' is not present at vnfd/nsd".format(indata["primitive"]))
+ raise EngineException(
+ "Invalid primitive '{}' is not present at vnfd/nsd".format(
+ indata["primitive"]
+ )
+ )
def _check_scale_ns_operation(self, indata, nsr):
- vnfd = self._get_vnfd_from_vnf_member_index(indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"],
- nsr["_id"])
+ vnfd = self._get_vnfd_from_vnf_member_index(
+ indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"], nsr["_id"]
+ )
for scaling_aspect in get_iterable(vnfd.get("df", ())[0]["scaling-aspect"]):
- if indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"] == scaling_aspect["id"]:
+ if (
+ indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"]
+ == scaling_aspect["id"]
+ ):
break
else:
- raise EngineException("Invalid scaleVnfData:scaleByStepData:scaling-group-descriptor '{}' is not "
- "present at vnfd:scaling-aspect"
- .format(indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"]))
+ raise EngineException(
+ "Invalid scaleVnfData:scaleByStepData:scaling-group-descriptor '{}' is not "
+ "present at vnfd:scaling-aspect".format(
+ indata["scaleVnfData"]["scaleByStepData"][
+ "scaling-group-descriptor"
+ ]
+ )
+ )
def _check_instantiate_ns_operation(self, indata, nsr, session):
vnf_member_index_to_vnfd = {}  # map from vnf_member_index to vnf descriptor
if vnf_member_index_to_vnfd.get(member_vnf_index):
vnfd = vnf_member_index_to_vnfd[member_vnf_index]
else:
- vnfd = self._get_vnfd_from_vnf_member_index(member_vnf_index, nsr["_id"])
- vnf_member_index_to_vnfd[member_vnf_index] = vnfd # add to cache, avoiding a later look for
+ vnfd = self._get_vnfd_from_vnf_member_index(
+ member_vnf_index, nsr["_id"]
+ )
+ vnf_member_index_to_vnfd[
+ member_vnf_index
+ ] = vnfd # add to cache, avoiding a later look for
self._check_vnf_instantiation_params(in_vnf, vnfd)
if in_vnf.get("vimAccountId"):
- self._check_valid_vim_account(in_vnf["vimAccountId"], vim_accounts, session)
+ self._check_valid_vim_account(
+ in_vnf["vimAccountId"], vim_accounts, session
+ )
for in_vld in get_iterable(indata.get("vld")):
- self._check_valid_wim_account(in_vld.get("wimAccountId"), wim_accounts, session)
+ self._check_valid_wim_account(
+ in_vld.get("wimAccountId"), wim_accounts, session
+ )
for vldd in get_iterable(nsd.get("virtual-link-desc")):
if in_vld["name"] == vldd["id"]:
break
else:
- raise EngineException("Invalid parameter vld:name='{}' is not present at nsd:vld".format(
- in_vld["name"]))
+ raise EngineException(
+ "Invalid parameter vld:name='{}' is not present at nsd:vld".format(
+ in_vld["name"]
+ )
+ )
def _get_vnfd_from_vnf_member_index(self, member_vnf_index, nsr_id):
# Obtain vnf descriptor. The vnfr is used to get the vnfd._id used for this member_vnf_index
- vnfr = self.db.get_one("vnfrs",
- {"nsr-id-ref": nsr_id, "member-vnf-index-ref": member_vnf_index},
- fail_on_empty=False)
+ vnfr = self.db.get_one(
+ "vnfrs",
+ {"nsr-id-ref": nsr_id, "member-vnf-index-ref": member_vnf_index},
+ fail_on_empty=False,
+ )
if not vnfr:
- raise EngineException("Invalid parameter member_vnf_index='{}' is not one of the "
- "nsd:constituent-vnfd".format(member_vnf_index))
+ raise EngineException(
+ "Invalid parameter member_vnf_index='{}' is not one of the "
+ "nsd:constituent-vnfd".format(member_vnf_index)
+ )
vnfd = self.db.get_one("vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=False)
if not vnfd:
- raise EngineException("vnfd id={} has been deleted!. Operation cannot be performed".
- format(vnfr["vnfd-id"]))
+ raise EngineException(
+ "vnfd id={} has been deleted!. Operation cannot be performed".format(
+ vnfr["vnfd-id"]
+ )
+ )
return vnfd
def _check_valid_vdu(self, vnfd, vdu_id):
if vdud["id"] == vdu_id:
return vdud
else:
- raise EngineException("Invalid parameter vdu_id='{}' not present at vnfd:vdu:id".format(vdu_id))
+ raise EngineException(
+ "Invalid parameter vdu_id='{}' not present at vnfd:vdu:id".format(
+ vdu_id
+ )
+ )
def _check_valid_kdu(self, vnfd, kdu_name):
for kdud in get_iterable(vnfd.get("kdu")):
if kdud["name"] == kdu_name:
return kdud
else:
- raise EngineException("Invalid parameter kdu_name='{}' not present at vnfd:kdu:name".format(kdu_name))
+ raise EngineException(
+ "Invalid parameter kdu_name='{}' not present at vnfd:kdu:name".format(
+ kdu_name
+ )
+ )
def _check_vnf_instantiation_params(self, in_vnf, vnfd):
for in_vdu in get_iterable(in_vnf.get("vdu")):
if volumed["id"] == volume["name"]:
break
else:
- raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
- "volume:name='{}' is not present at "
- "vnfd:vdu:virtual-storage-desc list".
- format(in_vnf["member-vnf-index"], in_vdu["id"],
- volume["id"]))
+ raise EngineException(
+ "Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
+ "volume:name='{}' is not present at "
+ "vnfd:vdu:virtual-storage-desc list".format(
+ in_vnf["member-vnf-index"],
+ in_vdu["id"],
+ volume["id"],
+ )
+ )
vdu_if_names = set()
for cpd in get_iterable(vdu.get("int-cpd")):
- for iface in get_iterable(cpd.get("virtual-network-interface-requirement")):
+ for iface in get_iterable(
+ cpd.get("virtual-network-interface-requirement")
+ ):
vdu_if_names.add(iface.get("name"))
for in_iface in get_iterable(in_vdu["interface"]):
if in_iface["name"] in vdu_if_names:
break
else:
- raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
- "int-cpd[id='{}'] is not present at vnfd:vdu:int-cpd"
- .format(in_vnf["member-vnf-index"], in_vdu["id"],
- in_iface["name"]))
+ raise EngineException(
+ "Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
+ "int-cpd[id='{}'] is not present at vnfd:vdu:int-cpd".format(
+ in_vnf["member-vnf-index"],
+ in_vdu["id"],
+ in_iface["name"],
+ )
+ )
break
else:
- raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}'] is not present "
- "at vnfd:vdu".format(in_vnf["member-vnf-index"], in_vdu["id"]))
+ raise EngineException(
+ "Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}'] is not present "
+ "at vnfd:vdu".format(in_vnf["member-vnf-index"], in_vdu["id"])
+ )
- vnfd_ivlds_cpds = {ivld.get("id"): set() for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))}
+ vnfd_ivlds_cpds = {
+ ivld.get("id"): set()
+ for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))
+ }
for vdu in get_iterable(vnfd.get("vdu")):
for cpd in get_iterable(vnfd.get("int-cpd")):
if cpd.get("int-virtual-link-desc"):
if in_icp["id-ref"] in vnfd_ivlds_cpds[in_ivld.get("name")]:
break
else:
- raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:internal-vld[name"
- "='{}']:internal-connection-point[id-ref:'{}'] is not present at "
- "vnfd:internal-vld:name/id:internal-connection-point"
- .format(in_vnf["member-vnf-index"], in_ivld["name"],
- in_icp["id-ref"]))
+ raise EngineException(
+ "Invalid parameter vnf[member-vnf-index='{}']:internal-vld[name"
+ "='{}']:internal-connection-point[id-ref:'{}'] is not present at "
+ "vnfd:internal-vld:name/id:internal-connection-point".format(
+ in_vnf["member-vnf-index"],
+ in_ivld["name"],
+ in_icp["id-ref"],
+ )
+ )
else:
- raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:internal-vld:name='{}'"
- " is not present at vnfd '{}'".format(in_vnf["member-vnf-index"],
- in_ivld["name"], vnfd["id"]))
+ raise EngineException(
+ "Invalid parameter vnf[member-vnf-index='{}']:internal-vld:name='{}'"
+ " is not present at vnfd '{}'".format(
+ in_vnf["member-vnf-index"], in_ivld["name"], vnfd["id"]
+ )
+ )
def _check_valid_vim_account(self, vim_account, vim_accounts, session):
if vim_account in vim_accounts:
db_filter["_id"] = vim_account
self.db.get_one("vim_accounts", db_filter)
except Exception:
- raise EngineException("Invalid vimAccountId='{}' not present for the project".format(vim_account))
+ raise EngineException(
+ "Invalid vimAccountId='{}' not present for the project".format(
+ vim_account
+ )
+ )
vim_accounts.append(vim_account)
def _check_valid_wim_account(self, wim_account, wim_accounts, session):
db_filter["_id"] = wim_account
self.db.get_one("wim_accounts", db_filter)
except Exception:
- raise EngineException("Invalid wimAccountId='{}' not present for the project".format(wim_account))
+ raise EngineException(
+ "Invalid wimAccountId='{}' not present for the project".format(
+ wim_account
+ )
+ )
wim_accounts.append(wim_account)
- def _look_for_pdu(self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback):
+ def _look_for_pdu(
+ self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+ ):
"""
Look for a free PDU in the catalog matching vdur type and interfaces. Fills vnfr.vdur with the interface
(ip_address, ...) information.
else:
raise EngineException(
"No PDU of type={} at vim_account={} found for member_vnf_index={}, vdu={} matching interface "
- "names".format(pdu_type, vim_account, vnfr["member-vnf-index-ref"], vdur["vdu-id-ref"]))
+ "names".format(
+ pdu_type,
+ vim_account,
+ vnfr["member-vnf-index-ref"],
+ vdur["vdu-id-ref"],
+ )
+ )
# step 2. Update pdu
rollback_pdu = {
"_admin.usage.nsr_id": None,
"_admin.usage.vdur": None,
}
- self.db.set_one("pdus", {"_id": pdu["_id"]},
- {"_admin.usageState": "IN_USE",
- "_admin.usage": {"vnfr_id": vnfr["_id"],
- "nsr_id": vnfr["nsr-id-ref"],
- "vdur": vdur["vdu-id-ref"]}
- })
- rollback.append({"topic": "pdus", "_id": pdu["_id"], "operation": "set", "content": rollback_pdu})
+ self.db.set_one(
+ "pdus",
+ {"_id": pdu["_id"]},
+ {
+ "_admin.usageState": "IN_USE",
+ "_admin.usage": {
+ "vnfr_id": vnfr["_id"],
+ "nsr_id": vnfr["nsr-id-ref"],
+ "vdur": vdur["vdu-id-ref"],
+ },
+ },
+ )
+ rollback.append(
+ {
+ "topic": "pdus",
+ "_id": pdu["_id"],
+ "operation": "set",
+ "content": rollback_pdu,
+ }
+ )
# step 3. Fill vnfr info by filling vdur
vdu_text = "vdur.{}".format(vdur_index)
if pdu_interface["name"] == vdur_interface["name"]:
iface_text = vdu_text + ".interfaces.{}".format(iface_index)
for k, v in pdu_interface.items():
- if k in ("ip-address", "mac-address"): # TODO: switch-xxxxx must be inserted
+ if k in (
+ "ip-address",
+ "mac-address",
+ ): # TODO: switch-xxxxx must be inserted
vnfr_update[iface_text + ".{}".format(k)] = v
- vnfr_update_rollback[iface_text + ".{}".format(k)] = vdur_interface.get(v)
+ vnfr_update_rollback[
+ iface_text + ".{}".format(k)
+ ] = vdur_interface.get(v)
if pdu_interface.get("ip-address"):
- if vdur_interface.get("mgmt-interface") or vdur_interface.get("mgmt-vnf"):
- vnfr_update_rollback[vdu_text + ".ip-address"] = vdur.get("ip-address")
- vnfr_update[vdu_text + ".ip-address"] = pdu_interface["ip-address"]
+ if vdur_interface.get(
+ "mgmt-interface"
+ ) or vdur_interface.get("mgmt-vnf"):
+ vnfr_update_rollback[
+ vdu_text + ".ip-address"
+ ] = vdur.get("ip-address")
+ vnfr_update[vdu_text + ".ip-address"] = pdu_interface[
+ "ip-address"
+ ]
if vdur_interface.get("mgmt-vnf"):
- vnfr_update_rollback["ip-address"] = vnfr.get("ip-address")
+ vnfr_update_rollback["ip-address"] = vnfr.get(
+ "ip-address"
+ )
vnfr_update["ip-address"] = pdu_interface["ip-address"]
- vnfr_update[vdu_text + ".ip-address"] = pdu_interface["ip-address"]
- if pdu_interface.get("vim-network-name") or pdu_interface.get("vim-network-id"):
- ifaces_forcing_vim_network.append({
- "name": vdur_interface.get("vnf-vld-id") or vdur_interface.get("ns-vld-id"),
- "vnf-vld-id": vdur_interface.get("vnf-vld-id"),
- "ns-vld-id": vdur_interface.get("ns-vld-id")})
+ vnfr_update[vdu_text + ".ip-address"] = pdu_interface[
+ "ip-address"
+ ]
+ if pdu_interface.get("vim-network-name") or pdu_interface.get(
+ "vim-network-id"
+ ):
+ ifaces_forcing_vim_network.append(
+ {
+ "name": vdur_interface.get("vnf-vld-id")
+ or vdur_interface.get("ns-vld-id"),
+ "vnf-vld-id": vdur_interface.get("vnf-vld-id"),
+ "ns-vld-id": vdur_interface.get("ns-vld-id"),
+ }
+ )
if pdu_interface.get("vim-network-id"):
- ifaces_forcing_vim_network[-1]["vim-network-id"] = pdu_interface["vim-network-id"]
+ ifaces_forcing_vim_network[-1][
+ "vim-network-id"
+ ] = pdu_interface["vim-network-id"]
if pdu_interface.get("vim-network-name"):
- ifaces_forcing_vim_network[-1]["vim-network-name"] = pdu_interface["vim-network-name"]
+ ifaces_forcing_vim_network[-1][
+ "vim-network-name"
+ ] = pdu_interface["vim-network-name"]
break
return ifaces_forcing_vim_network
- def _look_for_k8scluster(self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback):
+ def _look_for_k8scluster(
+ self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+ ):
"""
Look for an available k8scluster for all the kuds in the vnfd matching version and cni requirements.
Fills vnfr.kdur with the selected k8scluster
# restrict by cni
if vnfr["k8s-cluster"].get("cni"):
k8s_requirements["cni"] = vnfr["k8s-cluster"]["cni"]
- if not set(vnfr["k8s-cluster"]["cni"]).intersection(k8scluster.get("cni", ())):
+ if not set(vnfr["k8s-cluster"]["cni"]).intersection(
+ k8scluster.get("cni", ())
+ ):
continue
# restrict by version
if vnfr["k8s-cluster"].get("version"):
# restrict by number of networks
if vnfr["k8s-cluster"].get("nets"):
k8s_requirements["networks"] = len(vnfr["k8s-cluster"]["nets"])
- if not k8scluster.get("nets") or len(k8scluster["nets"]) < len(vnfr["k8s-cluster"]["nets"]):
+ if not k8scluster.get("nets") or len(k8scluster["nets"]) < len(
+ vnfr["k8s-cluster"]["nets"]
+ ):
continue
break
else:
- raise EngineException("No k8scluster with requirements='{}' at vim_account={} found for member_vnf_index={}"
- .format(k8s_requirements, vim_account, vnfr["member-vnf-index-ref"]))
+ raise EngineException(
+ "No k8scluster with requirements='{}' at vim_account={} found for member_vnf_index={}".format(
+ k8s_requirements, vim_account, vnfr["member-vnf-index-ref"]
+ )
+ )
for kdur_index, kdur in enumerate(get_iterable(vnfr.get("kdur"))):
# step 3. Fill vnfr info by filling kdur
else:
vim_net = k8scluster["nets"][k8scluster_net_list[0]]
k8scluster_net_list.pop(0)
- vnfr_update_rollback["k8s-cluster.nets.{}.vim_net".format(net_index)] = None
+ vnfr_update_rollback[
+ "k8s-cluster.nets.{}.vim_net".format(net_index)
+ ] = None
vnfr_update["k8s-cluster.nets.{}.vim_net".format(net_index)] = vim_net
- if vim_net and (kdur_net.get("vnf-vld-id") or kdur_net.get("ns-vld-id")):
- ifaces_forcing_vim_network.append({
- "name": kdur_net.get("vnf-vld-id") or kdur_net.get("ns-vld-id"),
- "vnf-vld-id": kdur_net.get("vnf-vld-id"),
- "ns-vld-id": kdur_net.get("ns-vld-id"),
- "vim-network-name": vim_net, # TODO can it be vim-network-id ???
- })
+ if vim_net and (
+ kdur_net.get("vnf-vld-id") or kdur_net.get("ns-vld-id")
+ ):
+ ifaces_forcing_vim_network.append(
+ {
+ "name": kdur_net.get("vnf-vld-id")
+ or kdur_net.get("ns-vld-id"),
+ "vnf-vld-id": kdur_net.get("vnf-vld-id"),
+ "ns-vld-id": kdur_net.get("ns-vld-id"),
+ "vim-network-name": vim_net, # TODO can it be vim-network-id ???
+ }
+ )
# TODO check that this forcing is not incompatible with other forcing
return ifaces_forcing_vim_network
for vdur_index, vdur in enumerate(vnfr["vdur"]):
if vdu_inst_param["id"] != vdur["vdu-id-ref"]:
continue
- for iface_inst_param in get_iterable(vdu_inst_param.get("interface")):
- iface_index, _ = next(i for i in enumerate(vdur["interfaces"])
- if i[1]["name"] == iface_inst_param["name"])
- vnfr_update_text = "vdur.{}.interfaces.{}".format(vdur_index, iface_index)
+ for iface_inst_param in get_iterable(
+ vdu_inst_param.get("interface")
+ ):
+ iface_index, _ = next(
+ i
+ for i in enumerate(vdur["interfaces"])
+ if i[1]["name"] == iface_inst_param["name"]
+ )
+ vnfr_update_text = "vdur.{}.interfaces.{}".format(
+ vdur_index, iface_index
+ )
if iface_inst_param.get("ip-address"):
- vnfr_update[vnfr_update_text + ".ip-address"] = increment_ip_mac(
- iface_inst_param.get("ip-address"), vdur.get("count-index", 0))
+ vnfr_update[
+ vnfr_update_text + ".ip-address"
+ ] = increment_ip_mac(
+ iface_inst_param.get("ip-address"),
+ vdur.get("count-index", 0),
+ )
vnfr_update[vnfr_update_text + ".fixed-ip"] = True
if iface_inst_param.get("mac-address"):
- vnfr_update[vnfr_update_text + ".mac-address"] = increment_ip_mac(
- iface_inst_param.get("mac-address"), vdur.get("count-index", 0))
+ vnfr_update[
+ vnfr_update_text + ".mac-address"
+ ] = increment_ip_mac(
+ iface_inst_param.get("mac-address"),
+ vdur.get("count-index", 0),
+ )
vnfr_update[vnfr_update_text + ".fixed-mac"] = True
if iface_inst_param.get("floating-ip-required"):
- vnfr_update[vnfr_update_text + ".floating-ip-required"] = True
+ vnfr_update[
+ vnfr_update_text + ".floating-ip-required"
+ ] = True
# get vnf.internal-vld.internal-conection-point instantiation params to update vnfr.vdur.interfaces
# TODO update vld with the ip-profile
- for ivld_inst_param in get_iterable(vnf_inst_params.get("internal-vld")):
- for icp_inst_param in get_iterable(ivld_inst_param.get("internal-connection-point")):
+ for ivld_inst_param in get_iterable(
+ vnf_inst_params.get("internal-vld")
+ ):
+ for icp_inst_param in get_iterable(
+ ivld_inst_param.get("internal-connection-point")
+ ):
# look for iface
for vdur_index, vdur in enumerate(vnfr["vdur"]):
for iface_index, iface in enumerate(vdur["interfaces"]):
- if iface.get("internal-connection-point-ref") == icp_inst_param["id-ref"]:
- vnfr_update_text = "vdur.{}.interfaces.{}".format(vdur_index, iface_index)
+ if (
+ iface.get("internal-connection-point-ref")
+ == icp_inst_param["id-ref"]
+ ):
+ vnfr_update_text = "vdur.{}.interfaces.{}".format(
+ vdur_index, iface_index
+ )
if icp_inst_param.get("ip-address"):
- vnfr_update[vnfr_update_text + ".ip-address"] = increment_ip_mac(
- icp_inst_param.get("ip-address"), vdur.get("count-index", 0))
- vnfr_update[vnfr_update_text + ".fixed-ip"] = True
+ vnfr_update[
+ vnfr_update_text + ".ip-address"
+ ] = increment_ip_mac(
+ icp_inst_param.get("ip-address"),
+ vdur.get("count-index", 0),
+ )
+ vnfr_update[
+ vnfr_update_text + ".fixed-ip"
+ ] = True
if icp_inst_param.get("mac-address"):
- vnfr_update[vnfr_update_text + ".mac-address"] = increment_ip_mac(
- icp_inst_param.get("mac-address"), vdur.get("count-index", 0))
- vnfr_update[vnfr_update_text + ".fixed-mac"] = True
+ vnfr_update[
+ vnfr_update_text + ".mac-address"
+ ] = increment_ip_mac(
+ icp_inst_param.get("mac-address"),
+ vdur.get("count-index", 0),
+ )
+ vnfr_update[
+ vnfr_update_text + ".fixed-mac"
+ ] = True
break
# get ip address from instantiation parameters.vld.vnfd-connection-point-ref
for vld_inst_param in get_iterable(indata.get("vld")):
- for vnfcp_inst_param in get_iterable(vld_inst_param.get("vnfd-connection-point-ref")):
+ for vnfcp_inst_param in get_iterable(
+ vld_inst_param.get("vnfd-connection-point-ref")
+ ):
if vnfcp_inst_param["member-vnf-index-ref"] != member_vnf_index:
continue
# look for iface
for vdur_index, vdur in enumerate(vnfr["vdur"]):
for iface_index, iface in enumerate(vdur["interfaces"]):
- if iface.get("external-connection-point-ref") == \
- vnfcp_inst_param["vnfd-connection-point-ref"]:
- vnfr_update_text = "vdur.{}.interfaces.{}".format(vdur_index, iface_index)
+ if (
+ iface.get("external-connection-point-ref")
+ == vnfcp_inst_param["vnfd-connection-point-ref"]
+ ):
+ vnfr_update_text = "vdur.{}.interfaces.{}".format(
+ vdur_index, iface_index
+ )
if vnfcp_inst_param.get("ip-address"):
- vnfr_update[vnfr_update_text + ".ip-address"] = increment_ip_mac(
- vnfcp_inst_param.get("ip-address"), vdur.get("count-index", 0))
+ vnfr_update[
+ vnfr_update_text + ".ip-address"
+ ] = increment_ip_mac(
+ vnfcp_inst_param.get("ip-address"),
+ vdur.get("count-index", 0),
+ )
vnfr_update[vnfr_update_text + ".fixed-ip"] = True
if vnfcp_inst_param.get("mac-address"):
- vnfr_update[vnfr_update_text + ".mac-address"] = increment_ip_mac(
- vnfcp_inst_param.get("mac-address"), vdur.get("count-index", 0))
+ vnfr_update[
+ vnfr_update_text + ".mac-address"
+ ] = increment_ip_mac(
+ vnfcp_inst_param.get("mac-address"),
+ vdur.get("count-index", 0),
+ )
vnfr_update[vnfr_update_text + ".fixed-mac"] = True
break
vnfr_update_rollback["vca-id"] = vnfr.get("vca-id")
# get pdu
- ifaces_forcing_vim_network = self._look_for_pdu(session, rollback, vnfr, vim_account, vnfr_update,
- vnfr_update_rollback)
+ ifaces_forcing_vim_network = self._look_for_pdu(
+ session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+ )
# get kdus
- ifaces_forcing_vim_network += self._look_for_k8scluster(session, rollback, vnfr, vim_account, vnfr_update,
- vnfr_update_rollback)
+ ifaces_forcing_vim_network += self._look_for_k8scluster(
+ session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+ )
# update database vnfr
self.db.set_one("vnfrs", {"_id": vnfr["_id"]}, vnfr_update)
- rollback.append({"topic": "vnfrs", "_id": vnfr["_id"], "operation": "set", "content": vnfr_update_rollback})
+ rollback.append(
+ {
+ "topic": "vnfrs",
+ "_id": vnfr["_id"],
+ "operation": "set",
+ "content": vnfr_update_rollback,
+ }
+ )
# Update indada in case pdu forces to use a concrete vim-network-name
# TODO check if user has already insert a vim-network-name and raises an error
if iface_info.get("ns-vld-id"):
if "vld" not in indata:
indata["vld"] = []
- indata["vld"].append({key: iface_info[key] for key in
- ("name", "vim-network-name", "vim-network-id") if iface_info.get(key)})
+ indata["vld"].append(
+ {
+ key: iface_info[key]
+ for key in ("name", "vim-network-name", "vim-network-id")
+ if iface_info.get(key)
+ }
+ )
elif iface_info.get("vnf-vld-id"):
if "vnf" not in indata:
indata["vnf"] = []
- indata["vnf"].append({
- "member-vnf-index": member_vnf_index,
- "internal-vld": [{key: iface_info[key] for key in
- ("name", "vim-network-name", "vim-network-id") if iface_info.get(key)}]
- })
+ indata["vnf"].append(
+ {
+ "member-vnf-index": member_vnf_index,
+ "internal-vld": [
+ {
+ key: iface_info[key]
+ for key in (
+ "name",
+ "vim-network-name",
+ "vim-network-id",
+ )
+ if iface_info.get(key)
+ }
+ ],
+ }
+ )
@staticmethod
def _create_nslcmop(nsr_id, operation, params):
"links": {
"self": "/osm/nslcm/v1/ns_lcm_op_occs/" + _id,
"nsInstance": "/osm/nslcm/v1/ns_instances/" + nsr_id,
- }
+ },
}
return nslcmop
vims = self.db.get_list("vim_accounts", db_filter)
vimAccounts = []
for vim in vims:
- vimAccounts.append(vim['_id'])
+ vimAccounts.append(vim["_id"])
return vimAccounts
- def new(self, rollback, session, indata=None, kwargs=None, headers=None, slice_object=False):
+ def new(
+ self,
+ rollback,
+ session,
+ indata=None,
+ kwargs=None,
+ headers=None,
+ slice_object=False,
+ ):
"""
Performs a new operation over a ns
:param rollback: list to append created items at database in case a rollback must to be done
:param headers: http request headers
:return: id of the nslcmops
"""
+
def check_if_nsr_is_not_slice_member(session, nsr_id):
nsis = None
db_filter = self._get_project_filter(session)
db_filter["_admin.nsrs-detailed-list.ANYINDEX.nsrId"] = nsr_id
- nsis = self.db.get_one("nsis", db_filter, fail_on_empty=False, fail_on_more=False)
+ nsis = self.db.get_one(
+ "nsis", db_filter, fail_on_empty=False, fail_on_more=False
+ )
if nsis:
- raise EngineException("The NS instance {} cannot be terminated because is used by the slice {}".format(
- nsr_id, nsis["_id"]), http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "The NS instance {} cannot be terminated because is used by the slice {}".format(
+ nsr_id, nsis["_id"]
+ ),
+ http_code=HTTPStatus.CONFLICT,
+ )
try:
# Override descriptor with query string kwargs
# initial checking
if operation == "terminate" and slice_object is False:
check_if_nsr_is_not_slice_member(session, nsr["_id"])
- if not nsr["_admin"].get("nsState") or nsr["_admin"]["nsState"] == "NOT_INSTANTIATED":
+ if (
+ not nsr["_admin"].get("nsState")
+ or nsr["_admin"]["nsState"] == "NOT_INSTANTIATED"
+ ):
if operation == "terminate" and indata.get("autoremove"):
# NSR must be deleted
- return None, None # a none in this case is used to indicate not instantiated. It can be removed
+ return (
+ None,
+ None,
+ ) # a none in this case is used to indicate not instantiated. It can be removed
if operation != "instantiate":
- raise EngineException("ns_instance '{}' cannot be '{}' because it is not instantiated".format(
- nsInstanceId, operation), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "ns_instance '{}' cannot be '{}' because it is not instantiated".format(
+ nsInstanceId, operation
+ ),
+ HTTPStatus.CONFLICT,
+ )
else:
if operation == "instantiate" and not session["force"]:
- raise EngineException("ns_instance '{}' cannot be '{}' because it is already instantiated".format(
- nsInstanceId, operation), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "ns_instance '{}' cannot be '{}' because it is already instantiated".format(
+ nsInstanceId, operation
+ ),
+ HTTPStatus.CONFLICT,
+ )
self._check_ns_operation(session, nsr, operation, indata)
if operation == "instantiate":
nslcmop_desc = self._create_nslcmop(nsInstanceId, operation, indata)
_id = nslcmop_desc["_id"]
- self.format_on_new(nslcmop_desc, session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ nslcmop_desc, session["project_id"], make_public=session["public"]
+ )
if indata.get("placement-engine"):
# Save valid vim accounts in lcm operation descriptor
- nslcmop_desc['operationParams']['validVimAccounts'] = self._get_enabled_vims(session)
+ nslcmop_desc["operationParams"][
+ "validVimAccounts"
+ ] = self._get_enabled_vims(session)
self.db.create("nslcmops", nslcmop_desc)
rollback.append({"topic": "nslcmops", "_id": _id})
if not slice_object:
# raise EngineException("Cannot get ns_instance '{}': {}".format(e), HTTPStatus.NOT_FOUND)
def delete(self, session, _id, dry_run=False, not_send_msg=None):
- raise EngineException("Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
def edit(self, session, _id, indata=None, kwargs=None, content=None):
- raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
class NsiTopic(BaseTopic):
if additional_params:
for k, v in additional_params.items():
if not isinstance(k, str):
- raise EngineException("Invalid param at additionalParamsForNsi:{}. Only string keys are allowed".
- format(k))
+ raise EngineException(
+ "Invalid param at additionalParamsForNsi:{}. Only string keys are allowed".format(
+ k
+ )
+ )
if "." in k or "$" in k:
- raise EngineException("Invalid param at additionalParamsForNsi:{}. Keys must not contain dots or $".
- format(k))
+ raise EngineException(
+ "Invalid param at additionalParamsForNsi:{}. Keys must not contain dots or $".format(
+ k
+ )
+ )
if isinstance(v, (dict, tuple, list)):
additional_params[k] = "!!yaml " + safe_dump(v)
return additional_params
return
nstd_id = descriptor["nst-ref"]
if not self.get_item_list(session, "nsts", {"id": nstd_id}):
- raise EngineException("Descriptor error at nst-ref='{}' references a non exist nstd".format(nstd_id),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Descriptor error at nst-ref='{}' references a non exist nstd".format(
+ nstd_id
+ ),
+ http_code=HTTPStatus.CONFLICT,
+ )
def check_conflict_on_del(self, session, _id, db_content):
"""
return
nsi = db_content
if nsi["_admin"].get("nsiState") == "INSTANTIATED":
- raise EngineException("nsi '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
- "Launch 'terminate' operation first; or force deletion".format(_id),
- http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "nsi '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
+ "Launch 'terminate' operation first; or force deletion".format(_id),
+ http_code=HTTPStatus.CONFLICT,
+ )
def delete_extra(self, session, _id, db_content, not_send_msg=None):
"""
for nsrs_detailed_item in nsir["_admin"]["nsrs-detailed-list"]:
nsr_id = nsrs_detailed_item["nsrId"]
if nsrs_detailed_item.get("shared"):
- _filter = {"_admin.nsrs-detailed-list.ANYINDEX.shared": True,
- "_admin.nsrs-detailed-list.ANYINDEX.nsrId": nsr_id,
- "_id.ne": nsir["_id"]}
- nsi = self.db.get_one("nsis", _filter, fail_on_empty=False, fail_on_more=False)
+ _filter = {
+ "_admin.nsrs-detailed-list.ANYINDEX.shared": True,
+ "_admin.nsrs-detailed-list.ANYINDEX.nsrId": nsr_id,
+ "_id.ne": nsir["_id"],
+ }
+ nsi = self.db.get_one(
+ "nsis", _filter, fail_on_empty=False, fail_on_more=False
+ )
if nsi: # last one using nsr
continue
try:
- self.nsrTopic.delete(session, nsr_id, dry_run=False, not_send_msg=not_send_msg)
+ self.nsrTopic.delete(
+ session, nsr_id, dry_run=False, not_send_msg=not_send_msg
+ )
except (DbException, EngineException) as e:
if e.http_code == HTTPStatus.NOT_FOUND:
pass
nsir_admin = nsir.get("_admin")
if nsir_admin and nsir_admin.get("nst-id"):
# check if used by another NSI
- nsis_list = self.db.get_one("nsis", {"nst-id": nsir_admin["nst-id"]},
- fail_on_empty=False, fail_on_more=False)
+ nsis_list = self.db.get_one(
+ "nsis",
+ {"nst-id": nsir_admin["nst-id"]},
+ fail_on_empty=False,
+ fail_on_more=False,
+ )
if not nsis_list:
- self.db.set_one("nsts", {"_id": nsir_admin["nst-id"]}, {"_admin.usageState": "NOT_IN_USE"})
+ self.db.set_one(
+ "nsts",
+ {"_id": nsir_admin["nst-id"]},
+ {"_admin.usageState": "NOT_IN_USE"},
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
"""
slice_request = self._validate_input_new(slice_request, session["force"])
# look for nstd
- step = "getting nstd id='{}' from database".format(slice_request.get("nstId"))
+ step = "getting nstd id='{}' from database".format(
+ slice_request.get("nstId")
+ )
_filter = self._get_project_filter(session)
_filter["_id"] = slice_request["nstId"]
nstd = self.db.get_one("nsts", _filter)
# check NST is not disabled
step = "checking NST operationalState"
if nstd["_admin"]["operationalState"] == "DISABLED":
- raise EngineException("nst with id '{}' is DISABLED, and thus cannot be used to create a netslice "
- "instance".format(slice_request["nstId"]), http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "nst with id '{}' is DISABLED, and thus cannot be used to create a netslice "
+ "instance".format(slice_request["nstId"]),
+ http_code=HTTPStatus.CONFLICT,
+ )
del _filter["_id"]
# check NSD is not disabled
step = "checking operationalState"
if nstd["_admin"]["operationalState"] == "DISABLED":
- raise EngineException("nst with id '{}' is DISABLED, and thus cannot be used to create "
- "a network slice".format(slice_request["nstId"]), http_code=HTTPStatus.CONFLICT)
+ raise EngineException(
+ "nst with id '{}' is DISABLED, and thus cannot be used to create "
+ "a network slice".format(slice_request["nstId"]),
+ http_code=HTTPStatus.CONFLICT,
+ )
nstd.pop("_admin", None)
nstd_id = nstd.pop("_id", None)
"nsr-ref-list": [],
"vlr-list": [],
"_id": nsi_id,
- "additionalParamsForNsi": self._format_addional_params(slice_request)
+ "additionalParamsForNsi": self._format_addional_params(slice_request),
}
step = "creating nsi at database"
- self.format_on_new(nsi_descriptor, session["project_id"], make_public=session["public"])
+ self.format_on_new(
+ nsi_descriptor, session["project_id"], make_public=session["public"]
+ )
nsi_descriptor["_admin"]["nsiState"] = "NOT_INSTANTIATED"
nsi_descriptor["_admin"]["netslice-subnet"] = None
nsi_descriptor["_admin"]["deployed"] = {}
for member_ns in nstd["netslice-subnet"]:
nsd_id = member_ns["nsd-ref"]
step = "getting nstd id='{}' constituent-nsd='{}' from database".format(
- member_ns["nsd-ref"], member_ns["id"])
+ member_ns["nsd-ref"], member_ns["id"]
+ )
if nsd_id not in needed_nsds:
# Obtain nsd
_filter["id"] = nsd_id
- nsd = self.db.get_one("nsds", _filter, fail_on_empty=True, fail_on_more=True)
+ nsd = self.db.get_one(
+ "nsds", _filter, fail_on_empty=True, fail_on_more=True
+ )
del _filter["id"]
nsd.pop("_admin")
needed_nsds[nsd_id] = nsd
services.append(member_ns)
step = "filling nsir nsd-id='{}' constituent-nsd='{}' from database".format(
- member_ns["nsd-ref"], member_ns["id"])
+ member_ns["nsd-ref"], member_ns["id"]
+ )
# creates Network Services records (NSRs)
step = "creating nsrs at database using NsrTopic.new()"
indata_ns = {}
# Is the nss shared and instantiated?
_filter["_admin.nsrs-detailed-list.ANYINDEX.shared"] = True
- _filter["_admin.nsrs-detailed-list.ANYINDEX.nsd-id"] = service["nsd-ref"]
+ _filter["_admin.nsrs-detailed-list.ANYINDEX.nsd-id"] = service[
+ "nsd-ref"
+ ]
_filter["_admin.nsrs-detailed-list.ANYINDEX.nss-id"] = service["id"]
- nsi = self.db.get_one("nsis", _filter, fail_on_empty=False, fail_on_more=False)
+ nsi = self.db.get_one(
+ "nsis", _filter, fail_on_empty=False, fail_on_more=False
+ )
if nsi and service.get("is-shared-nss"):
nsrs_detailed_list = nsi["_admin"]["nsrs-detailed-list"]
for nsrs_detailed_item in nsrs_detailed_list:
if service.get("instantiation-parameters"):
indata_ns = deepcopy(service["instantiation-parameters"])
# del service["instantiation-parameters"]
-
+
indata_ns["nsdId"] = service["_id"]
- indata_ns["nsName"] = slice_request.get("nsiName") + "." + service["id"]
+ indata_ns["nsName"] = (
+ slice_request.get("nsiName") + "." + service["id"]
+ )
indata_ns["vimAccountId"] = slice_request.get("vimAccountId")
indata_ns["nsDescription"] = service["description"]
if slice_request.get("ssh_keys"):
copy_ns_param = deepcopy(ns_param)
del copy_ns_param["id"]
indata_ns.update(copy_ns_param)
- break
+ break
# Creates Nsr objects
- _id_nsr, _ = self.nsrTopic.new(rollback, session, indata_ns, kwargs, headers)
- nsrs_item = {"nsrId": _id_nsr, "shared": service.get("is-shared-nss"), "nsd-id": service["nsd-ref"],
- "nss-id": service["id"], "nslcmop_instantiate": None}
+ _id_nsr, _ = self.nsrTopic.new(
+ rollback, session, indata_ns, kwargs, headers
+ )
+ nsrs_item = {
+ "nsrId": _id_nsr,
+ "shared": service.get("is-shared-nss"),
+ "nsd-id": service["nsd-ref"],
+ "nss-id": service["id"],
+ "nslcmop_instantiate": None,
+ }
indata_ns["nss-id"] = service["id"]
nsrs_list.append(nsrs_item)
nsi_netslice_subnet.append(indata_ns)
# Adding the nsrs list to the nsi
nsi_descriptor["_admin"]["nsrs-detailed-list"] = nsrs_list
nsi_descriptor["_admin"]["netslice-subnet"] = nsi_netslice_subnet
- self.db.set_one("nsts", {"_id": slice_request["nstId"]}, {"_admin.usageState": "IN_USE"})
+ self.db.set_one(
+ "nsts", {"_id": slice_request["nstId"]}, {"_admin.usageState": "IN_USE"}
+ )
# Creating the entry in the database
self.db.create("nsis", nsi_descriptor)
rollback.append({"topic": "nsis", "_id": nsi_id})
return nsi_id, None
- except Exception as e: # TODO remove try Except, it is captured at nbi.py
- self.logger.exception("Exception {} at NsiTopic.new()".format(e), exc_info=True)
+ except Exception as e: # TODO remove try Except, it is captured at nbi.py
+ self.logger.exception(
+ "Exception {} at NsiTopic.new()".format(e), exc_info=True
+ )
raise EngineException("Error {}: {}".format(step, e))
except ValidationError as e:
raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
def edit(self, session, _id, indata=None, kwargs=None, content=None):
- raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise EngineException(
+ "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+ )
class NsiLcmOpTopic(BaseTopic):
topic_msg = "nsi"
operation_schema = { # mapping between operation and jsonschema to validate
"instantiate": nsi_instantiate,
- "terminate": None
+ "terminate": None,
}
-
+
def __init__(self, db, fs, msg, auth):
BaseTopic.__init__(self, db, fs, msg, auth)
self.nsi_NsLcmOpTopic = NsLcmOpTopic(self.db, self.fs, self.msg, self.auth)
nsds[nsd_id] = self.db.get_one("nsds", _filter)
return nsds[nsd_id]
else:
- raise EngineException("Invalid parameter nstId='{}' is not one of the "
- "nst:netslice-subnet".format(nstId))
+ raise EngineException(
+ "Invalid parameter nstId='{}' is not one of the "
+ "nst:netslice-subnet".format(nstId)
+ )
+
if operation == "instantiate":
# check the existance of netslice-subnet items
- for in_nst in get_iterable(indata.get("netslice-subnet")):
+ for in_nst in get_iterable(indata.get("netslice-subnet")):
check_valid_netslice_subnet_id(in_nst["id"])
def _create_nsilcmop(self, session, netsliceInstanceId, operation, params):
"isCancelPending": False,
"links": {
"self": "/osm/nsilcm/v1/nsi_lcm_op_occs/" + _id,
- "netsliceInstanceId": "/osm/nsilcm/v1/netslice_instances/" + netsliceInstanceId,
- }
+ "netsliceInstanceId": "/osm/nsilcm/v1/netslice_instances/"
+ + netsliceInstanceId,
+ },
}
return nsilcmop
for admin_subnet_item in nsir["_admin"].get("netslice-subnet"):
if admin_subnet_item["nss-id"] == nst_sb_item["id"]:
for admin_vld_item in nsir["_admin"].get("netslice-vld"):
- for admin_vld_nss_cp_ref_item in admin_vld_item["nss-connection-point-ref"]:
- if admin_subnet_item["nss-id"] == admin_vld_nss_cp_ref_item["nss-ref"]:
- if not nsr_item["nsrId"] in admin_vld_item["shared-nsrs-list"]:
- admin_vld_item["shared-nsrs-list"].append(nsr_item["nsrId"])
+ for admin_vld_nss_cp_ref_item in admin_vld_item[
+ "nss-connection-point-ref"
+ ]:
+ if (
+ admin_subnet_item["nss-id"]
+ == admin_vld_nss_cp_ref_item["nss-ref"]
+ ):
+ if (
+ not nsr_item["nsrId"]
+ in admin_vld_item["shared-nsrs-list"]
+ ):
+ admin_vld_item["shared-nsrs-list"].append(
+ nsr_item["nsrId"]
+ )
break
# self.db.set_one("nsis", {"_id": nsir["_id"]}, nsir)
- self.db.set_one("nsis", {"_id": nsir["_id"]}, {"_admin.netslice-vld": nsir["_admin"].get("netslice-vld")})
+ self.db.set_one(
+ "nsis",
+ {"_id": nsir["_id"]},
+ {"_admin.netslice-vld": nsir["_admin"].get("netslice-vld")},
+ )
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
"""
del _filter["_id"]
# initial checking
- if not nsir["_admin"].get("nsiState") or nsir["_admin"]["nsiState"] == "NOT_INSTANTIATED":
+ if (
+ not nsir["_admin"].get("nsiState")
+ or nsir["_admin"]["nsiState"] == "NOT_INSTANTIATED"
+ ):
if operation == "terminate" and indata.get("autoremove"):
# NSIR must be deleted
- return None, None # a none in this case is used to indicate not instantiated. It can be removed
+ return (
+ None,
+ None,
+ ) # a none in this case is used to indicate not instantiated. It can be removed
if operation != "instantiate":
- raise EngineException("netslice_instance '{}' cannot be '{}' because it is not instantiated".format(
- netsliceInstanceId, operation), HTTPStatus.CONFLICT)
+ raise EngineException(
+ "netslice_instance '{}' cannot be '{}' because it is not instantiated".format(
+ netsliceInstanceId, operation
+ ),
+ HTTPStatus.CONFLICT,
+ )
else:
if operation == "instantiate" and not session["force"]:
- raise EngineException("netslice_instance '{}' cannot be '{}' because it is already instantiated".
- format(netsliceInstanceId, operation), HTTPStatus.CONFLICT)
-
+ raise EngineException(
+ "netslice_instance '{}' cannot be '{}' because it is already instantiated".format(
+ netsliceInstanceId, operation
+ ),
+ HTTPStatus.CONFLICT,
+ )
+
# Creating all the NS_operation (nslcmop)
# Get service list from db
nsrs_list = nsir["_admin"]["nsrs-detailed-list"]
if nsr_item.get("shared"):
_filter["_admin.nsrs-detailed-list.ANYINDEX.shared"] = True
_filter["_admin.nsrs-detailed-list.ANYINDEX.nsrId"] = nsr_id
- _filter["_admin.nsrs-detailed-list.ANYINDEX.nslcmop_instantiate.ne"] = None
+ _filter[
+ "_admin.nsrs-detailed-list.ANYINDEX.nslcmop_instantiate.ne"
+ ] = None
_filter["_id.ne"] = netsliceInstanceId
- nsi = self.db.get_one("nsis", _filter, fail_on_empty=False, fail_on_more=False)
+ nsi = self.db.get_one(
+ "nsis", _filter, fail_on_empty=False, fail_on_more=False
+ )
if operation == "terminate":
- _update = {"_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(index): None}
+ _update = {
+ "_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(
+ index
+ ): None
+ }
self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
- if nsi: # other nsi is using this nsr and it needs this nsr instantiated
+ if (
+ nsi
+ ): # other nsi is using this nsr and it needs this nsr instantiated
continue # do not create nsilcmop
else: # instantiate
# looks the first nsi fulfilling the conditions but not being the current NSIR
if nsi:
- nsi_nsr_item = next(n for n in nsi["_admin"]["nsrs-detailed-list"] if
- n["nsrId"] == nsr_id and n["shared"] and
- n["nslcmop_instantiate"])
+ nsi_nsr_item = next(
+ n
+ for n in nsi["_admin"]["nsrs-detailed-list"]
+ if n["nsrId"] == nsr_id
+ and n["shared"]
+ and n["nslcmop_instantiate"]
+ )
self.add_shared_nsr_2vld(nsir, nsr_item)
nslcmops.append(nsi_nsr_item["nslcmop_instantiate"])
- _update = {"_admin.nsrs-detailed-list.{}".format(index): nsi_nsr_item}
+ _update = {
+ "_admin.nsrs-detailed-list.{}".format(
+ index
+ ): nsi_nsr_item
+ }
self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
# continue to not create nslcmop since nsrs is shared and nsrs was created
continue
# Creating NS_LCM_OP with the flag slice_object=True to not trigger the service instantiation
# message via kafka bus
- nslcmop, _ = self.nsi_NsLcmOpTopic.new(rollback, session, indata_ns, None, headers,
- slice_object=True)
+ nslcmop, _ = self.nsi_NsLcmOpTopic.new(
+ rollback, session, indata_ns, None, headers, slice_object=True
+ )
nslcmops.append(nslcmop)
if operation == "instantiate":
- _update = {"_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(index): nslcmop}
+ _update = {
+ "_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(
+ index
+ ): nslcmop
+ }
self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
except (DbException, EngineException) as e:
if e.http_code == HTTPStatus.NOT_FOUND:
- self.logger.info(logging_prefix + "skipping NS={} because not found".format(nsr_id))
+ self.logger.info(
+ logging_prefix
+ + "skipping NS={} because not found".format(nsr_id)
+ )
pass
else:
raise
indata["nslcmops_ids"] = nslcmops
self._check_nsi_operation(session, nsir, operation, indata)
- nsilcmop_desc = self._create_nsilcmop(session, netsliceInstanceId, operation, indata)
- self.format_on_new(nsilcmop_desc, session["project_id"], make_public=session["public"])
+ nsilcmop_desc = self._create_nsilcmop(
+ session, netsliceInstanceId, operation, indata
+ )
+ self.format_on_new(
+ nsilcmop_desc, session["project_id"], make_public=session["public"]
+ )
_id = self.db.create("nsilcmops", nsilcmop_desc)
rollback.append({"topic": "nsilcmops", "_id": _id})
self.msg.write("nsi", operation, nsilcmop_desc)
raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
def delete(self, session, _id, dry_run=False, not_send_msg=None):
    """Always raises: nsilcmop entries must not be deleted directly.

    Kept only to satisfy the BaseTopic interface; a direct call is a
    programming error, hence INTERNAL_SERVER_ERROR rather than a
    client-facing status.

    :param session: contains "username", "admin", "force", "public",
        "project_id", "set_project" (unused here)
    :param _id: item identifier (unused here)
    :param dry_run: unused, interface compatibility only
    :param not_send_msg: unused, interface compatibility only
    :raises EngineException: always, with HTTP 500
    """
    raise EngineException(
        "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
    )
def edit(self, session, _id, indata=None, kwargs=None, content=None):
    """Always raises: nsilcmop entries must not be edited directly.

    Kept only to satisfy the BaseTopic interface; a direct call is a
    programming error, hence INTERNAL_SERVER_ERROR rather than a
    client-facing status.

    :param session: contains "username", "admin", "force", "public",
        "project_id", "set_project" (unused here)
    :param _id: item identifier (unused here)
    :param indata: unused, interface compatibility only
    :param kwargs: unused, interface compatibility only
    :param content: unused, interface compatibility only
    :raises EngineException: always, with HTTP 500
    """
    raise EngineException(
        "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
    )
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-__version__ = "0.1.3" # file version, not NBI version
+__version__ = "0.1.3" # file version, not NBI version
version_date = "Aug 2019"
-database_version = '1.2'
-auth_database_version = '1.0'
-nbi_server = None # instance of Server class
+database_version = "1.2"
+auth_database_version = "1.0"
+nbi_server = None # instance of Server class
subscription_thread = None # instance of SubscriptionThread class
"""
# contains allowed URL and methods, and the role_permission name
"admin": {
"v1": {
- "tokens": {"METHODS": ("GET", "POST", "DELETE"),
- "ROLE_PERMISSION": "tokens:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "tokens:id:"
- }
- },
- "users": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "users:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "users:id:"
- }
- },
- "projects": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "projects:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "projects:id:"}
- },
- "roles": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "roles:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "roles:id:"
- }
- },
- "vims": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "vims:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "vims:id:"
- }
- },
- "vim_accounts": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "vim_accounts:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "vim_accounts:id:"
- }
- },
- "wim_accounts": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "wim_accounts:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "wim_accounts:id:"
- }
- },
- "sdns": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "sdn_controllers:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "sdn_controllers:id:"
- }
- },
- "k8sclusters": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "k8sclusters:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "k8sclusters:id:"
- }
- },
- "vca": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "vca:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "vca:id:"
- }
- },
- "k8srepos": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "k8srepos:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "k8srepos:id:"
- }
- },
- "osmrepos": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "osmrepos:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "osmrepos:id:"
- }
- },
- "domains": {"METHODS": ("GET", ),
- "ROLE_PERMISSION": "domains:",
- },
+ "tokens": {
+ "METHODS": ("GET", "POST", "DELETE"),
+ "ROLE_PERMISSION": "tokens:",
+ "<ID>": {"METHODS": ("GET", "DELETE"), "ROLE_PERMISSION": "tokens:id:"},
+ },
+ "users": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "users:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "users:id:",
+ },
+ },
+ "projects": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "projects:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "projects:id:",
+ },
+ },
+ "roles": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "roles:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "roles:id:",
+ },
+ },
+ "vims": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "vims:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "vims:id:",
+ },
+ },
+ "vim_accounts": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "vim_accounts:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "vim_accounts:id:",
+ },
+ },
+ "wim_accounts": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "wim_accounts:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "wim_accounts:id:",
+ },
+ },
+ "sdns": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "sdn_controllers:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "sdn_controllers:id:",
+ },
+ },
+ "k8sclusters": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "k8sclusters:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "k8sclusters:id:",
+ },
+ },
+ "vca": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "vca:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "vca:id:",
+ },
+ },
+ "k8srepos": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "k8srepos:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "ROLE_PERMISSION": "k8srepos:id:",
+ },
+ },
+ "osmrepos": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "osmrepos:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "osmrepos:id:",
+ },
+ },
+ "domains": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "domains:",
+ },
}
},
"pdu": {
"v1": {
- "pdu_descriptors": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "pduds:",
- "<ID>": {"METHODS": ("GET", "POST", "DELETE", "PATCH", "PUT"),
- "ROLE_PERMISSION": "pduds:id:"
- }
- },
+ "pdu_descriptors": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "pduds:",
+ "<ID>": {
+ "METHODS": ("GET", "POST", "DELETE", "PATCH", "PUT"),
+ "ROLE_PERMISSION": "pduds:id:",
+ },
+ },
}
},
"nsd": {
"v1": {
- "ns_descriptors_content": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "nsds:",
- "<ID>": {"METHODS": ("GET", "PUT", "DELETE"),
- "ROLE_PERMISSION": "nsds:id:"
- }
- },
- "ns_descriptors": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "nsds:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
- "ROLE_PERMISSION": "nsds:id:",
- "nsd_content": {"METHODS": ("GET", "PUT"),
- "ROLE_PERMISSION": "nsds:id:content:",
- },
- "nsd": {"METHODS": ("GET",), # descriptor inside package
- "ROLE_PERMISSION": "nsds:id:content:"
- },
- "artifacts": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "nsds:id:nsd_artifact:",
- "*": None,
- }
- }
- },
- "pnf_descriptors": {"TODO": ("GET", "POST"),
- "<ID>": {"TODO": ("GET", "DELETE", "PATCH"),
- "pnfd_content": {"TODO": ("GET", "PUT")}
- }
- },
- "subscriptions": {"TODO": ("GET", "POST"),
- "<ID>": {"TODO": ("GET", "DELETE")}
- },
+ "ns_descriptors_content": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "nsds:",
+ "<ID>": {
+ "METHODS": ("GET", "PUT", "DELETE"),
+ "ROLE_PERMISSION": "nsds:id:",
+ },
+ },
+ "ns_descriptors": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "nsds:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"),
+ "ROLE_PERMISSION": "nsds:id:",
+ "nsd_content": {
+ "METHODS": ("GET", "PUT"),
+ "ROLE_PERMISSION": "nsds:id:content:",
+ },
+ "nsd": {
+ "METHODS": ("GET",), # descriptor inside package
+ "ROLE_PERMISSION": "nsds:id:content:",
+ },
+ "artifacts": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "nsds:id:nsd_artifact:",
+ "*": None,
+ },
+ },
+ },
+ "pnf_descriptors": {
+ "TODO": ("GET", "POST"),
+ "<ID>": {
+ "TODO": ("GET", "DELETE", "PATCH"),
+ "pnfd_content": {"TODO": ("GET", "PUT")},
+ },
+ },
+ "subscriptions": {
+ "TODO": ("GET", "POST"),
+ "<ID>": {"TODO": ("GET", "DELETE")},
+ },
}
},
"vnfpkgm": {
"v1": {
- "vnf_packages_content": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "vnfds:",
- "<ID>": {"METHODS": ("GET", "PUT", "DELETE"),
- "ROLE_PERMISSION": "vnfds:id:"}
- },
- "vnf_packages": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "vnfds:",
- "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"), # GET: vnfPkgInfo
- "ROLE_PERMISSION": "vnfds:id:",
- "package_content": {"METHODS": ("GET", "PUT"), # package
- "ROLE_PERMISSION": "vnfds:id:",
- "upload_from_uri": {"METHODS": (),
- "TODO": ("POST", ),
- "ROLE_PERMISSION": "vnfds:id:upload:"
- }
- },
- "vnfd": {"METHODS": ("GET", ), # descriptor inside package
- "ROLE_PERMISSION": "vnfds:id:content:"
- },
- "artifacts": {"METHODS": ("GET", ),
- "ROLE_PERMISSION": "vnfds:id:vnfd_artifact:",
- "*": None,
- },
- "action": {"METHODS": ("POST", ),
- "ROLE_PERMISSION": "vnfds:id:action:"
- },
- }
- },
- "subscriptions": {"TODO": ("GET", "POST"),
- "<ID>": {"TODO": ("GET", "DELETE")}
- },
- "vnfpkg_op_occs": {"METHODS": ("GET", ),
- "ROLE_PERMISSION": "vnfds:vnfpkgops:",
- "<ID>": {"METHODS": ("GET", ),
- "ROLE_PERMISSION": "vnfds:vnfpkgops:id:"
- }
- },
+ "vnf_packages_content": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "vnfds:",
+ "<ID>": {
+ "METHODS": ("GET", "PUT", "DELETE"),
+ "ROLE_PERMISSION": "vnfds:id:",
+ },
+ },
+ "vnf_packages": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "vnfds:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE", "PATCH"), # GET: vnfPkgInfo
+ "ROLE_PERMISSION": "vnfds:id:",
+ "package_content": {
+ "METHODS": ("GET", "PUT"), # package
+ "ROLE_PERMISSION": "vnfds:id:",
+ "upload_from_uri": {
+ "METHODS": (),
+ "TODO": ("POST",),
+ "ROLE_PERMISSION": "vnfds:id:upload:",
+ },
+ },
+ "vnfd": {
+ "METHODS": ("GET",), # descriptor inside package
+ "ROLE_PERMISSION": "vnfds:id:content:",
+ },
+ "artifacts": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "vnfds:id:vnfd_artifact:",
+ "*": None,
+ },
+ "action": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "vnfds:id:action:",
+ },
+ },
+ },
+ "subscriptions": {
+ "TODO": ("GET", "POST"),
+ "<ID>": {"TODO": ("GET", "DELETE")},
+ },
+ "vnfpkg_op_occs": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "vnfds:vnfpkgops:",
+ "<ID>": {"METHODS": ("GET",), "ROLE_PERMISSION": "vnfds:vnfpkgops:id:"},
+ },
}
},
"nslcm": {
"v1": {
- "ns_instances_content": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "ns_instances:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "ns_instances:id:"
- }
- },
- "ns_instances": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "ns_instances:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "ns_instances:id:",
- "scale": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "ns_instances:id:scale:"
- },
- "terminate": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "ns_instances:id:terminate:"
- },
- "instantiate": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "ns_instances:id:instantiate:"
- },
- "action": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "ns_instances:id:action:"
- },
- }
- },
- "ns_lcm_op_occs": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "ns_instances:opps:",
- "<ID>": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "ns_instances:opps:id:"
- },
- },
- "vnfrs": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "vnf_instances:",
- "<ID>": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "vnf_instances:id:"
- }
- },
- "vnf_instances": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "vnf_instances:",
- "<ID>": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "vnf_instances:id:"
- }
- },
- "subscriptions": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "ns_subscriptions:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "ns_subscriptions:id:"
- }
- },
+ "ns_instances_content": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "ns_instances:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "ROLE_PERMISSION": "ns_instances:id:",
+ },
+ },
+ "ns_instances": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "ns_instances:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "ROLE_PERMISSION": "ns_instances:id:",
+ "scale": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "ns_instances:id:scale:",
+ },
+ "terminate": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "ns_instances:id:terminate:",
+ },
+ "instantiate": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "ns_instances:id:instantiate:",
+ },
+ "action": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "ns_instances:id:action:",
+ },
+ },
+ },
+ "ns_lcm_op_occs": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "ns_instances:opps:",
+ "<ID>": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "ns_instances:opps:id:",
+ },
+ },
+ "vnfrs": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "vnf_instances:",
+ "<ID>": {"METHODS": ("GET",), "ROLE_PERMISSION": "vnf_instances:id:"},
+ },
+ "vnf_instances": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "vnf_instances:",
+ "<ID>": {"METHODS": ("GET",), "ROLE_PERMISSION": "vnf_instances:id:"},
+ },
+ "subscriptions": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "ns_subscriptions:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "ROLE_PERMISSION": "ns_subscriptions:id:",
+ },
+ },
}
},
"nst": {
"v1": {
- "netslice_templates_content": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "slice_templates:",
- "<ID>": {"METHODS": ("GET", "PUT", "DELETE"),
- "ROLE_PERMISSION": "slice_templates:id:", }
- },
- "netslice_templates": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "slice_templates:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "TODO": ("PATCH",),
- "ROLE_PERMISSION": "slice_templates:id:",
- "nst_content": {"METHODS": ("GET", "PUT"),
- "ROLE_PERMISSION": "slice_templates:id:content:"
- },
- "nst": {"METHODS": ("GET",), # descriptor inside package
- "ROLE_PERMISSION": "slice_templates:id:content:"
- },
- "artifacts": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "slice_templates:id:content:",
- "*": None
- }
- }
- },
- "subscriptions": {"TODO": ("GET", "POST"),
- "<ID>": {"TODO": ("GET", "DELETE")}
- },
+ "netslice_templates_content": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "slice_templates:",
+ "<ID>": {
+ "METHODS": ("GET", "PUT", "DELETE"),
+ "ROLE_PERMISSION": "slice_templates:id:",
+ },
+ },
+ "netslice_templates": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "slice_templates:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "TODO": ("PATCH",),
+ "ROLE_PERMISSION": "slice_templates:id:",
+ "nst_content": {
+ "METHODS": ("GET", "PUT"),
+ "ROLE_PERMISSION": "slice_templates:id:content:",
+ },
+ "nst": {
+ "METHODS": ("GET",), # descriptor inside package
+ "ROLE_PERMISSION": "slice_templates:id:content:",
+ },
+ "artifacts": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "slice_templates:id:content:",
+ "*": None,
+ },
+ },
+ },
+ "subscriptions": {
+ "TODO": ("GET", "POST"),
+ "<ID>": {"TODO": ("GET", "DELETE")},
+ },
}
},
"nsilcm": {
"v1": {
- "netslice_instances_content": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "slice_instances:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "slice_instances:id:"
- }
- },
- "netslice_instances": {"METHODS": ("GET", "POST"),
- "ROLE_PERMISSION": "slice_instances:",
- "<ID>": {"METHODS": ("GET", "DELETE"),
- "ROLE_PERMISSION": "slice_instances:id:",
- "terminate": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "slice_instances:id:terminate:"
- },
- "instantiate": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "slice_instances:id:instantiate:"
- },
- "action": {"METHODS": ("POST",),
- "ROLE_PERMISSION": "slice_instances:id:action:"
- },
- }
- },
- "nsi_lcm_op_occs": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "slice_instances:opps:",
- "<ID>": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "slice_instances:opps:id:",
- },
- },
+ "netslice_instances_content": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "slice_instances:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "ROLE_PERMISSION": "slice_instances:id:",
+ },
+ },
+ "netslice_instances": {
+ "METHODS": ("GET", "POST"),
+ "ROLE_PERMISSION": "slice_instances:",
+ "<ID>": {
+ "METHODS": ("GET", "DELETE"),
+ "ROLE_PERMISSION": "slice_instances:id:",
+ "terminate": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "slice_instances:id:terminate:",
+ },
+ "instantiate": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "slice_instances:id:instantiate:",
+ },
+ "action": {
+ "METHODS": ("POST",),
+ "ROLE_PERMISSION": "slice_instances:id:action:",
+ },
+ },
+ },
+ "nsi_lcm_op_occs": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "slice_instances:opps:",
+ "<ID>": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "slice_instances:opps:id:",
+ },
+ },
}
},
"nspm": {
"pm_jobs": {
"<ID>": {
"reports": {
- "<ID>": {"METHODS": ("GET",),
- "ROLE_PERMISSION": "reports:id:",
- }
+ "<ID>": {
+ "METHODS": ("GET",),
+ "ROLE_PERMISSION": "reports:id:",
+ }
}
},
},
class NbiException(Exception):
    """Base exception for NBI request handling, carrying an HTTP status.

    Attributes:
        http_code: ``http.HTTPStatus`` to return to the client; defaults to
            METHOD_NOT_ALLOWED when the caller does not specify one.
    """

    def __init__(self, message, http_code=HTTPStatus.METHOD_NOT_ALLOWED):
        # Idiomatic super() call instead of explicit Exception.__init__;
        # behavior (str(exc) == message) is unchanged.
        super().__init__(message)
        self.http_code = http_code
cherrypy.request.headers.pop("Content-File-MD5", None)
elif "application/yaml" in cherrypy.request.headers["Content-Type"]:
error_text = "Invalid yaml format "
- indata = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+ indata = yaml.load(
+ cherrypy.request.body, Loader=yaml.SafeLoader
+ )
cherrypy.request.headers.pop("Content-File-MD5", None)
- elif "application/binary" in cherrypy.request.headers["Content-Type"] or \
- "application/gzip" in cherrypy.request.headers["Content-Type"] or \
- "application/zip" in cherrypy.request.headers["Content-Type"] or \
- "text/plain" in cherrypy.request.headers["Content-Type"]:
+ elif (
+ "application/binary" in cherrypy.request.headers["Content-Type"]
+ or "application/gzip"
+ in cherrypy.request.headers["Content-Type"]
+ or "application/zip" in cherrypy.request.headers["Content-Type"]
+ or "text/plain" in cherrypy.request.headers["Content-Type"]
+ ):
indata = cherrypy.request.body # .read()
- elif "multipart/form-data" in cherrypy.request.headers["Content-Type"]:
+ elif (
+ "multipart/form-data"
+ in cherrypy.request.headers["Content-Type"]
+ ):
if "descriptor_file" in kwargs:
filecontent = kwargs.pop("descriptor_file")
if not filecontent.file:
- raise NbiException("empty file or content", HTTPStatus.BAD_REQUEST)
+ raise NbiException(
+ "empty file or content", HTTPStatus.BAD_REQUEST
+ )
indata = filecontent.file # .read()
if filecontent.content_type.value:
- cherrypy.request.headers["Content-Type"] = filecontent.content_type.value
+ cherrypy.request.headers[
+ "Content-Type"
+ ] = filecontent.content_type.value
else:
# raise cherrypy.HTTPError(HTTPStatus.Not_Acceptable,
# "Only 'Content-Type' of type 'application/json' or
# 'application/yaml' for input format are available")
error_text = "Invalid yaml format "
- indata = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+ indata = yaml.load(
+ cherrypy.request.body, Loader=yaml.SafeLoader
+ )
cherrypy.request.headers.pop("Content-File-MD5", None)
else:
error_text = "Invalid yaml format "
kwargs[k] = yaml.load(v, Loader=yaml.SafeLoader)
except Exception:
pass
- elif k.endswith(".gt") or k.endswith(".lt") or k.endswith(".gte") or k.endswith(".lte"):
+ elif (
+ k.endswith(".gt")
+ or k.endswith(".lt")
+ or k.endswith(".gte")
+ or k.endswith(".lte")
+ ):
try:
kwargs[k] = int(v)
except Exception:
except (ValueError, yaml.YAMLError) as exc:
raise NbiException(error_text + str(exc), HTTPStatus.BAD_REQUEST)
except KeyError as exc:
- raise NbiException("Query string error: " + str(exc), HTTPStatus.BAD_REQUEST)
+ raise NbiException(
+ "Query string error: " + str(exc), HTTPStatus.BAD_REQUEST
+ )
except Exception as exc:
raise NbiException(error_text + str(exc), HTTPStatus.BAD_REQUEST)
accept = cherrypy.request.headers.get("Accept")
if data is None:
if accept and "text/html" in accept:
- return html.format(data, cherrypy.request, cherrypy.response, token_info)
+ return html.format(
+ data, cherrypy.request, cherrypy.response, token_info
+ )
# cherrypy.response.status = HTTPStatus.NO_CONTENT.value
return
elif hasattr(data, "read"): # file object
if _format:
cherrypy.response.headers["Content-Type"] = _format
elif "b" in data.mode: # binariy asssumig zip
- cherrypy.response.headers["Content-Type"] = 'application/zip'
+ cherrypy.response.headers["Content-Type"] = "application/zip"
else:
- cherrypy.response.headers["Content-Type"] = 'text/plain'
+ cherrypy.response.headers["Content-Type"] = "text/plain"
# TODO check that cherrypy close file. If not implement pending things to close per thread next
return data
if accept:
if "text/html" in accept:
- return html.format(data, cherrypy.request, cherrypy.response, token_info)
+ return html.format(
+ data, cherrypy.request, cherrypy.response, token_info
+ )
elif "application/yaml" in accept or "*/*" in accept:
pass
- elif "application/json" in accept or (cherrypy.response.status and cherrypy.response.status >= 300):
- cherrypy.response.headers["Content-Type"] = 'application/json; charset=utf-8'
+ elif "application/json" in accept or (
+ cherrypy.response.status and cherrypy.response.status >= 300
+ ):
+ cherrypy.response.headers[
+ "Content-Type"
+ ] = "application/json; charset=utf-8"
a = json.dumps(data, indent=4) + "\n"
- return a.encode("utf8")
- cherrypy.response.headers["Content-Type"] = 'application/yaml'
- return yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False, tags=False,
- encoding='utf-8', allow_unicode=True) # , canonical=True, default_style='"'
+ return a.encode("utf8")
+ cherrypy.response.headers["Content-Type"] = "application/yaml"
+ return yaml.safe_dump(
+ data,
+ explicit_start=True,
+ indent=4,
+ default_flow_style=False,
+ tags=False,
+ encoding="utf-8",
+ allow_unicode=True,
+ ) # , canonical=True, default_style='"'
@cherrypy.expose
def index(self, *args, **kwargs):
try:
if cherrypy.request.method == "GET":
token_info = self.authenticator.authorize()
- outdata = token_info # Home page
+ outdata = token_info # Home page
else:
- raise cherrypy.HTTPError(HTTPStatus.METHOD_NOT_ALLOWED.value,
- "Method {} not allowed for tokens".format(cherrypy.request.method))
+ raise cherrypy.HTTPError(
+ HTTPStatus.METHOD_NOT_ALLOWED.value,
+ "Method {} not allowed for tokens".format(cherrypy.request.method),
+ )
return self._format_out(outdata, token_info)
# TODO consider to remove and provide version using the static version file
try:
if cherrypy.request.method != "GET":
- raise NbiException("Only method GET is allowed", HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "Only method GET is allowed", HTTPStatus.METHOD_NOT_ALLOWED
+ )
elif args or kwargs:
- raise NbiException("Invalid URL or query string for version", HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "Invalid URL or query string for version",
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
# TODO include version of other modules, pick up from some kafka admin message
osm_nbi_version = {"version": nbi_version, "date": nbi_version_date}
return self._format_out(osm_nbi_version)
def domain(self):
try:
domains = {
- "user_domain_name": cherrypy.tree.apps['/osm'].config["authentication"].get("user_domain_name"),
- "project_domain_name": cherrypy.tree.apps['/osm'].config["authentication"].get("project_domain_name")}
+ "user_domain_name": cherrypy.tree.apps["/osm"]
+ .config["authentication"]
+ .get("user_domain_name"),
+ "project_domain_name": cherrypy.tree.apps["/osm"]
+ .config["authentication"]
+ .get("project_domain_name"),
+ }
return self._format_out(domains)
except NbiException as e:
cherrypy.response.status = e.http_code.value
# self.engine.load_dbase(cherrypy.request.app.config)
indata = self._format_in(kwargs)
if not isinstance(indata, dict):
- raise NbiException("Expected application/yaml or application/json Content-Type", HTTPStatus.BAD_REQUEST)
+ raise NbiException(
+ "Expected application/yaml or application/json Content-Type",
+ HTTPStatus.BAD_REQUEST,
+ )
if method == "GET":
token_info = self.authenticator.authorize()
indata.update(kwargs)
# This is needed to log the user when authentication fails
cherrypy.request.login = "{}".format(indata.get("username", "-"))
- outdata = token_info = self.authenticator.new_token(token_info, indata, cherrypy.request.remote)
- cherrypy.session['Authorization'] = outdata["_id"]
+ outdata = token_info = self.authenticator.new_token(
+ token_info, indata, cherrypy.request.remote
+ )
+ cherrypy.session["Authorization"] = outdata["_id"]
self._set_location_header("admin", "v1", "tokens", outdata["_id"])
# for logging
self._format_login(token_info)
token_id = token_info["_id"]
outdata = self.authenticator.del_token(token_id)
token_info = None
- cherrypy.session['Authorization'] = "logout"
+ cherrypy.session["Authorization"] = "logout"
# cherrypy.response.cookie["Authorization"] = token_id
# cherrypy.response.cookie["Authorization"]['expires'] = 0
else:
- raise NbiException("Method {} not allowed for token".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "Method {} not allowed for token".format(method),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
return self._format_out(outdata, token_info)
@cherrypy.expose
def test(self, *args, **kwargs):
- if not cherrypy.config.get("server.enable_test") or (isinstance(cherrypy.config["server.enable_test"], str) and
- cherrypy.config["server.enable_test"].lower() == "false"):
+ if not cherrypy.config.get("server.enable_test") or (
+ isinstance(cherrypy.config["server.enable_test"], str)
+ and cherrypy.config["server.enable_test"].lower() == "false"
+ ):
cherrypy.response.status = HTTPStatus.METHOD_NOT_ALLOWED.value
return "test URL is disabled"
thread_info = None
if args and args[0] == "help":
- return "<html><pre>\ninit\nfile/<name> download file\ndb-clear/table\nfs-clear[/folder]\nlogin\nlogin2\n"\
- "sleep/<time>\nmessage/topic\n</pre></html>"
+ return (
+ "<html><pre>\ninit\nfile/<name> download file\ndb-clear/table\nfs-clear[/folder]\nlogin\nlogin2\n"
+ "sleep/<time>\nmessage/topic\n</pre></html>"
+ )
elif args and args[0] == "init":
try:
cherrypy.response.status = HTTPStatus.FORBIDDEN.value
return self._format_out("Database already initialized")
elif args and args[0] == "file":
- return cherrypy.lib.static.serve_file(cherrypy.tree.apps['/osm'].config["storage"]["path"] + "/" + args[1],
- "text/plain", "attachment")
+ return cherrypy.lib.static.serve_file(
+ cherrypy.tree.apps["/osm"].config["storage"]["path"] + "/" + args[1],
+ "text/plain",
+ "attachment",
+ )
elif args and args[0] == "file2":
- f_path = cherrypy.tree.apps['/osm'].config["storage"]["path"] + "/" + args[1]
+ f_path = (
+ cherrypy.tree.apps["/osm"].config["storage"]["path"] + "/" + args[1]
+ )
f = open(f_path, "r")
cherrypy.response.headers["Content-type"] = "text/plain"
return f
return ",".join(folders) + " folders deleted\n"
elif args and args[0] == "login":
if not cherrypy.request.headers.get("Authorization"):
- cherrypy.response.headers["WWW-Authenticate"] = 'Basic realm="Access to OSM site", charset="UTF-8"'
+ cherrypy.response.headers[
+ "WWW-Authenticate"
+ ] = 'Basic realm="Access to OSM site", charset="UTF-8"'
cherrypy.response.status = HTTPStatus.UNAUTHORIZED.value
elif args and args[0] == "login2":
if not cherrypy.request.headers.get("Authorization"):
- cherrypy.response.headers["WWW-Authenticate"] = 'Bearer realm="Access to OSM site"'
+ cherrypy.response.headers[
+ "WWW-Authenticate"
+ ] = 'Bearer realm="Access to OSM site"'
cherrypy.response.status = HTTPStatus.UNAUTHORIZED.value
elif args and args[0] == "sleep":
sleep_time = 5
main_topic = args[1]
return_text = "<html><pre>{} ->\n".format(main_topic)
try:
- if cherrypy.request.method == 'POST':
+ if cherrypy.request.method == "POST":
to_send = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
for k, v in to_send.items():
self.engine.msg.write(main_topic, k, v)
return_text += " {}: {}\n".format(k, v)
- elif cherrypy.request.method == 'GET':
+ elif cherrypy.request.method == "GET":
for k, v in kwargs.items():
v_dict = yaml.load(v, Loader=yaml.SafeLoader)
self.engine.msg.write(main_topic, k, v_dict)
return return_text
return_text = (
- "<html><pre>\nheaders:\n args: {}\n".format(args) +
- " kwargs: {}\n".format(kwargs) +
- " headers: {}\n".format(cherrypy.request.headers) +
- " path_info: {}\n".format(cherrypy.request.path_info) +
- " query_string: {}\n".format(cherrypy.request.query_string) +
- " session: {}\n".format(cherrypy.session) +
- " cookie: {}\n".format(cherrypy.request.cookie) +
- " method: {}\n".format(cherrypy.request.method) +
- " session: {}\n".format(cherrypy.session.get('fieldname')) +
- " body:\n")
+ "<html><pre>\nheaders:\n args: {}\n".format(args)
+ + " kwargs: {}\n".format(kwargs)
+ + " headers: {}\n".format(cherrypy.request.headers)
+ + " path_info: {}\n".format(cherrypy.request.path_info)
+ + " query_string: {}\n".format(cherrypy.request.query_string)
+ + " session: {}\n".format(cherrypy.session)
+ + " cookie: {}\n".format(cherrypy.request.cookie)
+ + " method: {}\n".format(cherrypy.request.method)
+ + " session: {}\n".format(cherrypy.session.get("fieldname"))
+ + " body:\n"
+ )
return_text += " length: {}\n".format(cherrypy.request.body.length)
if cherrypy.request.body.length:
return_text += " content: {}\n".format(
- str(cherrypy.request.body.read(int(cherrypy.request.headers.get('Content-Length', 0)))))
+ str(
+ cherrypy.request.body.read(
+ int(cherrypy.request.headers.get("Content-Length", 0))
+ )
+ )
+ )
if thread_info:
return_text += "thread: {}\n".format(thread_info)
return_text += "</pre></html>"
@staticmethod
def _check_valid_url_method(method, *args):
if len(args) < 3:
- raise NbiException("URL must contain at least 'main_topic/version/topic'", HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "URL must contain at least 'main_topic/version/topic'",
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
reference = valid_url_methods
for arg in args:
if arg is None:
break
if not isinstance(reference, dict):
- raise NbiException("URL contains unexpected extra items '{}'".format(arg),
- HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "URL contains unexpected extra items '{}'".format(arg),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
if arg in reference:
reference = reference[arg]
reference = reference["*"]
break
else:
- raise NbiException("Unexpected URL item {}".format(arg), HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "Unexpected URL item {}".format(arg), HTTPStatus.METHOD_NOT_ALLOWED
+ )
if "TODO" in reference and method in reference["TODO"]:
- raise NbiException("Method {} not supported yet for this URL".format(method), HTTPStatus.NOT_IMPLEMENTED)
+ raise NbiException(
+ "Method {} not supported yet for this URL".format(method),
+ HTTPStatus.NOT_IMPLEMENTED,
+ )
elif "METHODS" in reference and method not in reference["METHODS"]:
- raise NbiException("Method {} not supported for this URL".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "Method {} not supported for this URL".format(method),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
return reference["ROLE_PERMISSION"] + method.lower()
@staticmethod
:return: None
"""
# Use cherrypy.request.base for absoluted path and make use of request.header HOST just in case behind aNAT
- cherrypy.response.headers["Location"] = "/osm/{}/{}/{}/{}".format(main_topic, version, topic, id)
+ cherrypy.response.headers["Location"] = "/osm/{}/{}/{}/{}".format(
+ main_topic, version, topic, id
+ )
return
@staticmethod
set_project: tuple with projects that a created element will belong to
method: show, list, delete, write
"""
- admin_query = {"force": False, "project_id": (token_info["project_id"], ), "username": token_info["username"],
- "admin": token_info["admin"], "public": None,
- "allow_show_user_project_role": token_info["allow_show_user_project_role"]}
+ admin_query = {
+ "force": False,
+ "project_id": (token_info["project_id"],),
+ "username": token_info["username"],
+ "admin": token_info["admin"],
+ "public": None,
+ "allow_show_user_project_role": token_info["allow_show_user_project_role"],
+ }
if kwargs:
# FORCE
if "FORCE" in kwargs:
- if kwargs["FORCE"].lower() != "false": # if None or True set force to True
+ if (
+ kwargs["FORCE"].lower() != "false"
+ ): # if None or True set force to True
admin_query["force"] = True
del kwargs["FORCE"]
# PUBLIC
if "PUBLIC" in kwargs:
- if kwargs["PUBLIC"].lower() != "false": # if None or True set public to True
+ if (
+ kwargs["PUBLIC"].lower() != "false"
+ ): # if None or True set public to True
admin_query["public"] = True
else:
admin_query["public"] = False
behave_as = kwargs.pop("ADMIN")
if behave_as.lower() != "false":
if not token_info["admin"]:
- raise NbiException("Only admin projects can use 'ADMIN' query string", HTTPStatus.UNAUTHORIZED)
- if not behave_as or behave_as.lower() == "true": # convert True, None to empty list
+ raise NbiException(
+ "Only admin projects can use 'ADMIN' query string",
+ HTTPStatus.UNAUTHORIZED,
+ )
+ if (
+ not behave_as or behave_as.lower() == "true"
+ ): # convert True, None to empty list
admin_query["project_id"] = ()
elif isinstance(behave_as, (list, tuple)):
admin_query["project_id"] = behave_as
- else: # isinstance(behave_as, str)
- admin_query["project_id"] = (behave_as, )
+ else: # isinstance(behave_as, str)
+ admin_query["project_id"] = (behave_as,)
if "SET_PROJECT" in kwargs:
set_project = kwargs.pop("SET_PROJECT")
if not set_project:
admin_query["set_project"] = list(admin_query["project_id"])
else:
if isinstance(set_project, str):
- set_project = (set_project, )
+ set_project = (set_project,)
if admin_query["project_id"]:
for p in set_project:
if p not in admin_query["project_id"]:
- raise NbiException("Unauthorized for 'SET_PROJECT={p}'. Try with 'ADMIN=True' or "
- "'ADMIN='{p}'".format(p=p), HTTPStatus.UNAUTHORIZED)
+ raise NbiException(
+ "Unauthorized for 'SET_PROJECT={p}'. Try with 'ADMIN=True' or "
+ "'ADMIN='{p}'".format(p=p),
+ HTTPStatus.UNAUTHORIZED,
+ )
admin_query["set_project"] = set_project
# PROJECT_READ
return admin_query
@cherrypy.expose
- def default(self, main_topic=None, version=None, topic=None, _id=None, item=None, *args, **kwargs):
+ def default(
+ self,
+ main_topic=None,
+ version=None,
+ topic=None,
+ _id=None,
+ item=None,
+ *args,
+ **kwargs
+ ):
token_info = None
outdata = None
_format = None
engine_session = None
try:
if not main_topic or not version or not topic:
- raise NbiException("URL must contain at least 'main_topic/version/topic'",
- HTTPStatus.METHOD_NOT_ALLOWED)
- if main_topic not in ("admin", "vnfpkgm", "nsd", "nslcm", "pdu", "nst", "nsilcm", "nspm"):
- raise NbiException("URL main_topic '{}' not supported".format(main_topic),
- HTTPStatus.METHOD_NOT_ALLOWED)
- if version != 'v1':
- raise NbiException("URL version '{}' not supported".format(version), HTTPStatus.METHOD_NOT_ALLOWED)
-
- if kwargs and "METHOD" in kwargs and kwargs["METHOD"] in ("PUT", "POST", "DELETE", "GET", "PATCH"):
+ raise NbiException(
+ "URL must contain at least 'main_topic/version/topic'",
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
+ if main_topic not in (
+ "admin",
+ "vnfpkgm",
+ "nsd",
+ "nslcm",
+ "pdu",
+ "nst",
+ "nsilcm",
+ "nspm",
+ ):
+ raise NbiException(
+ "URL main_topic '{}' not supported".format(main_topic),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
+ if version != "v1":
+ raise NbiException(
+ "URL version '{}' not supported".format(version),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
+
+ if (
+ kwargs
+ and "METHOD" in kwargs
+ and kwargs["METHOD"] in ("PUT", "POST", "DELETE", "GET", "PATCH")
+ ):
method = kwargs.pop("METHOD")
else:
method = cherrypy.request.method
- role_permission = self._check_valid_url_method(method, main_topic, version, topic, _id, item, *args)
- query_string_operations = self._extract_query_string_operations(kwargs, method)
+ role_permission = self._check_valid_url_method(
+ method, main_topic, version, topic, _id, item, *args
+ )
+ query_string_operations = self._extract_query_string_operations(
+ kwargs, method
+ )
if main_topic == "admin" and topic == "tokens":
return self.token(method, _id, kwargs)
- token_info = self.authenticator.authorize(role_permission, query_string_operations, _id)
+ token_info = self.authenticator.authorize(
+ role_permission, query_string_operations, _id
+ )
if main_topic == "admin" and topic == "domains":
return self.domain()
engine_session = self._manage_admin_query(token_info, kwargs, method, _id)
engine_topic = "nsilcmops"
elif main_topic == "pdu":
engine_topic = "pdus"
- if engine_topic == "vims": # TODO this is for backward compatibility, it will be removed in the future
+ if (
+ engine_topic == "vims"
+ ): # TODO this is for backward compatibility, it will be removed in the future
engine_topic = "vim_accounts"
if topic == "subscriptions":
engine_topic = main_topic + "_" + topic
if method == "GET":
- if item in ("nsd_content", "package_content", "artifacts", "vnfd", "nsd", "nst", "nst_content"):
+ if item in (
+ "nsd_content",
+ "package_content",
+ "artifacts",
+ "vnfd",
+ "nsd",
+ "nst",
+ "nst_content",
+ ):
if item in ("vnfd", "nsd", "nst"):
path = "$DESCRIPTOR"
elif args:
path = ()
else:
path = None
- file, _format = self.engine.get_file(engine_session, engine_topic, _id, path,
- cherrypy.request.headers.get("Accept"))
+ file, _format = self.engine.get_file(
+ engine_session,
+ engine_topic,
+ _id,
+ path,
+ cherrypy.request.headers.get("Accept"),
+ )
outdata = file
elif not _id:
- outdata = self.engine.get_item_list(engine_session, engine_topic, kwargs, api_req=True)
+ outdata = self.engine.get_item_list(
+ engine_session, engine_topic, kwargs, api_req=True
+ )
else:
if item == "reports":
# TODO check that project_id (_id in this context) has permissions
_id = args[0]
- outdata = self.engine.get_item(engine_session, engine_topic, _id, True)
+ outdata = self.engine.get_item(
+ engine_session, engine_topic, _id, True
+ )
elif method == "POST":
cherrypy.response.status = HTTPStatus.CREATED.value
- if topic in ("ns_descriptors_content", "vnf_packages_content", "netslice_templates_content"):
+ if topic in (
+ "ns_descriptors_content",
+ "vnf_packages_content",
+ "netslice_templates_content",
+ ):
_id = cherrypy.request.headers.get("Transaction-Id")
if not _id:
- _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, {}, None,
- cherrypy.request.headers)
- completed = self.engine.upload_content(engine_session, engine_topic, _id, indata, kwargs,
- cherrypy.request.headers)
+ _id, _ = self.engine.new_item(
+ rollback,
+ engine_session,
+ engine_topic,
+ {},
+ None,
+ cherrypy.request.headers,
+ )
+ completed = self.engine.upload_content(
+ engine_session,
+ engine_topic,
+ _id,
+ indata,
+ kwargs,
+ cherrypy.request.headers,
+ )
if completed:
self._set_location_header(main_topic, version, topic, _id)
else:
outdata = {"id": _id}
elif topic == "ns_instances_content":
# creates NSR
- _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs)
+ _id, _ = self.engine.new_item(
+ rollback, engine_session, engine_topic, indata, kwargs
+ )
# creates nslcmop
indata["lcmOperationType"] = "instantiate"
indata["nsInstanceId"] = _id
- nslcmop_id, _ = self.engine.new_item(rollback, engine_session, "nslcmops", indata, None)
+ nslcmop_id, _ = self.engine.new_item(
+ rollback, engine_session, "nslcmops", indata, None
+ )
self._set_location_header(main_topic, version, topic, _id)
outdata = {"id": _id, "nslcmop_id": nslcmop_id}
elif topic == "ns_instances" and item:
indata["lcmOperationType"] = item
indata["nsInstanceId"] = _id
- _id, _ = self.engine.new_item(rollback, engine_session, "nslcmops", indata, kwargs)
- self._set_location_header(main_topic, version, "ns_lcm_op_occs", _id)
+ _id, _ = self.engine.new_item(
+ rollback, engine_session, "nslcmops", indata, kwargs
+ )
+ self._set_location_header(
+ main_topic, version, "ns_lcm_op_occs", _id
+ )
outdata = {"id": _id}
cherrypy.response.status = HTTPStatus.ACCEPTED.value
elif topic == "netslice_instances_content":
# creates NetSlice_Instance_record (NSIR)
- _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs)
+ _id, _ = self.engine.new_item(
+ rollback, engine_session, engine_topic, indata, kwargs
+ )
self._set_location_header(main_topic, version, topic, _id)
indata["lcmOperationType"] = "instantiate"
indata["netsliceInstanceId"] = _id
- nsilcmop_id, _ = self.engine.new_item(rollback, engine_session, "nsilcmops", indata, kwargs)
+ nsilcmop_id, _ = self.engine.new_item(
+ rollback, engine_session, "nsilcmops", indata, kwargs
+ )
outdata = {"id": _id, "nsilcmop_id": nsilcmop_id}
elif topic == "netslice_instances" and item:
indata["lcmOperationType"] = item
indata["netsliceInstanceId"] = _id
- _id, _ = self.engine.new_item(rollback, engine_session, "nsilcmops", indata, kwargs)
- self._set_location_header(main_topic, version, "nsi_lcm_op_occs", _id)
+ _id, _ = self.engine.new_item(
+ rollback, engine_session, "nsilcmops", indata, kwargs
+ )
+ self._set_location_header(
+ main_topic, version, "nsi_lcm_op_occs", _id
+ )
outdata = {"id": _id}
cherrypy.response.status = HTTPStatus.ACCEPTED.value
elif topic == "vnf_packages" and item == "action":
indata["lcmOperationType"] = item
indata["vnfPkgId"] = _id
- _id, _ = self.engine.new_item(rollback, engine_session, "vnfpkgops", indata, kwargs)
- self._set_location_header(main_topic, version, "vnfpkg_op_occs", _id)
+ _id, _ = self.engine.new_item(
+ rollback, engine_session, "vnfpkgops", indata, kwargs
+ )
+ self._set_location_header(
+ main_topic, version, "vnfpkg_op_occs", _id
+ )
outdata = {"id": _id}
cherrypy.response.status = HTTPStatus.ACCEPTED.value
elif topic == "subscriptions":
- _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs)
+ _id, _ = self.engine.new_item(
+ rollback, engine_session, engine_topic, indata, kwargs
+ )
self._set_location_header(main_topic, version, topic, _id)
link = {}
link["self"] = cherrypy.response.headers["Location"]
- outdata = {"id": _id, "filter": indata["filter"], "callbackUri": indata["CallbackUri"],
- "_links": link}
+ outdata = {
+ "id": _id,
+ "filter": indata["filter"],
+ "callbackUri": indata["CallbackUri"],
+ "_links": link,
+ }
cherrypy.response.status = HTTPStatus.CREATED.value
else:
- _id, op_id = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs,
- cherrypy.request.headers)
+ _id, op_id = self.engine.new_item(
+ rollback,
+ engine_session,
+ engine_topic,
+ indata,
+ kwargs,
+ cherrypy.request.headers,
+ )
self._set_location_header(main_topic, version, topic, _id)
outdata = {"id": _id}
if op_id:
elif method == "DELETE":
if not _id:
- outdata = self.engine.del_item_list(engine_session, engine_topic, kwargs)
+ outdata = self.engine.del_item_list(
+ engine_session, engine_topic, kwargs
+ )
cherrypy.response.status = HTTPStatus.OK.value
else: # len(args) > 1
# for NS NSI generate an operation
nslcmop_desc = {
"lcmOperationType": "terminate",
"nsInstanceId": _id,
- "autoremove": True
+ "autoremove": True,
}
- op_id, _ = self.engine.new_item(rollback, engine_session, "nslcmops", nslcmop_desc, kwargs)
+ op_id, _ = self.engine.new_item(
+ rollback, engine_session, "nslcmops", nslcmop_desc, kwargs
+ )
if op_id:
outdata = {"_id": op_id}
- elif topic == "netslice_instances_content" and not engine_session["force"]:
+ elif (
+ topic == "netslice_instances_content"
+ and not engine_session["force"]
+ ):
nsilcmop_desc = {
"lcmOperationType": "terminate",
"netsliceInstanceId": _id,
- "autoremove": True
+ "autoremove": True,
}
- op_id, _ = self.engine.new_item(rollback, engine_session, "nsilcmops", nsilcmop_desc, None)
+ op_id, _ = self.engine.new_item(
+ rollback, engine_session, "nsilcmops", nsilcmop_desc, None
+ )
if op_id:
outdata = {"_id": op_id}
# if there is not any deletion in process, delete
op_id = self.engine.del_item(engine_session, engine_topic, _id)
if op_id:
outdata = {"op_id": op_id}
- cherrypy.response.status = HTTPStatus.ACCEPTED.value if op_id else HTTPStatus.NO_CONTENT.value
+ cherrypy.response.status = (
+ HTTPStatus.ACCEPTED.value
+ if op_id
+ else HTTPStatus.NO_CONTENT.value
+ )
elif method in ("PUT", "PATCH"):
op_id = None
if not indata and not kwargs and not engine_session.get("set_project"):
- raise NbiException("Nothing to update. Provide payload and/or query string",
- HTTPStatus.BAD_REQUEST)
- if item in ("nsd_content", "package_content", "nst_content") and method == "PUT":
- completed = self.engine.upload_content(engine_session, engine_topic, _id, indata, kwargs,
- cherrypy.request.headers)
+ raise NbiException(
+ "Nothing to update. Provide payload and/or query string",
+ HTTPStatus.BAD_REQUEST,
+ )
+ if (
+ item in ("nsd_content", "package_content", "nst_content")
+ and method == "PUT"
+ ):
+ completed = self.engine.upload_content(
+ engine_session,
+ engine_topic,
+ _id,
+ indata,
+ kwargs,
+ cherrypy.request.headers,
+ )
if not completed:
cherrypy.response.headers["Transaction-Id"] = id
else:
- op_id = self.engine.edit_item(engine_session, engine_topic, _id, indata, kwargs)
+ op_id = self.engine.edit_item(
+ engine_session, engine_topic, _id, indata, kwargs
+ )
if op_id:
cherrypy.response.status = HTTPStatus.ACCEPTED.value
cherrypy.response.status = HTTPStatus.NO_CONTENT.value
outdata = None
else:
- raise NbiException("Method {} not allowed".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
+ raise NbiException(
+ "Method {} not allowed".format(method),
+ HTTPStatus.METHOD_NOT_ALLOWED,
+ )
# if Role information changes, it is needed to reload the information of roles
if topic == "roles" and method != "GET":
self.authenticator.load_operation_to_allowed_roles()
- if topic == "projects" and method == "DELETE" \
- or topic in ["users", "roles"] and method in ["PUT", "PATCH", "DELETE"]:
+ if (
+ topic == "projects"
+ and method == "DELETE"
+ or topic in ["users", "roles"]
+ and method in ["PUT", "PATCH", "DELETE"]
+ ):
self.authenticator.remove_token_from_cache()
return self._format_out(outdata, token_info, _format)
except Exception as e:
- if isinstance(e, (NbiException, EngineException, DbException, FsException, MsgException, AuthException,
- ValidationError, AuthconnException)):
+ if isinstance(
+ e,
+ (
+ NbiException,
+ EngineException,
+ DbException,
+ FsException,
+ MsgException,
+ AuthException,
+ ValidationError,
+ AuthconnException,
+ ),
+ ):
http_code_value = cherrypy.response.status = e.http_code.value
http_code_name = e.http_code.name
cherrypy.log("Exception {}".format(e))
else:
- http_code_value = cherrypy.response.status = HTTPStatus.BAD_REQUEST.value # INTERNAL_SERVER_ERROR
+ http_code_value = (
+ cherrypy.response.status
+ ) = HTTPStatus.BAD_REQUEST.value # INTERNAL_SERVER_ERROR
cherrypy.log("CRITICAL: Exception {}".format(e), traceback=True)
http_code_name = HTTPStatus.BAD_REQUEST.name
if hasattr(outdata, "close"): # is an open file
for rollback_item in rollback:
try:
if rollback_item.get("operation") == "set":
- self.engine.db.set_one(rollback_item["topic"], {"_id": rollback_item["_id"]},
- rollback_item["content"], fail_on_empty=False)
+ self.engine.db.set_one(
+ rollback_item["topic"],
+ {"_id": rollback_item["_id"]},
+ rollback_item["content"],
+ fail_on_empty=False,
+ )
elif rollback_item.get("operation") == "del_list":
- self.engine.db.del_list(rollback_item["topic"], rollback_item["filter"],
- fail_on_empty=False)
+ self.engine.db.del_list(
+ rollback_item["topic"],
+ rollback_item["filter"],
+ fail_on_empty=False,
+ )
else:
- self.engine.db.del_one(rollback_item["topic"], {"_id": rollback_item["_id"]},
- fail_on_empty=False)
+ self.engine.db.del_one(
+ rollback_item["topic"],
+ {"_id": rollback_item["_id"]},
+ fail_on_empty=False,
+ )
except Exception as e2:
- rollback_error_text = "Rollback Exception {}: {}".format(rollback_item, e2)
+ rollback_error_text = "Rollback Exception {}: {}".format(
+ rollback_item, e2
+ )
cherrypy.log(rollback_error_text)
error_text += ". " + rollback_error_text
# if isinstance(e, MsgException):
if method in ("PUT", "PATCH", "POST") and isinstance(outdata, dict):
for logging_id in ("id", "op_id", "nsilcmop_id", "nslcmop_id"):
if outdata.get(logging_id):
- cherrypy.request.login += ";{}={}".format(logging_id, outdata[logging_id][:36])
+ cherrypy.request.login += ";{}={}".format(
+ logging_id, outdata[logging_id][:36]
+ )
def _start_service():
# update general cherrypy configuration
update_dict = {}
- engine_config = cherrypy.tree.apps['/osm'].config
+ engine_config = cherrypy.tree.apps["/osm"].config
for k, v in environ.items():
if not k.startswith("OSMNBI_"):
continue
continue
try:
# update static configuration
- if k == 'OSMNBI_STATIC_DIR':
- engine_config["/static"]['tools.staticdir.dir'] = v
- engine_config["/static"]['tools.staticdir.on'] = True
- elif k == 'OSMNBI_SOCKET_PORT' or k == 'OSMNBI_SERVER_PORT':
- update_dict['server.socket_port'] = int(v)
- elif k == 'OSMNBI_SOCKET_HOST' or k == 'OSMNBI_SERVER_HOST':
- update_dict['server.socket_host'] = v
+ if k == "OSMNBI_STATIC_DIR":
+ engine_config["/static"]["tools.staticdir.dir"] = v
+ engine_config["/static"]["tools.staticdir.on"] = True
+ elif k == "OSMNBI_SOCKET_PORT" or k == "OSMNBI_SERVER_PORT":
+ update_dict["server.socket_port"] = int(v)
+ elif k == "OSMNBI_SOCKET_HOST" or k == "OSMNBI_SERVER_HOST":
+ update_dict["server.socket_host"] = v
elif k1 in ("server", "test", "auth", "log"):
- update_dict[k1 + '.' + k2] = v
+ update_dict[k1 + "." + k2] = v
elif k1 in ("message", "database", "storage", "authentication"):
# k2 = k2.replace('_', '.')
if k2 in ("port", "db_port"):
engine_config["global"].update(update_dict)
# logging cherrypy
- log_format_simple = "%(asctime)s %(levelname)s %(name)s %(filename)s:%(lineno)s %(message)s"
- log_formatter_simple = logging.Formatter(log_format_simple, datefmt='%Y-%m-%dT%H:%M:%S')
+ log_format_simple = (
+ "%(asctime)s %(levelname)s %(name)s %(filename)s:%(lineno)s %(message)s"
+ )
+ log_formatter_simple = logging.Formatter(
+ log_format_simple, datefmt="%Y-%m-%dT%H:%M:%S"
+ )
logger_server = logging.getLogger("cherrypy.error")
logger_access = logging.getLogger("cherrypy.access")
logger_cherry = logging.getLogger("cherrypy")
logger_nbi = logging.getLogger("nbi")
if "log.file" in engine_config["global"]:
- file_handler = logging.handlers.RotatingFileHandler(engine_config["global"]["log.file"],
- maxBytes=100e6, backupCount=9, delay=0)
+ file_handler = logging.handlers.RotatingFileHandler(
+ engine_config["global"]["log.file"], maxBytes=100e6, backupCount=9, delay=0
+ )
file_handler.setFormatter(log_formatter_simple)
logger_cherry.addHandler(file_handler)
logger_nbi.addHandler(file_handler)
# log always to standard output
- for format_, logger in {"nbi.server %(filename)s:%(lineno)s": logger_server,
- "nbi.access %(filename)s:%(lineno)s": logger_access,
- "%(name)s %(filename)s:%(lineno)s": logger_nbi
- }.items():
+ for format_, logger in {
+ "nbi.server %(filename)s:%(lineno)s": logger_server,
+ "nbi.access %(filename)s:%(lineno)s": logger_access,
+ "%(name)s %(filename)s:%(lineno)s": logger_nbi,
+ }.items():
log_format_cherry = "%(asctime)s %(levelname)s {} %(message)s".format(format_)
- log_formatter_cherry = logging.Formatter(log_format_cherry, datefmt='%Y-%m-%dT%H:%M:%S')
+ log_formatter_cherry = logging.Formatter(
+ log_format_cherry, datefmt="%Y-%m-%dT%H:%M:%S"
+ )
str_handler = logging.StreamHandler()
str_handler.setFormatter(log_formatter_cherry)
logger.addHandler(str_handler)
logger_nbi.setLevel(engine_config["global"]["log.level"])
# logging other modules
- for k1, logname in {"message": "nbi.msg", "database": "nbi.db", "storage": "nbi.fs"}.items():
+ for k1, logname in {
+ "message": "nbi.msg",
+ "database": "nbi.db",
+ "storage": "nbi.fs",
+ }.items():
engine_config[k1]["logger_name"] = logname
logger_module = logging.getLogger(logname)
if "logfile" in engine_config[k1]:
- file_handler = logging.handlers.RotatingFileHandler(engine_config[k1]["logfile"],
- maxBytes=100e6, backupCount=9, delay=0)
+ file_handler = logging.handlers.RotatingFileHandler(
+ engine_config[k1]["logfile"], maxBytes=100e6, backupCount=9, delay=0
+ )
file_handler.setFormatter(log_formatter_simple)
logger_module.addHandler(file_handler)
if "loglevel" in engine_config[k1]:
logger_module.setLevel(engine_config[k1]["loglevel"])
# TODO add more entries, e.g.: storage
- cherrypy.tree.apps['/osm'].root.engine.start(engine_config)
- cherrypy.tree.apps['/osm'].root.authenticator.start(engine_config)
- cherrypy.tree.apps['/osm'].root.engine.init_db(target_version=database_version)
- cherrypy.tree.apps['/osm'].root.authenticator.init_db(target_version=auth_database_version)
+ cherrypy.tree.apps["/osm"].root.engine.start(engine_config)
+ cherrypy.tree.apps["/osm"].root.authenticator.start(engine_config)
+ cherrypy.tree.apps["/osm"].root.engine.init_db(target_version=database_version)
+ cherrypy.tree.apps["/osm"].root.authenticator.init_db(
+ target_version=auth_database_version
+ )
# start subscriptions thread:
- subscription_thread = SubscriptionThread(config=engine_config, engine=nbi_server.engine)
+ subscription_thread = SubscriptionThread(
+ config=engine_config, engine=nbi_server.engine
+ )
subscription_thread.start()
# Do not capture except SubscriptionException
backend = engine_config["authentication"]["backend"]
- cherrypy.log.error("Starting OSM NBI Version '{} {}' with '{}' authentication backend"
- .format(nbi_version, nbi_version_date, backend))
+ cherrypy.log.error(
+ "Starting OSM NBI Version '{} {}' with '{}' authentication backend".format(
+ nbi_version, nbi_version_date, backend
+ )
+ )
def _stop_service():
if subscription_thread:
subscription_thread.terminate()
subscription_thread = None
- cherrypy.tree.apps['/osm'].root.engine.stop()
+ cherrypy.tree.apps["/osm"].root.engine.stop()
cherrypy.log.error("Stopping osm_nbi")
# 'tools.auth_basic.realm': 'localhost',
# 'tools.auth_basic.checkpassword': validate_password})
nbi_server = Server()
- cherrypy.engine.subscribe('start', _start_service)
- cherrypy.engine.subscribe('stop', _stop_service)
- cherrypy.quickstart(nbi_server, '/osm', config_file)
+ cherrypy.engine.subscribe("start", _start_service)
+ cherrypy.engine.subscribe("stop", _stop_service)
+ cherrypy.quickstart(nbi_server, "/osm", config_file)
def usage():
- print("""Usage: {} [options]
+ print(
+ """Usage: {} [options]
-c|--config [configuration_file]: loads the configuration file (default: ./nbi.cfg)
-h|--help: shows this help
- """.format(sys.argv[0]))
+ """.format(
+ sys.argv[0]
+ )
+ )
# --log-socket-host HOST: send logs to this host")
# --log-socket-port PORT: send logs using this port (default: 9022)")
-if __name__ == '__main__':
+if __name__ == "__main__":
try:
# load parameters and configuration
opts, args = getopt.getopt(sys.argv[1:], "hvc:", ["config=", "help"])
assert False, "Unhandled option"
if config_file:
if not path.isfile(config_file):
- print("configuration file '{}' that not exist".format(config_file), file=sys.stderr)
+ print(
+ "configuration file '{}' that not exist".format(config_file),
+ file=sys.stderr,
+ )
exit(1)
else:
- for config_file in (__file__[:__file__.rfind(".")] + ".cfg", "./nbi.cfg", "/etc/osm/nbi.cfg"):
+ for config_file in (
+ __file__[: __file__.rfind(".")] + ".cfg",
+ "./nbi.cfg",
+ "/etc/osm/nbi.cfg",
+ ):
if path.isfile(config_file):
break
else:
- print("No configuration file 'nbi.cfg' found neither at local folder nor at /etc/osm/", file=sys.stderr)
+ print(
+ "No configuration file 'nbi.cfg' found neither at local folder nor at /etc/osm/",
+ file=sys.stderr,
+ )
exit(1)
nbi(config_file)
except getopt.GetoptError as e:
response_models = None
# Common HTTP payload header for all notifications.
- payload_header = {
- "Content-Type": "application/json",
- "Accept": "application/json"
- }
+ payload_header = {"Content-Type": "application/json", "Accept": "application/json"}
def __init__(self, db) -> None:
"""
:param kwargs: any keyword arguments needed for db query.
:return: List of subscribers
"""
- raise NotificationException("Method get_subscribers() is not implemented", http_code=HTTPStatus.NOT_IMPLEMENTED)
+ raise NotificationException(
+ "Method get_subscribers() is not implemented",
+ http_code=HTTPStatus.NOT_IMPLEMENTED,
+ )
@staticmethod
def _get_basic_auth(username: str, password: str) -> tuple:
return aiohttp.BasicAuth(username, password)
- def _decrypt_password(self, hashed: str, salt: str, schema_version: str = "1.1") -> str:
+ def _decrypt_password(
+ self, hashed: str, salt: str, schema_version: str = "1.1"
+ ) -> str:
return self.db.decrypt(hashed, schema_version, salt=salt)
def get_payload(self, meta_notification: dict) -> dict:
model_name = meta_notification["notificationType"]
response_models = self.get_models()
if not response_models or not response_models.get(model_name):
- raise NotificationException("Response model {} is not defined.".format(model_name),
- HTTPStatus.NOT_IMPLEMENTED)
+ raise NotificationException(
+ "Response model {} is not defined.".format(model_name),
+ HTTPStatus.NOT_IMPLEMENTED,
+ )
model_keys = response_models[model_name]
payload = dict.fromkeys(model_keys, "N/A")
notification_keys = set(meta_notification.keys())
for model_key in model_keys.intersection(notification_keys):
payload[model_key] = meta_notification[model_key]
- self.logger.debug("Payload generated for subscriber: {} for {}".format(payload["subscriptionId"],
- payload["notificationType"]))
+ self.logger.debug(
+ "Payload generated for subscriber: {} for {}".format(
+ payload["subscriptionId"], payload["notificationType"]
+ )
+ )
return payload
- async def send_notifications(self, subscribers: list, loop: asyncio.AbstractEventLoop = None):
+ async def send_notifications(
+ self, subscribers: list, loop: asyncio.AbstractEventLoop = None
+ ):
"""
Generate tasks for all notification for an event.
:param subscribers: A list of subscribers who want to be notified for event.
for subscriber in subscribers:
# Notify without auth
if not subscriber.get("authentication"):
- notifications.append({
- "headers": self.payload_header,
- "payload": self.get_payload(subscriber),
- "CallbackUri": subscriber["CallbackUri"]
- })
+ notifications.append(
+ {
+ "headers": self.payload_header,
+ "payload": self.get_payload(subscriber),
+ "CallbackUri": subscriber["CallbackUri"],
+ }
+ )
elif subscriber["authentication"]["authType"] == "basic":
salt = subscriber["subscriptionId"]
- hashed_password = subscriber["authentication"]["paramsBasic"]["password"]
+ hashed_password = subscriber["authentication"]["paramsBasic"][
+ "password"
+ ]
password = self._decrypt_password(hashed_password, salt)
- auth_basic = self._get_basic_auth(subscriber["authentication"]["paramsBasic"]["userName"], password)
- notifications.append({
- "headers": self.payload_header,
- "payload": self.get_payload(subscriber),
- "auth_basic": auth_basic,
- "CallbackUri": subscriber["CallbackUri"]
- })
+ auth_basic = self._get_basic_auth(
+ subscriber["authentication"]["paramsBasic"]["userName"], password
+ )
+ notifications.append(
+ {
+ "headers": self.payload_header,
+ "payload": self.get_payload(subscriber),
+ "auth_basic": auth_basic,
+ "CallbackUri": subscriber["CallbackUri"],
+ }
+ )
# TODO add support for AuthType OAuth and TLS after support is added in subscription.
else:
- self.logger.debug("Subscriber {} can not be notified. {} notification auth type is not implemented"
- .format(subscriber["subscriptionId"],
- subscriber["authentication"]["authType"]))
+ self.logger.debug(
+ "Subscriber {} can not be notified. {} notification auth type is not implemented".format(
+ subscriber["subscriptionId"],
+ subscriber["authentication"]["authType"],
+ )
+ )
if notifications:
tasks = []
async with aiohttp.ClientSession(loop=loop) as session:
for notification in notifications:
- tasks.append(asyncio.ensure_future(self.send_notification(session, notification, loop=loop),
- loop=loop))
+ tasks.append(
+ asyncio.ensure_future(
+ self.send_notification(session, notification, loop=loop),
+ loop=loop,
+ )
+ )
await asyncio.gather(*tasks, loop=loop)
- async def send_notification(self, session: aiohttp.ClientSession, notification: dict,
- loop: asyncio.AbstractEventLoop = None, retry_count: int = 5, timeout: float = 5.0):
+ async def send_notification(
+ self,
+ session: aiohttp.ClientSession,
+ notification: dict,
+ loop: asyncio.AbstractEventLoop = None,
+ retry_count: int = 5,
+ timeout: float = 5.0,
+ ):
"""
Performs HTTP Post request to notify subscriber. In case if for any reason notification is not sent successfully
after maximum number of reties, then notification is dropped.
backoff_delay = 1
while retry_count > 0:
try:
- async with session.post(url=notification["CallbackUri"], headers=notification["headers"],
- auth=notification.get("auth_basic", None),
- data=json.dumps(notification["payload"]),
- timeout=timeout) as resp:
+ async with session.post(
+ url=notification["CallbackUri"],
+ headers=notification["headers"],
+ auth=notification.get("auth_basic", None),
+ data=json.dumps(notification["payload"]),
+ timeout=timeout,
+ ) as resp:
# self.logger.debug("Notification response: {}".format(resp.status))
if resp.status == HTTPStatus.NO_CONTENT:
- self.logger.debug("Notification sent successfully to subscriber {}"
- .format(notification["payload"]["subscriptionId"]))
+ self.logger.debug(
+ "Notification sent successfully to subscriber {}".format(
+ notification["payload"]["subscriptionId"]
+ )
+ )
else:
error_text = "Erroneous response code: {}, ".format(resp.status)
error_text += await resp.text()
return True
except Exception as e:
error_text = type(e).__name__ + ": " + str(e)
- self.logger.debug("Unable to send notification to subscriber {}. Details: {}"
- .format(notification["payload"]["subscriptionId"], error_text))
+ self.logger.debug(
+ "Unable to send notification to subscriber {}. Details: {}".format(
+ notification["payload"]["subscriptionId"], error_text
+ )
+ )
error_detail = {
"error": type(e).__name__,
"error_text": str(e),
- "timestamp": time.time()
+ "timestamp": time.time(),
}
if "error_details" in notification["payload"].keys():
notification["payload"]["error_details"].append(error_detail)
notification["payload"]["error_details"] = [error_detail]
retry_count -= 1
backoff_delay *= 2
- self.logger.debug("Retry Notification for subscriber: {} after backoff delay: {} seconds."
- .format(notification["payload"]["subscriptionId"], backoff_delay))
+ self.logger.debug(
+ "Retry Notification for subscriber: {} after backoff delay: {} seconds.".format(
+ notification["payload"]["subscriptionId"], backoff_delay
+ )
+ )
await asyncio.sleep(backoff_delay, loop=loop)
# Dropping notification
- self.logger.debug("Notification {} sent failed to subscriber:{}."
- .format(notification["payload"]["notificationType"],
- notification["payload"]["subscriptionId"]))
+ self.logger.debug(
+ "Notification {} sent failed to subscriber:{}.".format(
+ notification["payload"]["notificationType"],
+ notification["payload"]["subscriptionId"],
+ )
+ )
return False
# SOL005 response model for nslcm notifications
response_models = {
- "NsLcmOperationOccurrenceNotification": {"id", "nsInstanceId", "nsLcmOpOccId", "operation",
- "notificationType", "subscriptionId", "timestamp",
- "notificationStatus", "operationState", "isAutomaticInvocation",
- "affectedVnf", "affectedVl", "affectedVnffg", "affectedNs",
- "affectedSap", "error", "_links"},
-
- "NsIdentifierCreationNotification": {"notificationType", "subscriptionId", "timestamp",
- "nsInstanceId", "_links"},
-
- "NsIdentifierDeletionNotification": {"notificationType", "subscriptionId", "timestamp",
- "nsInstanceId", "_links"},
-
- "NsChangeNotification": {"nsInstanceId", "nsComponentType", "nsComponentId",
- "lcmOpOccIdImpactngNsComponent", "lcmOpNameImpactingNsComponent",
- "lcmOpOccStatusImpactingNsComponent", "notificationType", "subscriptionId",
- "timeStamp", "error", "_links"}
+ "NsLcmOperationOccurrenceNotification": {
+ "id",
+ "nsInstanceId",
+ "nsLcmOpOccId",
+ "operation",
+ "notificationType",
+ "subscriptionId",
+ "timestamp",
+ "notificationStatus",
+ "operationState",
+ "isAutomaticInvocation",
+ "affectedVnf",
+ "affectedVl",
+ "affectedVnffg",
+ "affectedNs",
+ "affectedSap",
+ "error",
+ "_links",
+ },
+ "NsIdentifierCreationNotification": {
+ "notificationType",
+ "subscriptionId",
+ "timestamp",
+ "nsInstanceId",
+ "_links",
+ },
+ "NsIdentifierDeletionNotification": {
+ "notificationType",
+ "subscriptionId",
+ "timestamp",
+ "nsInstanceId",
+ "_links",
+ },
+ "NsChangeNotification": {
+ "nsInstanceId",
+ "nsComponentType",
+ "nsComponentId",
+ "lcmOpOccIdImpactngNsComponent",
+ "lcmOpNameImpactingNsComponent",
+ "lcmOpOccStatusImpactingNsComponent",
+ "notificationType",
+ "subscriptionId",
+ "timeStamp",
+ "error",
+ "_links",
+ },
}
def __init__(self, db) -> None:
subscriber.update(event_details["params"])
return subscribers
- def get_subscribers(self, nsd_id: str, ns_instance_id: str, command: str, op_state: str,
- event_details: dict) -> list:
+ def get_subscribers(
+ self,
+ nsd_id: str,
+ ns_instance_id: str,
+ command: str,
+ op_state: str,
+ event_details: dict,
+ ) -> list:
"""
Queries database and returns list of subscribers.
:param nsd_id: NSD id of an NS whose lifecycle has changed. (scaled, terminated. etc)
:param event_details: dict containing raw data of event occured.
:return: List of interested subscribers for occurred event.
"""
- filter_q = {"identifier": [nsd_id, ns_instance_id], "operationStates": ["ANY"], "operationTypes": ["ANY"]}
+ filter_q = {
+ "identifier": [nsd_id, ns_instance_id],
+ "operationStates": ["ANY"],
+ "operationTypes": ["ANY"],
+ }
if op_state:
filter_q["operationStates"].append(op_state)
if command:
class NsdNotification(NotificationBase):
-
def __init__(self, db):
"""
Constructor of the class
class VnfdNotification(NotificationBase):
-
def __init__(self, db):
"""
Constructor of the class
__author__ = "Vijay R S <vijay.r@tataelxsi.co.in>"
-class PmJobsTopic():
+class PmJobsTopic:
def __init__(self, db, host=None, port=None):
self.db = db
- self.url = 'http://{}:{}'.format(host, port)
- self.nfvi_metric_list = ['cpu_utilization', 'average_memory_utilization', 'disk_read_ops',
- 'disk_write_ops', 'disk_read_bytes', 'disk_write_bytes',
- 'packets_dropped', 'packets_sent', 'packets_received']
+ self.url = "http://{}:{}".format(host, port)
+ self.nfvi_metric_list = [
+ "cpu_utilization",
+ "average_memory_utilization",
+ "disk_read_ops",
+ "disk_write_ops",
+ "disk_read_bytes",
+ "disk_write_bytes",
+ "packets_dropped",
+ "packets_sent",
+ "packets_received",
+ ]
def _get_vnf_metric_list(self, ns_id):
metric_list = self.nfvi_metric_list.copy()
vnfr_desc = self.db.get_list("vnfrs", {"nsr-id-ref": ns_id})
if not vnfr_desc:
- raise EngineException("NS not found with id {}".format(ns_id), http_code=HTTPStatus.NOT_FOUND)
+ raise EngineException(
+ "NS not found with id {}".format(ns_id), http_code=HTTPStatus.NOT_FOUND
+ )
else:
for vnfr in vnfr_desc:
- vnfd_desc = self.db.get_one("vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=True, fail_on_more=False)
+ vnfd_desc = self.db.get_one(
+ "vnfds",
+ {"_id": vnfr["vnfd-id"]},
+ fail_on_empty=True,
+ fail_on_more=False,
+ )
try:
- configs = vnfd_desc.get("df")[0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]
+ configs = vnfd_desc.get("df")[0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"]
except Exception:
configs = []
for config in configs:
if "metrics" in config:
- metric_list.extend([quote(metric['name']) for metric in config["metrics"]])
+ metric_list.extend(
+ [quote(metric["name"]) for metric in config["metrics"]]
+ )
metric_list = list(set(metric_list))
return metric_list
async with aiohttp.ClientSession() as session:
data = []
for metlist in metrics_list:
- request_url = self.url+'/api/v1/query?query=osm_'+metlist+"{ns_id='"+ns_id+"'}"
+ request_url = (
+ self.url
+ + "/api/v1/query?query=osm_"
+ + metlist
+ + "{ns_id='"
+ + ns_id
+ + "'}"
+ )
async with session.get(request_url) as resp:
resp = await resp.json()
- resp = resp['data']['result']
+ resp = resp["data"]["result"]
if resp:
data.append(resp)
return data
metrics_list = self._get_vnf_metric_list(ns_id)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
- prom_metric = loop.run_until_complete(self._prom_metric_request(ns_id, metrics_list))
+ prom_metric = loop.run_until_complete(
+ self._prom_metric_request(ns_id, metrics_list)
+ )
metric = {}
metric_temp = []
for index_list in prom_metric:
for index in index_list:
- process_metric = {'performanceValue': {'performanceValue': {}}}
- process_metric['objectInstanceId'] = index['metric']['ns_id']
- process_metric['performanceMetric'] = index['metric']['__name__']
- process_metric['performanceValue']['timestamp'] = index['value'][0]
- process_metric['performanceValue']['performanceValue']['performanceValue'] = index['value'][1]
- process_metric['performanceValue']['performanceValue']['vnfMemberIndex'] \
- = index['metric']['vnf_member_index']
- if 'vdu_name' not in index['metric']:
+ process_metric = {"performanceValue": {"performanceValue": {}}}
+ process_metric["objectInstanceId"] = index["metric"]["ns_id"]
+ process_metric["performanceMetric"] = index["metric"]["__name__"]
+ process_metric["performanceValue"]["timestamp"] = index["value"][0]
+ process_metric["performanceValue"]["performanceValue"][
+ "performanceValue"
+ ] = index["value"][1]
+ process_metric["performanceValue"]["performanceValue"][
+ "vnfMemberIndex"
+ ] = index["metric"]["vnf_member_index"]
+ if "vdu_name" not in index["metric"]:
pass
else:
- process_metric['performanceValue']['performanceValue']['vduName'] = index['metric']['vdu_name']
+ process_metric["performanceValue"]["performanceValue"][
+ "vduName"
+ ] = index["metric"]["vdu_name"]
metric_temp.append(process_metric)
- metric['entries'] = metric_temp
+ metric["entries"] = metric_temp
return metric
filter_dict["authentication"] = None # For Items without authentication
existing_subscriptions = self.db.get_list("subscriptions", q_filter=filter_dict)
new_sub_pwd = None
- if content.get("authentication") and content["authentication"].get("authType") == "basic":
+ if (
+ content.get("authentication")
+ and content["authentication"].get("authType") == "basic"
+ ):
new_sub_pwd = content["authentication"]["paramsBasic"]["password"]
content["authentication"]["paramsBasic"].pop("password", None)
for existing_subscription in existing_subscriptions:
sub_id = existing_subscription.pop("_id", None)
existing_subscription.pop("_admin", None)
existing_subscription.pop("schema_version", None)
- if existing_subscription.get("authentication") and \
- existing_subscription["authentication"].get("authType") == "basic":
- existing_subscription["authentication"]["paramsBasic"].pop("password", None)
+ if (
+ existing_subscription.get("authentication")
+ and existing_subscription["authentication"].get("authType") == "basic"
+ ):
+ existing_subscription["authentication"]["paramsBasic"].pop(
+ "password", None
+ )
# self.logger.debug(existing_subscription)
if existing_subscription == content:
- raise EngineException("Subscription already exists with id: {}".format(sub_id),
- HTTPStatus.CONFLICT)
+ raise EngineException(
+ "Subscription already exists with id: {}".format(sub_id),
+ HTTPStatus.CONFLICT,
+ )
if new_sub_pwd:
content["authentication"]["paramsBasic"]["password"] = new_sub_pwd
return
if auth is None:
response = requests.get(url, timeout=5)
if response.status_code != HTTPStatus.NO_CONTENT:
- raise EngineException("Cannot access to the notification URL '{}',received {}: {}"
- .format(url, response.status_code, response.content))
+ raise EngineException(
+ "Cannot access to the notification URL '{}',received {}: {}".format(
+ url, response.status_code, response.content
+ )
+ )
elif auth["authType"] == "basic":
username = auth["paramsBasic"].get("userName")
password = auth["paramsBasic"].get("password")
response = requests.get(url, auth=(username, password), timeout=5)
if response.status_code != HTTPStatus.NO_CONTENT:
- raise EngineException("Cannot access to the notification URL '{}',received {}: {}"
- .format(url, response.status_code, response.content))
+ raise EngineException(
+ "Cannot access to the notification URL '{}',received {}: {}".format(
+ url, response.status_code, response.content
+ )
+ )
except requests.exceptions.RequestException as e:
error_text = type(e).__name__ + ": " + str(e)
- raise EngineException("Cannot access to the notification URL '{}': {}".format(url, error_text))
+ raise EngineException(
+ "Cannot access to the notification URL '{}': {}".format(
+ url, error_text
+ )
+ )
url = content["CallbackUri"]
auth = content.get("authentication")
content["schema_version"] = schema_version = "1.1"
if auth is not None and auth["authType"] == "basic":
if content["authentication"]["paramsBasic"].get("password"):
- content["authentication"]["paramsBasic"]["password"] = \
- self.db.encrypt(content["authentication"]["paramsBasic"]["password"],
- schema_version=schema_version, salt=content["_id"])
+ content["authentication"]["paramsBasic"]["password"] = self.db.encrypt(
+ content["authentication"]["paramsBasic"]["password"],
+ schema_version=schema_version,
+ salt=content["_id"],
+ )
return None
def new(self, rollback, session, indata=None, kwargs=None, headers=None):
Uses BaseTopic.new to create entry into db
Once entry is made into subscriptions,mapper function is invoked
"""
- _id, op_id = BaseTopic.new(self, rollback, session, indata=indata, kwargs=kwargs, headers=headers)
- rollback.append({"topic": "mapped_subscriptions", "operation": "del_list", "filter": {"reference": _id}})
+ _id, op_id = BaseTopic.new(
+ self, rollback, session, indata=indata, kwargs=kwargs, headers=headers
+ )
+ rollback.append(
+ {
+ "topic": "mapped_subscriptions",
+ "operation": "del_list",
+ "filter": {"reference": _id},
+ }
+ )
self._subscription_mapper(_id, indata, table="mapped_subscriptions")
return _id, op_id
:param table: table in which transformed data are inserted
"""
formatted_data = []
- formed_data = {"reference": data.get("_id"),
- "CallbackUri": data.get("CallbackUri")}
+ formed_data = {
+ "reference": data.get("_id"),
+ "CallbackUri": data.get("CallbackUri"),
+ }
if data.get("authentication"):
formed_data.update({"authentication": data.get("authentication")})
if data.get("filter"):
formatted_data.append(update_dict)
elif elem == "NsLcmOperationOccurrenceNotification":
if "operationTypes" in data["filter"].keys():
- update_dict["operationTypes"] = data["filter"]["operationTypes"]
+ update_dict["operationTypes"] = data["filter"][
+ "operationTypes"
+ ]
else:
update_dict["operationTypes"] = "ANY"
if "operationStates" in data["filter"].keys():
- update_dict["operationStates"] = data["filter"]["operationStates"]
+ update_dict["operationStates"] = data["filter"][
+ "operationStates"
+ ]
else:
update_dict["operationStates"] = "ANY"
formatted_data.append(update_dict)
elif elem == "NsChangeNotification":
if "nsComponentTypes" in data["filter"].keys():
- update_dict["nsComponentTypes"] = data["filter"]["nsComponentTypes"]
+ update_dict["nsComponentTypes"] = data["filter"][
+ "nsComponentTypes"
+ ]
else:
update_dict["nsComponentTypes"] = "ANY"
if "lcmOpNameImpactingNsComponent" in data["filter"].keys():
- update_dict["lcmOpNameImpactingNsComponent"] = \
- data["filter"]["lcmOpNameImpactingNsComponent"]
+ update_dict["lcmOpNameImpactingNsComponent"] = data[
+ "filter"
+ ]["lcmOpNameImpactingNsComponent"]
else:
update_dict["lcmOpNameImpactingNsComponent"] = "ANY"
- if "lcmOpOccStatusImpactingNsComponent" in data["filter"].keys():
- update_dict["lcmOpOccStatusImpactingNsComponent"] = \
- data["filter"]["lcmOpOccStatusImpactingNsComponent"]
+ if (
+ "lcmOpOccStatusImpactingNsComponent"
+ in data["filter"].keys()
+ ):
+ update_dict["lcmOpOccStatusImpactingNsComponent"] = data[
+ "filter"
+ ]["lcmOpOccStatusImpactingNsComponent"]
else:
update_dict["lcmOpOccStatusImpactingNsComponent"] = "ANY"
formatted_data.append(update_dict)
class SubscriptionException(Exception):
-
def __init__(self, message, http_code=HTTPStatus.BAD_REQUEST):
self.http_code = http_code
Exception.__init__(self, message)
class SubscriptionThread(threading.Thread):
-
def __init__(self, config, engine):
"""
Constructor of class
self.engine = engine
self.loop = None
self.logger = logging.getLogger("nbi.subscriptions")
- self.aiomain_task_admin = None # asyncio task for receiving admin actions from kafka bus
- self.aiomain_task = None # asyncio task for receiving normal actions from kafka bus
+ self.aiomain_task_admin = (
+ None # asyncio task for receiving admin actions from kafka bus
+ )
+ self.aiomain_task = (
+ None # asyncio task for receiving normal actions from kafka bus
+ )
self.internal_session = { # used for a session to the engine methods
"project_id": (),
"set_project": (),
# bug 710 635. The library aiokafka does not recieve anything when the topci at kafka has not been
# created.
# Before subscribe, send dummy messages
- await self.msg.aiowrite("admin", "echo", "dummy message", loop=self.loop)
+ await self.msg.aiowrite(
+ "admin", "echo", "dummy message", loop=self.loop
+ )
await self.msg.aiowrite("ns", "echo", "dummy message", loop=self.loop)
await self.msg.aiowrite("nsi", "echo", "dummy message", loop=self.loop)
if not kafka_working:
if not self.aiomain_task_admin:
await asyncio.sleep(10, loop=self.loop)
self.logger.debug("Starting admin subscription task")
- self.aiomain_task_admin = asyncio.ensure_future(self.msg.aioread(("admin",), loop=self.loop,
- group_id=False,
- aiocallback=self._msg_callback),
- loop=self.loop)
+ self.aiomain_task_admin = asyncio.ensure_future(
+ self.msg.aioread(
+ ("admin",),
+ loop=self.loop,
+ group_id=False,
+ aiocallback=self._msg_callback,
+ ),
+ loop=self.loop,
+ )
if not self.aiomain_task:
await asyncio.sleep(10, loop=self.loop)
self.logger.debug("Starting non-admin subscription task")
- self.aiomain_task = asyncio.ensure_future(self.msg.aioread(("ns", "nsi"), loop=self.loop,
- aiocallback=self._msg_callback),
- loop=self.loop)
- done, _ = await asyncio.wait([self.aiomain_task, self.aiomain_task_admin],
- timeout=None, loop=self.loop, return_when=asyncio.FIRST_COMPLETED)
+ self.aiomain_task = asyncio.ensure_future(
+ self.msg.aioread(
+ ("ns", "nsi"),
+ loop=self.loop,
+ aiocallback=self._msg_callback,
+ ),
+ loop=self.loop,
+ )
+ done, _ = await asyncio.wait(
+ [self.aiomain_task, self.aiomain_task_admin],
+ timeout=None,
+ loop=self.loop,
+ return_when=asyncio.FIRST_COMPLETED,
+ )
try:
if self.aiomain_task_admin in done:
exc = self.aiomain_task_admin.exception()
- self.logger.error("admin subscription task exception: {}".format(exc))
+ self.logger.error(
+ "admin subscription task exception: {}".format(exc)
+ )
self.aiomain_task_admin = None
if self.aiomain_task in done:
exc = self.aiomain_task.exception()
- self.logger.error("non-admin subscription task exception: {}".format(exc))
+ self.logger.error(
+ "non-admin subscription task exception: {}".format(exc)
+ )
self.aiomain_task = None
except asyncio.CancelledError:
pass
return
if kafka_working:
# logging only first time
- self.logger.critical("Error accessing kafka '{}'. Retrying ...".format(e))
+ self.logger.critical(
+ "Error accessing kafka '{}'. Retrying ...".format(e)
+ )
kafka_working = False
await asyncio.sleep(10, loop=self.loop)
self.db = dbmemory.DbMemory()
self.db.db_connect(self.config["database"])
else:
- raise SubscriptionException("Invalid configuration param '{}' at '[database]':'driver'".format(
- self.config["database"]["driver"]))
+ raise SubscriptionException(
+ "Invalid configuration param '{}' at '[database]':'driver'".format(
+ self.config["database"]["driver"]
+ )
+ )
if not self.msg:
config_msg = self.config["message"].copy()
config_msg["loop"] = self.loop
self.msg = msgkafka.MsgKafka()
self.msg.connect(config_msg)
else:
- raise SubscriptionException("Invalid configuration param '{}' at '[message]':'driver'".format(
- config_msg["driver"]))
+ raise SubscriptionException(
+ "Invalid configuration param '{}' at '[message]':'driver'".format(
+ config_msg["driver"]
+ )
+ )
self.nslcm = NsLcmNotification(self.db)
except (DbException, MsgException) as e:
raise SubscriptionException(str(e), http_code=e.http_code)
while not self.to_terminate:
try:
- self.loop.run_until_complete(asyncio.ensure_future(self.start_kafka(), loop=self.loop))
+ self.loop.run_until_complete(
+ asyncio.ensure_future(self.start_kafka(), loop=self.loop)
+ )
# except asyncio.CancelledError:
# break # if cancelled it should end, breaking loop
except Exception as e:
if not self.to_terminate:
- self.logger.exception("Exception '{}' at messaging read loop".format(e), exc_info=True)
+ self.logger.exception(
+ "Exception '{}' at messaging read loop".format(e), exc_info=True
+ )
self.logger.debug("Finishing")
self._stop()
msg_to_send = []
try:
if topic == "ns":
- if command == "terminated" and params["operationState"] in ("COMPLETED", "PARTIALLY_COMPLETED"):
+ if command == "terminated" and params["operationState"] in (
+ "COMPLETED",
+ "PARTIALLY_COMPLETED",
+ ):
self.logger.debug("received ns terminated {}".format(params))
if params.get("autoremove"):
- self.engine.del_item(self.internal_session, "nsrs", _id=params["nsr_id"],
- not_send_msg=msg_to_send)
- self.logger.debug("ns={} deleted from database".format(params["nsr_id"]))
+ self.engine.del_item(
+ self.internal_session,
+ "nsrs",
+ _id=params["nsr_id"],
+ not_send_msg=msg_to_send,
+ )
+ self.logger.debug(
+ "ns={} deleted from database".format(params["nsr_id"])
+ )
# Check for nslcm notification
if isinstance(params, dict):
# Check availability of operationState and command
- if (not params.get("operationState")) or (not command) or (not params.get("operationParams")):
- self.logger.debug("Message can not be used for notification of nslcm")
+ if (
+ (not params.get("operationState"))
+ or (not command)
+ or (not params.get("operationParams"))
+ ):
+ self.logger.debug(
+ "Message can not be used for notification of nslcm"
+ )
else:
nsd_id = params["operationParams"].get("nsdId")
ns_instance_id = params["operationParams"].get("nsInstanceId")
# Any one among nsd_id, ns_instance_id should be present.
if not (nsd_id or ns_instance_id):
- self.logger.debug("Message can not be used for notification of nslcm")
+ self.logger.debug(
+ "Message can not be used for notification of nslcm"
+ )
else:
op_state = params["operationState"]
- event_details = {"topic": topic, "command": command.upper(), "params": params}
- subscribers = self.nslcm.get_subscribers(nsd_id, ns_instance_id, command.upper(), op_state,
- event_details)
+ event_details = {
+ "topic": topic,
+ "command": command.upper(),
+ "params": params,
+ }
+ subscribers = self.nslcm.get_subscribers(
+ nsd_id,
+ ns_instance_id,
+ command.upper(),
+ op_state,
+ event_details,
+ )
# self.logger.debug("subscribers list: ")
# self.logger.debug(subscribers)
if subscribers:
- asyncio.ensure_future(self.nslcm.send_notifications(subscribers, loop=self.loop),
- loop=self.loop)
+ asyncio.ensure_future(
+ self.nslcm.send_notifications(
+ subscribers, loop=self.loop
+ ),
+ loop=self.loop,
+ )
else:
- self.logger.debug("Message can not be used for notification of nslcm")
+ self.logger.debug(
+ "Message can not be used for notification of nslcm"
+ )
elif topic == "nsi":
- if command == "terminated" and params["operationState"] in ("COMPLETED", "PARTIALLY_COMPLETED"):
+ if command == "terminated" and params["operationState"] in (
+ "COMPLETED",
+ "PARTIALLY_COMPLETED",
+ ):
self.logger.debug("received nsi terminated {}".format(params))
if params.get("autoremove"):
- self.engine.del_item(self.internal_session, "nsis", _id=params["nsir_id"],
- not_send_msg=msg_to_send)
- self.logger.debug("nsis={} deleted from database".format(params["nsir_id"]))
+ self.engine.del_item(
+ self.internal_session,
+ "nsis",
+ _id=params["nsir_id"],
+ not_send_msg=msg_to_send,
+ )
+ self.logger.debug(
+ "nsis={} deleted from database".format(params["nsir_id"])
+ )
elif topic == "admin":
self.logger.debug("received {} {} {}".format(topic, command, params))
- if command in ["echo", "ping"]: # ignored commands
+ if command in ["echo", "ping"]: # ignored commands
pass
elif command == "revoke_token":
if params:
if isinstance(params, dict) and "_id" in params:
tid = params.get("_id")
self.engine.authenticator.tokens_cache.pop(tid, None)
- self.logger.debug("token '{}' removed from token_cache".format(tid))
+ self.logger.debug(
+ "token '{}' removed from token_cache".format(tid)
+ )
else:
- self.logger.debug("unrecognized params in command '{} {}': {}"
- .format(topic, command, params))
+ self.logger.debug(
+ "unrecognized params in command '{} {}': {}".format(
+ topic, command, params
+ )
+ )
else:
self.engine.authenticator.tokens_cache.clear()
self.logger.debug("token_cache cleared")
else:
- self.logger.debug("unrecognized command '{} {}'".format(topic, command))
+ self.logger.debug(
+ "unrecognized command '{} {}'".format(topic, command)
+ )
# writing to kafka must be done with our own loop. For this reason it is not allowed Engine to do that,
# but content to be written is stored at msg_to_send
for msg in msg_to_send:
await self.msg.aiowrite(*msg, loop=self.loop)
except (EngineException, DbException, MsgException) as e:
- self.logger.error("Error while processing topic={} command={}: {}".format(topic, command, e))
+ self.logger.error(
+ "Error while processing topic={} command={}: {}".format(
+ topic, command, e
+ )
+ )
except Exception as e:
- self.logger.exception("Exception while processing topic={} command={}: {}".format(topic, command, e),
- exc_info=True)
+ self.logger.exception(
+ "Exception while processing topic={} command={}: {}".format(
+ topic, command, e
+ ),
+ exc_info=True,
+ )
def _stop(self):
"""
import json
import logging
import yaml
+
# import json
# import tarfile
from time import sleep
def usage():
print("Usage: ", sys.argv[0], "[options]")
- print(" Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'")
- print(" If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
- "where deployment is done")
+ print(
+ " Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'"
+ )
+ print(
+ " If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
+ "where deployment is done"
+ )
print("OPTIONS")
print(" -h|--help: shows this help")
print(" --insecure: Allows non trusted https NBI server")
print(" --list: list available tests")
- print(" --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
- "'--test-osm'")
+ print(
+ " --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
+ "'--test-osm'"
+ )
print(" -p|--password PASSWORD: NBI access password. 'admin' by default")
print(" ---project PROJECT: NBI access project. 'admin' by default")
- print(" --test TEST[,...]: Execute only a test or a comma separated list of tests")
- print(" --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config")
- print(" --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
- "this flag to test the system. LCM and RO components are expected to be up and running")
- print(" --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout))
- print(" --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(timeout_deploy))
- print(" --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
- " by default {}s".format(timeout_configure))
+ print(
+ " --test TEST[,...]: Execute only a test or a comma separated list of tests"
+ )
+ print(
+ " --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config"
+ )
+ print(
+ " --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
+ "this flag to test the system. LCM and RO components are expected to be up and running"
+ )
+ print(
+ " --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)
+ )
+ print(
+ " --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(
+ timeout_deploy
+ )
+ )
+ print(
+ " --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
+ " by default {}s".format(timeout_configure)
+ )
print(" -u|--user USERNAME: NBI access username. 'admin' by default")
- print(" --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default")
+ print(
+ " --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default"
+ )
print(" -v|--verbose print debug information, can be used several times")
print(" --no-verbose remove verbosity")
print(" --version: prints current version")
print(" export OSMNBITEST_VIM_TENANT=vim-tenant")
print(" export OSMNBITEST_VIM_USER=vim-user")
print(" export OSMNBITEST_VIM_PASSWORD=vim-password")
- print(" export OSMNBITEST_VIM_CONFIG=\"vim-config\"")
- print(" export OSMNBITEST_NS_NAME=\"vim-config\"")
+ print(' export OSMNBITEST_VIM_CONFIG="vim-config"')
+ print(' export OSMNBITEST_NS_NAME="vim-config"')
return
headers_zip_yaml = {"Accept": "application/yaml", "Content-type": "application/zip"}
headers_zip_json = {"Accept": "application/json", "Content-type": "application/zip"}
headers_txt_json = {"Accept": "application/json", "Content-type": "text/plain"}
-r_headers_yaml_location_vnfd = {"Location": "/vnfpkgm/v1/vnf_packages_content/", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nsd = {"Location": "/nsd/v1/ns_descriptors_content/", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nst = {"Location": "/nst/v1/netslice_templates_content", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nslcmop = {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nsilcmop = {"Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/", "Content-Type": "application/yaml"}
+r_headers_yaml_location_vnfd = {
+ "Location": "/vnfpkgm/v1/vnf_packages_content/",
+ "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nsd = {
+ "Location": "/nsd/v1/ns_descriptors_content/",
+ "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nst = {
+ "Location": "/nst/v1/netslice_templates_content",
+ "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nslcmop = {
+ "Location": "nslcm/v1/ns_lcm_op_occs/",
+ "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nsilcmop = {
+ "Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/",
+ "Content-Type": "application/yaml",
+}
# test ones authorized
test_authorized_list = (
- ("AU1", "Invalid vnfd id", "GET", "/vnfpkgm/v1/vnf_packages/non-existing-id",
- headers_json, None, 404, r_header_json, "json"),
- ("AU2", "Invalid nsd id", "GET", "/nsd/v1/ns_descriptors/non-existing-id",
- headers_yaml, None, 404, r_header_yaml, "yaml"),
- ("AU3", "Invalid nsd id", "DELETE", "/nsd/v1/ns_descriptors_content/non-existing-id",
- headers_yaml, None, 404, r_header_yaml, "yaml"),
+ (
+ "AU1",
+ "Invalid vnfd id",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/non-existing-id",
+ headers_json,
+ None,
+ 404,
+ r_header_json,
+ "json",
+ ),
+ (
+ "AU2",
+ "Invalid nsd id",
+ "GET",
+ "/nsd/v1/ns_descriptors/non-existing-id",
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ ),
+ (
+ "AU3",
+ "Invalid nsd id",
+ "DELETE",
+ "/nsd/v1/ns_descriptors_content/non-existing-id",
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ ),
)
-timeout = 120 # general timeout
-timeout_deploy = 60*10 # timeout for NS deploying without charms
-timeout_configure = 60*20 # timeout for NS deploying and configuring
+timeout = 120 # general timeout
+timeout_deploy = 60 * 10 # timeout for NS deploying without charms
+timeout_configure = 60 * 20 # timeout for NS deploying and configuring
class TestException(Exception):
class TestRest:
- def __init__(self, url_base, header_base=None, verify=False, user="admin", password="admin", project="admin"):
+ def __init__(
+ self,
+ url_base,
+ header_base=None,
+ verify=False,
+ user="admin",
+ password="admin",
+ project="admin",
+ ):
self.url_base = url_base
if header_base is None:
self.header_base = {}
# contains ID of tests obtained from Location response header. "" key contains last obtained id
self.last_id = ""
self.test_name = None
- self.step = 0 # number of subtest under test
+ self.step = 0 # number of subtest under test
self.passed_tests = 0
self.failed_tests = 0
if key in self.s.headers:
del self.s.headers[key]
- def test(self, description, method, url, headers, payload, expected_codes, expected_headers,
- expected_payload, store_file=None, pooling=False):
+ def test(
+ self,
+ description,
+ method,
+ url,
+ headers,
+ payload,
+ expected_codes,
+ expected_headers,
+ expected_payload,
+ store_file=None,
+ pooling=False,
+ ):
"""
Performs an http request and check http code response. Exit if different than allowed. It get the returned id
that can be used by following test in the URL with {name} where name is the name of the test
payload = json.dumps(payload)
if not pooling:
- test_description = "Test {}{} {} {} {}".format(self.test_name, self.step, description, method, url)
+ test_description = "Test {}{} {} {} {}".format(
+ self.test_name, self.step, description, method, url
+ )
logger.warning(test_description)
self.step += 1
stream = False
__retry = 0
while True:
try:
- r = getattr(self.s, method.lower())(url, data=payload, headers=headers, verify=self.verify,
- stream=stream)
+ r = getattr(self.s, method.lower())(
+ url,
+ data=payload,
+ headers=headers,
+ verify=self.verify,
+ stream=stream,
+ )
break
except requests.exceptions.ConnectionError as e:
if __retry == 2:
expected_codes = (expected_codes,)
if r.status_code not in expected_codes:
raise TestException(
- "Got status {}. Expected {}. {}".format(r.status_code, expected_codes, r.text))
+ "Got status {}. Expected {}. {}".format(
+ r.status_code, expected_codes, r.text
+ )
+ )
if expected_headers:
for header_key, header_val in expected_headers.items():
if header_key.lower() not in r.headers:
raise TestException("Header {} not present".format(header_key))
if header_val and header_val.lower() not in r.headers[header_key]:
- raise TestException("Header {} does not contain {} but {}".format(header_key, header_val,
- r.headers[header_key]))
+ raise TestException(
+ "Header {} does not contain {} but {}".format(
+ header_key, header_val, r.headers[header_key]
+ )
+ )
if expected_payload is not None:
if expected_payload == 0 and len(r.content) > 0:
try:
r.json()
except Exception as e:
- raise TestException("Expected json response payload, but got Exception {}".format(e))
+ raise TestException(
+ "Expected json response payload, but got Exception {}".format(
+ e
+ )
+ )
elif expected_payload == "yaml":
try:
yaml.safe_load(r.text)
except Exception as e:
- raise TestException("Expected yaml response payload, but got Exception {}".format(e))
+ raise TestException(
+ "Expected yaml response payload, but got Exception {}".format(
+ e
+ )
+ )
elif expected_payload in ("zip", "octet-string"):
if len(r.content) == 0:
- raise TestException("Expected some response payload, but got empty")
+ raise TestException(
+ "Expected some response payload, but got empty"
+ )
# try:
# tar = tarfile.open(None, 'r:gz', fileobj=r.raw)
# for tarinfo in tar:
# raise TestException("Expected zip response payload, but got Exception {}".format(e))
elif expected_payload == "text":
if len(r.content) == 0:
- raise TestException("Expected some response payload, but got empty")
+ raise TestException(
+ "Expected some response payload, but got empty"
+ )
# r.text
if store_file:
- with open(store_file, 'wb') as fd:
+ with open(store_file, "wb") as fd:
for chunk in r.iter_content(chunk_size=128):
fd.write(chunk)
location = r.headers.get("Location")
if location:
- _id = location[location.rfind("/") + 1:]
+ _id = location[location.rfind("/") + 1 :]
if _id:
self.last_id = str(_id)
if not pooling:
logger.error("Exception: {}".format(e))
def get_autorization(self): # user=None, password=None, project=None):
- if self.token: # and self.user == user and self.password == password and self.project == project:
+ if (
+ self.token
+ ): # and self.user == user and self.password == password and self.project == project:
return
# self.user = user
# self.password = password
# self.project = project
- r = self.test("Obtain token", "POST", "/admin/v1/tokens", headers_json,
- {"username": self.user, "password": self.password, "project_id": self.project},
- (200, 201), r_header_json, "json")
+ r = self.test(
+ "Obtain token",
+ "POST",
+ "/admin/v1/tokens",
+ headers_json,
+ {
+ "username": self.user,
+ "password": self.password,
+ "project_id": self.project,
+ },
+ (200, 201),
+ r_header_json,
+ "json",
+ )
if not r:
return
response = r.json()
def remove_authorization(self):
if self.token:
- self.test("Delete token", "DELETE", "/admin/v1/tokens/{}".format(self.token), headers_json,
- None, (200, 201, 204), None, None)
+ self.test(
+ "Delete token",
+ "DELETE",
+ "/admin/v1/tokens/{}".format(self.token),
+ headers_json,
+ None,
+ (200, 201, 204),
+ None,
+ None,
+ )
self.token = None
self.unset_header("Authorization")
vim_name = os.environ.get("OSMNBITEST_VIM_NAME")
if not vim_name:
raise TestException(
- "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment")
+ "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment"
+ )
else:
vim_name = "fakeVim"
# Get VIM
- r = self.test("Get VIM ID", "GET", "/admin/v1/vim_accounts?name={}".format(vim_name), headers_json,
- None, 200, r_header_json, "json")
+ r = self.test(
+ "Get VIM ID",
+ "GET",
+ "/admin/v1/vim_accounts?name={}".format(vim_name),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if not r:
return
vims = r.json()
# Add VIM
if test_osm:
# check needed environ parameters:
- if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get("OSMNBITEST_VIM_TENANT"):
- raise TestException("Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
- " to deploy on whit the --test-osm option")
- vim_data = "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}', vim_tenant_name: '{}', "\
- "vim_user: {}, vim_password: {}".format(vim_name,
- os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
- os.environ.get("OSMNBITEST_VIM_URL"),
- os.environ.get("OSMNBITEST_VIM_TENANT"),
- os.environ.get("OSMNBITEST_VIM_USER"),
- os.environ.get("OSMNBITEST_VIM_PASSWORD"))
+ if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get(
+ "OSMNBITEST_VIM_TENANT"
+ ):
+ raise TestException(
+ "Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
+ " to deploy on whit the --test-osm option"
+ )
+ vim_data = "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}', vim_tenant_name: '{}', " "vim_user: {}, vim_password: {}".format(
+ vim_name,
+ os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
+ os.environ.get("OSMNBITEST_VIM_URL"),
+ os.environ.get("OSMNBITEST_VIM_TENANT"),
+ os.environ.get("OSMNBITEST_VIM_USER"),
+ os.environ.get("OSMNBITEST_VIM_PASSWORD"),
+ )
if os.environ.get("OSMNBITEST_VIM_CONFIG"):
- vim_data += " ,config: {}".format(os.environ.get("OSMNBITEST_VIM_CONFIG"))
+ vim_data += " ,config: {}".format(
+ os.environ.get("OSMNBITEST_VIM_CONFIG")
+ )
vim_data += "}"
else:
- vim_data = "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"\
- ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
- self.test("Create VIM", "POST", "/admin/v1/vim_accounts", headers_yaml, vim_data,
- (201, 202), {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"}, "yaml")
+ vim_data = (
+ "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"
+ ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
+ )
+ self.test(
+ "Create VIM",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_yaml,
+ vim_data,
+ (201, 202),
+ {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"},
+ "yaml",
+ )
return self.last_id
def print_results(self):
print("\n\n\n--------------------------------------------")
- print("TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(self.passed_tests + self.failed_tests,
- self.passed_tests, self.failed_tests))
+ print(
+ "TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(
+ self.passed_tests + self.failed_tests,
+ self.passed_tests,
+ self.failed_tests,
+ )
+ )
print("--------------------------------------------")
def wait_until_delete(self, url_op, timeout_delete):
:return:
"""
description = "Wait to topic being deleted"
- test_description = "Test {}{} {} {} {}".format(self.test_name, self.step, description, "GET", url_op)
+ test_description = "Test {}{} {} {} {}".format(
+ self.test_name, self.step, description, "GET", url_op
+ )
logger.warning(test_description)
self.step += 1
wait = timeout_delete
while wait >= 0:
- r = self.test(description, "GET", url_op, headers_yaml, None, (200, 404), None, r_header_yaml, "yaml",
- pooling=True)
+ r = self.test(
+ description,
+ "GET",
+ url_op,
+ headers_yaml,
+ None,
+ (200, 404),
+ None,
+ r_header_yaml,
+ "yaml",
+ pooling=True,
+ )
if not r:
return
if r.status_code == 404:
wait -= 5
sleep(5)
else:
- raise TestException("Topic is not deleted after {} seconds".format(timeout_delete))
+ raise TestException(
+ "Topic is not deleted after {} seconds".format(timeout_delete)
+ )
self.failed_tests += 1
def wait_operation_ready(self, ns_nsi, opp_id, timeout, expected_fail=False):
else:
url_op = "/nsilcm/v1/nsi_lcm_op_occs/{}".format(opp_id)
description = "Wait to {} lcm operation complete".format(ns_nsi)
- test_description = "Test {}{} {} {} {}".format(self.test_name, self.step, description, "GET", url_op)
+ test_description = "Test {}{} {} {} {}".format(
+ self.test_name, self.step, description, "GET", url_op
+ )
logger.warning(test_description)
self.step += 1
wait = timeout
while wait >= 0:
- r = self.test(description, "GET", url_op, headers_json, None,
- 200, r_header_json, "json", pooling=True)
+ r = self.test(
+ description,
+ "GET",
+ url_op,
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ pooling=True,
+ )
if not r:
return
nslcmop = r.json()
if "COMPLETED" in nslcmop["operationState"]:
if expected_fail:
- logger.error("NS terminate has success, expecting failing: {}".format(nslcmop["detailed-status"]))
+ logger.error(
+ "NS terminate has success, expecting failing: {}".format(
+ nslcmop["detailed-status"]
+ )
+ )
self.failed_tests += 1
else:
self.passed_tests += 1
break
elif "FAILED" in nslcmop["operationState"]:
if not expected_fail:
- logger.error("NS terminate has failed: {}".format(nslcmop["detailed-status"]))
+ logger.error(
+ "NS terminate has failed: {}".format(nslcmop["detailed-status"])
+ )
self.failed_tests += 1
else:
self.passed_tests += 1
sleep(10)
else:
self.failed_tests += 1
- logger.error("NS instantiate is not terminate after {} seconds".format(timeout))
+ logger.error(
+ "NS instantiate is not terminate after {} seconds".format(timeout)
+ )
return
print("", file=stderr)
engine.set_test_name("NonAuth")
engine.remove_authorization()
test_not_authorized_list = (
- ("Invalid token", "GET", "/admin/v1/users", headers_json, None, 401, r_header_json, "json"),
- ("Invalid URL", "POST", "/admin/v1/nonexist", headers_yaml, None, 405, r_header_yaml, "yaml"),
- ("Invalid version", "DELETE", "/admin/v2/users", headers_yaml, None, 405, r_header_yaml, "yaml"),
+ (
+ "Invalid token",
+ "GET",
+ "/admin/v1/users",
+ headers_json,
+ None,
+ 401,
+ r_header_json,
+ "json",
+ ),
+ (
+ "Invalid URL",
+ "POST",
+ "/admin/v1/nonexist",
+ headers_yaml,
+ None,
+ 405,
+ r_header_yaml,
+ "yaml",
+ ),
+ (
+ "Invalid version",
+ "DELETE",
+ "/admin/v2/users",
+ headers_yaml,
+ None,
+ 405,
+ r_header_yaml,
+ "yaml",
+ ),
)
for t in test_not_authorized_list:
engine.test(*t)
engine.get_autorization()
- res = engine.test("Create project non admin 1", "POST", "/admin/v1/projects", headers_json, {"name": "P1"},
- (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create project non admin 1",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P1"},
+ (201, 204),
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
p1 = engine.last_id if res else None
- res = engine.test("Create project admin", "POST", "/admin/v1/projects", headers_json,
- {"name": "Padmin", "admin": True}, (201, 204),
- {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create project admin",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "Padmin", "admin": True},
+ (201, 204),
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
padmin = engine.last_id if res else None
- res = engine.test("Create project bad format", "POST", "/admin/v1/projects", headers_json, {"name": 1},
- (400, 422), r_header_json, "json")
+ res = engine.test(
+ "Create project bad format",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": 1},
+ (400, 422),
+ r_header_json,
+ "json",
+ )
pbad = engine.last_id if res else None
- res = engine.test("Get project admin role", "GET", "/admin/v1/roles?name=project_admin", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Get project admin role",
+ "GET",
+ "/admin/v1/roles?name=project_admin",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
rpa = res.json()[0]["_id"] if res else None
- res = engine.test("Get project user role", "GET", "/admin/v1/roles?name=project_user", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Get project user role",
+ "GET",
+ "/admin/v1/roles?name=project_user",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
rpu = res.json()[0]["_id"] if res else None
- res = engine.test("Get system admin role", "GET", "/admin/v1/roles?name=system_admin", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Get system admin role",
+ "GET",
+ "/admin/v1/roles?name=system_admin",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
rsa = res.json()[0]["_id"] if res else None
data = {"username": "U1", "password": "pw1"}
data["project_role_mappings"] = [
{"project": p1, "role": rpa},
{"project": p2, "role": rpa},
- {"project": padmin, "role": rpu}
+ {"project": padmin, "role": rpu},
]
rc = 201
xhd = {"Location": "/admin/v1/users/", "Content-Type": "application/json"}
- res = engine.test("Create user with bad project and force", "POST", "/admin/v1/users?FORCE=True", headers_json,
- data, rc, xhd, "json")
+ res = engine.test(
+ "Create user with bad project and force",
+ "POST",
+ "/admin/v1/users?FORCE=True",
+ headers_json,
+ data,
+ rc,
+ xhd,
+ "json",
+ )
if res:
u1 = engine.last_id
else:
# User is created sometimes even though an exception is raised
- res = engine.test("Get user U1", "GET", "/admin/v1/users?username=U1", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Get user U1",
+ "GET",
+ "/admin/v1/users?username=U1",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
u1 = res.json()[0]["_id"] if res else None
data = {"username": "U2", "password": "pw2"}
- data["project_role_mappings"] = [{"project": p1, "role": rpa}, {"project": padmin, "role": rsa}]
- res = engine.test("Create user 2", "POST", "/admin/v1/users", headers_json,
- data, 201, {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
+ data["project_role_mappings"] = [
+ {"project": p1, "role": rpa},
+ {"project": padmin, "role": rsa},
+ ]
+ res = engine.test(
+ "Create user 2",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ data,
+ 201,
+ {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+ "json",
+ )
u2 = engine.last_id if res else None
if u1:
ftt = "project_role_mappings"
xpr = [{"project": p1, "role": rpa}, {"project": padmin, "role": rpu}]
data = {ftt: xpr}
- engine.test("Edit user U1, delete P2 project", "PATCH", "/admin/v1/users/"+u1, headers_json,
- data, 204, None, None)
- res = engine.test("Check user U1, contains the right projects", "GET", "/admin/v1/users/"+u1,
- headers_json, None, 200, None, json)
+ engine.test(
+ "Edit user U1, delete P2 project",
+ "PATCH",
+ "/admin/v1/users/" + u1,
+ headers_json,
+ data,
+ 204,
+ None,
+ None,
+ )
+ res = engine.test(
+ "Check user U1, contains the right projects",
+ "GET",
+ "/admin/v1/users/" + u1,
+ headers_json,
+ None,
+ 200,
+ None,
+ json,
+ )
if res:
rj = res.json()
xpr[0]["project_name"] = "P1"
if pr not in rj[ftt]:
ok = False
if not ok:
- logger.error("User {} '{}' are different than expected '{}'. Edition was not done properly"
- .format(ftt, rj[ftt], xpr))
+ logger.error(
+ "User {} '{}' are different than expected '{}'. Edition was not done properly".format(
+ ftt, rj[ftt], xpr
+ )
+ )
engine.failed_tests += 1
- p2 = None # To prevent deletion attempts
+ p2 = None # To prevent deletion attempts
# Add a test of 'default project' for Keystone?
if u2:
- engine.test("Edit user U2, change password", "PUT", "/admin/v1/users/"+u2, headers_json,
- {"password": "pw2_new"}, 204, None, None)
+ engine.test(
+ "Edit user U2, change password",
+ "PUT",
+ "/admin/v1/users/" + u2,
+ headers_json,
+ {"password": "pw2_new"},
+ 204,
+ None,
+ None,
+ )
if p1:
- engine.test("Change to project P1 non existing", "POST", "/admin/v1/tokens/", headers_json,
- {"project_id": p1}, 401, r_header_json, "json")
+ engine.test(
+ "Change to project P1 non existing",
+ "POST",
+ "/admin/v1/tokens/",
+ headers_json,
+ {"project_id": p1},
+ 401,
+ r_header_json,
+ "json",
+ )
if u2 and p1:
- res = engine.test("Change to user U2 project P1", "POST", "/admin/v1/tokens", headers_json,
- {"username": "U2", "password": "pw2_new", "project_id": "P1"}, (200, 201),
- r_header_json, "json")
+ res = engine.test(
+ "Change to user U2 project P1",
+ "POST",
+ "/admin/v1/tokens",
+ headers_json,
+ {"username": "U2", "password": "pw2_new", "project_id": "P1"},
+ (200, 201),
+ r_header_json,
+ "json",
+ )
if res:
rj = res.json()
engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
- engine.test("Edit user projects non admin", "PUT", "/admin/v1/users/U1", headers_json,
- {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
- 401, r_header_json, "json")
-
- res = engine.test("Add new project non admin", "POST", "/admin/v1/projects", headers_json,
- {"name": "P2"}, 401, r_header_json, "json")
+ engine.test(
+ "Edit user projects non admin",
+ "PUT",
+ "/admin/v1/users/U1",
+ headers_json,
+ {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
+ 401,
+ r_header_json,
+ "json",
+ )
+
+ res = engine.test(
+ "Add new project non admin",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P2"},
+ 401,
+ r_header_json,
+ "json",
+ )
if res is None or res.status_code == 201:
# The project has been created even though it shouldn't
- res = engine.test("Get project P2", "GET", "/admin/v1/projects/P2", headers_json, None,
- 200, r_header_json, "json")
+ res = engine.test(
+ "Get project P2",
+ "GET",
+ "/admin/v1/projects/P2",
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
p2 = res.json()["_id"] if res else None
if p1:
data = {"username": "U3", "password": "pw3"}
data["project_role_mappings"] = [{"project": p1, "role": rpu}]
- res = engine.test("Add new user non admin", "POST", "/admin/v1/users", headers_json,
- data, 401, r_header_json, "json")
+ res = engine.test(
+ "Add new user non admin",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ data,
+ 401,
+ r_header_json,
+ "json",
+ )
if res is None or res.status_code == 201:
# The user has been created even though it shouldn't
- res = engine.test("Get user U3", "GET", "/admin/v1/users/U3", headers_json, None,
- 200, r_header_json, "json")
+ res = engine.test(
+ "Get user U3",
+ "GET",
+ "/admin/v1/users/U3",
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
u3 = res.json()["_id"] if res else None
else:
u3 = None
if padmin:
- res = engine.test("Change to user U2 project Padmin", "POST", "/admin/v1/tokens", headers_json,
- {"project_id": "Padmin"}, # Caused a Keystone authentication error
- # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
- (200, 201), r_header_json, "json")
+ res = engine.test(
+ "Change to user U2 project Padmin",
+ "POST",
+ "/admin/v1/tokens",
+ headers_json,
+ {
+ "project_id": "Padmin"
+ }, # Caused a Keystone authentication error
+ # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
+ (200, 201),
+ r_header_json,
+ "json",
+ )
if res:
rj = res.json()
- engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
-
- res = engine.test("Add new project admin", "POST", "/admin/v1/projects", headers_json,
- {"name": "P3"}, (201, 204),
- {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
- "json")
+ engine.set_header(
+ {"Authorization": "Bearer {}".format(rj["id"])}
+ )
+
+ res = engine.test(
+ "Add new project admin",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P3"},
+ (201, 204),
+ {
+ "Location": "/admin/v1/projects/",
+ "Content-Type": "application/json",
+ },
+ "json",
+ )
p3 = engine.last_id if res else None
if p1:
data = {"username": "U4", "password": "pw4"}
- data["project_role_mappings"] = [{"project": p1, "role": rpa}]
- res = engine.test("Add new user admin", "POST", "/admin/v1/users", headers_json,
- data, (201, 204),
- {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
- "json")
+ data["project_role_mappings"] = [
+ {"project": p1, "role": rpa}
+ ]
+ res = engine.test(
+ "Add new user admin",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ data,
+ (201, 204),
+ {
+ "Location": "/admin/v1/users/",
+ "Content-Type": "application/json",
+ },
+ "json",
+ )
u4 = engine.last_id if res else None
else:
u4 = None
if u4 and p3:
- data = {"project_role_mappings": [{"project": p3, "role": rpa}]}
- engine.test("Edit user projects admin", "PUT", "/admin/v1/users/U4", headers_json,
- data, 204, None, None)
+ data = {
+ "project_role_mappings": [{"project": p3, "role": rpa}]
+ }
+ engine.test(
+ "Edit user projects admin",
+ "PUT",
+ "/admin/v1/users/U4",
+ headers_json,
+ data,
+ 204,
+ None,
+ None,
+ )
# Project is deleted even though it shouldn't - PROVISIONAL?
- res = engine.test("Delete project P3 conflict", "DELETE", "/admin/v1/projects/"+p3,
- headers_json, None, 409, None, None)
+ res = engine.test(
+ "Delete project P3 conflict",
+ "DELETE",
+ "/admin/v1/projects/" + p3,
+ headers_json,
+ None,
+ 409,
+ None,
+ None,
+ )
if res and res.status_code in (200, 204):
p3 = None
if p3:
- res = engine.test("Delete project P3 forcing", "DELETE",
- "/admin/v1/projects/"+p3+"?FORCE=True", headers_json, None, 204,
- None, None)
+ res = engine.test(
+ "Delete project P3 forcing",
+ "DELETE",
+ "/admin/v1/projects/" + p3 + "?FORCE=True",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res and res.status_code in (200, 204):
p3 = None
if u2:
- res = engine.test("Delete user U2. Conflict deleting own user", "DELETE",
- "/admin/v1/users/"+u2, headers_json, None, 409, r_header_json, "json")
+ res = engine.test(
+ "Delete user U2. Conflict deleting own user",
+ "DELETE",
+ "/admin/v1/users/" + u2,
+ headers_json,
+ None,
+ 409,
+ r_header_json,
+ "json",
+ )
if res is None or res.status_code in (200, 204):
u2 = None
if u4:
- res = engine.test("Delete user U4", "DELETE", "/admin/v1/users/"+u4, headers_json, None,
- 204, None, None)
+ res = engine.test(
+ "Delete user U4",
+ "DELETE",
+ "/admin/v1/users/" + u4,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res and res.status_code in (200, 204):
u4 = None
if p3:
- res = engine.test("Delete project P3", "DELETE", "/admin/v1/projects/"+p3, headers_json,
- None, 204, None, None)
+ res = engine.test(
+ "Delete project P3",
+ "DELETE",
+ "/admin/v1/projects/" + p3,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res and res.status_code in (200, 204):
p3 = None
if u3:
- res = engine.test("Delete user U3", "DELETE", "/admin/v1/users/"+u3, headers_json, None,
- 204, None, None)
+ res = engine.test(
+ "Delete user U3",
+ "DELETE",
+ "/admin/v1/users/" + u3,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res:
u3 = None
# change to admin
- engine.remove_authorization() # To force get authorization
+ engine.remove_authorization() # To force get authorization
engine.get_autorization()
if u1:
- engine.test("Delete user U1", "DELETE", "/admin/v1/users/"+u1, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete user U1",
+ "DELETE",
+ "/admin/v1/users/" + u1,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if u2:
- engine.test("Delete user U2", "DELETE", "/admin/v1/users/"+u2, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete user U2",
+ "DELETE",
+ "/admin/v1/users/" + u2,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if u3:
- engine.test("Delete user U3", "DELETE", "/admin/v1/users/"+u3, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete user U3",
+ "DELETE",
+ "/admin/v1/users/" + u3,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if u4:
- engine.test("Delete user U4", "DELETE", "/admin/v1/users/"+u4, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete user U4",
+ "DELETE",
+ "/admin/v1/users/" + u4,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if p1:
- engine.test("Delete project P1", "DELETE", "/admin/v1/projects/"+p1, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete project P1",
+ "DELETE",
+ "/admin/v1/projects/" + p1,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if p2:
- engine.test("Delete project P2", "DELETE", "/admin/v1/projects/"+p2, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete project P2",
+ "DELETE",
+ "/admin/v1/projects/" + p2,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if p3:
- engine.test("Delete project P3", "DELETE", "/admin/v1/projects/"+p3, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete project P3",
+ "DELETE",
+ "/admin/v1/projects/" + p3,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if padmin:
- engine.test("Delete project Padmin", "DELETE", "/admin/v1/projects/"+padmin, headers_json, None, 204,
- None, None)
+ engine.test(
+ "Delete project Padmin",
+ "DELETE",
+ "/admin/v1/projects/" + padmin,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if pbad:
- engine.test("Delete bad project", "DELETE", "/admin/v1/projects/"+pbad, headers_json, None, 204,
- None, None)
+ engine.test(
+ "Delete bad project",
+ "DELETE",
+ "/admin/v1/projects/" + pbad,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
# BEGIN New Tests - Addressing Projects/Users by Name/ID
pid1 = pid2 = None
uid1 = uid2 = None
- res = engine.test("Create new project P1", "POST", "/admin/v1/projects", headers_json, {"name": "P1"},
- 201, {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create new project P1",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P1"},
+ 201,
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
if res:
pid1 = res.json()["id"]
# print("# pid =", pid1)
- res = engine.test("Create new project P2", "POST", "/admin/v1/projects", headers_json, {"name": "P2"},
- 201, {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create new project P2",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P2"},
+ 201,
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
if res:
pid2 = res.json()["id"]
# print("# pid =", pid2)
data = {"username": "U1", "password": "pw1"}
data["project_role_mappings"] = [{"project": pid1, "role": rpu}]
- res = engine.test("Create new user U1", "POST", "/admin/v1/users", headers_json, data, 201,
- {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create new user U1",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ data,
+ 201,
+ {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+ "json",
+ )
if res:
uid1 = res.json()["id"]
# print("# uid =", uid1)
data = {"username": "U2", "password": "pw2"}
data["project_role_mappings"] = [{"project": pid2, "role": rpu}]
- res = engine.test("Create new user U2", "POST", "/admin/v1/users", headers_json, data, 201,
- {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create new user U2",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ data,
+ 201,
+ {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+ "json",
+ )
if res:
uid2 = res.json()["id"]
# print("# uid =", uid2)
if pid1:
- engine.test("Get Project P1 by Name", "GET", "/admin/v1/projects/P1", headers_json, None,
- 200, None, "json")
- engine.test("Get Project P1 by ID", "GET", "/admin/v1/projects/"+pid1, headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Get Project P1 by Name",
+ "GET",
+ "/admin/v1/projects/P1",
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
+ engine.test(
+ "Get Project P1 by ID",
+ "GET",
+ "/admin/v1/projects/" + pid1,
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if uid1:
- engine.test("Get User U1 by Name", "GET", "/admin/v1/users/U1", headers_json, None, 200, None, "json")
- engine.test("Get User U1 by ID", "GET", "/admin/v1/users/"+uid1, headers_json, None, 200, None, "json")
+ engine.test(
+ "Get User U1 by Name",
+ "GET",
+ "/admin/v1/users/U1",
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
+ engine.test(
+ "Get User U1 by ID",
+ "GET",
+ "/admin/v1/users/" + uid1,
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if pid1:
- res = engine.test("Rename Project P1 by Name", "PUT", "/admin/v1/projects/P1", headers_json,
- {"name": "P3"}, 204, None, None)
+ res = engine.test(
+ "Rename Project P1 by Name",
+ "PUT",
+ "/admin/v1/projects/P1",
+ headers_json,
+ {"name": "P3"},
+ 204,
+ None,
+ None,
+ )
if res:
- engine.test("Get Project P1 by new Name", "GET", "/admin/v1/projects/P3", headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Get Project P1 by new Name",
+ "GET",
+ "/admin/v1/projects/P3",
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if pid2:
- res = engine.test("Rename Project P2 by ID", "PUT", "/admin/v1/projects/"+pid2, headers_json,
- {"name": "P4"}, 204, None, None)
+ res = engine.test(
+ "Rename Project P2 by ID",
+ "PUT",
+ "/admin/v1/projects/" + pid2,
+ headers_json,
+ {"name": "P4"},
+ 204,
+ None,
+ None,
+ )
if res:
- engine.test("Get Project P2 by new Name", "GET", "/admin/v1/projects/P4", headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Get Project P2 by new Name",
+ "GET",
+ "/admin/v1/projects/P4",
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if uid1:
- res = engine.test("Rename User U1 by Name", "PUT", "/admin/v1/users/U1", headers_json,
- {"username": "U3"}, 204, None, None)
+ res = engine.test(
+ "Rename User U1 by Name",
+ "PUT",
+ "/admin/v1/users/U1",
+ headers_json,
+ {"username": "U3"},
+ 204,
+ None,
+ None,
+ )
if res:
- engine.test("Get User U1 by new Name", "GET", "/admin/v1/users/U3", headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Get User U1 by new Name",
+ "GET",
+ "/admin/v1/users/U3",
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if uid2:
- res = engine.test("Rename User U2 by ID", "PUT", "/admin/v1/users/"+uid2, headers_json,
- {"username": "U4"}, 204, None, None)
+ res = engine.test(
+ "Rename User U2 by ID",
+ "PUT",
+ "/admin/v1/users/" + uid2,
+ headers_json,
+ {"username": "U4"},
+ 204,
+ None,
+ None,
+ )
if res:
- engine.test("Get User U2 by new Name", "GET", "/admin/v1/users/U4", headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Get User U2 by new Name",
+ "GET",
+ "/admin/v1/users/U4",
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if uid1:
- res = engine.test("Delete User U1 by Name", "DELETE", "/admin/v1/users/U3", headers_json, None,
- 204, None, None)
+ res = engine.test(
+ "Delete User U1 by Name",
+ "DELETE",
+ "/admin/v1/users/U3",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res:
uid1 = None
if uid2:
- res = engine.test("Delete User U2 by ID", "DELETE", "/admin/v1/users/"+uid2, headers_json, None,
- 204, None, None)
+ res = engine.test(
+ "Delete User U2 by ID",
+ "DELETE",
+ "/admin/v1/users/" + uid2,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res:
uid2 = None
if pid1:
- res = engine.test("Delete Project P1 by Name", "DELETE", "/admin/v1/projects/P3", headers_json, None,
- 204, None, None)
+ res = engine.test(
+ "Delete Project P1 by Name",
+ "DELETE",
+ "/admin/v1/projects/P3",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res:
pid1 = None
if pid2:
- res = engine.test("Delete Project P2 by ID", "DELETE", "/admin/v1/projects/"+pid2, headers_json, None,
- 204, None, None)
+ res = engine.test(
+ "Delete Project P2 by ID",
+ "DELETE",
+ "/admin/v1/projects/" + pid2,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if res:
pid2 = None
# CLEANUP
if pid1:
- engine.test("Delete Project P1", "DELETE", "/admin/v1/projects/"+pid1, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete Project P1",
+ "DELETE",
+ "/admin/v1/projects/" + pid1,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if pid2:
- engine.test("Delete Project P2", "DELETE", "/admin/v1/projects/"+pid2, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete Project P2",
+ "DELETE",
+ "/admin/v1/projects/" + pid2,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if uid1:
- engine.test("Delete User U1", "DELETE", "/admin/v1/users/"+uid1, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete User U1",
+ "DELETE",
+ "/admin/v1/users/" + uid1,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
if uid2:
- engine.test("Delete User U2", "DELETE", "/admin/v1/users/"+uid2, headers_json, None, 204, None, None)
+ engine.test(
+ "Delete User U2",
+ "DELETE",
+ "/admin/v1/users/" + uid2,
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
- engine.remove_authorization() # To finish
+ engine.remove_authorization() # To finish
class TestProjectsDescriptors:
engine.get_autorization()
project_admin_id = None
- res = engine.test("Get my project Padmin", "GET", "/admin/v1/projects/{}".format(engine.project), headers_json,
- None, 200, r_header_json, "json")
+ res = engine.test(
+ "Get my project Padmin",
+ "GET",
+ "/admin/v1/projects/{}".format(engine.project),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if res:
response = res.json()
project_admin_id = response["_id"]
- engine.test("Create project Padmin", "POST", "/admin/v1/projects", headers_json,
- {"name": "Padmin", "admin": True}, (201, 204),
- {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
- engine.test("Create project P2", "POST", "/admin/v1/projects", headers_json, {"name": "P2"},
- (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
- engine.test("Create project P3", "POST", "/admin/v1/projects", headers_json, {"name": "P3"},
- (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
-
- engine.test("Create user U1", "POST", "/admin/v1/users", headers_json,
- {"username": "U1", "password": "pw1",
- "project_role_mappings": [{"project": "Padmin", "role": "system_admin"},
- {"project": "P2", "role": "project_admin"},
- {"project": "P3", "role": "project_admin"}],
- }, 201, {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
-
- engine.test("Onboard VNFD id1", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id1", headers_yaml,
- TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Create project Padmin",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "Padmin", "admin": True},
+ (201, 204),
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
+ engine.test(
+ "Create project P2",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P2"},
+ (201, 204),
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
+ engine.test(
+ "Create project P3",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "P3"},
+ (201, 204),
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
+
+ engine.test(
+ "Create user U1",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ {
+ "username": "U1",
+ "password": "pw1",
+ "project_role_mappings": [
+ {"project": "Padmin", "role": "system_admin"},
+ {"project": "P2", "role": "project_admin"},
+ {"project": "P3", "role": "project_admin"},
+ ],
+ },
+ 201,
+ {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+ "json",
+ )
+
+ engine.test(
+ "Onboard VNFD id1",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content?id=id1",
+ headers_yaml,
+ TestDescriptors.vnfd_empty,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
vnfd_ids.append(engine.last_id)
- engine.test("Onboard VNFD id2 PUBLIC", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
- headers_yaml, TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard VNFD id2 PUBLIC",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
+ headers_yaml,
+ TestDescriptors.vnfd_empty,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
vnfd_ids.append(engine.last_id)
- engine.test("Onboard VNFD id3", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE", headers_yaml,
- TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard VNFD id3",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE",
+ headers_yaml,
+ TestDescriptors.vnfd_empty,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
vnfd_ids.append(engine.last_id)
- res = engine.test("Get VNFD descriptors", "GET", "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
- headers_json, None, 200, r_header_json, "json")
+ res = engine.test(
+ "Get VNFD descriptors",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
response = res.json()
if len(response) != 3:
- logger.error("Only 3 vnfds should be present for project admin. {} listed".format(len(response)))
+ logger.error(
+ "Only 3 vnfds should be present for project admin. {} listed".format(
+ len(response)
+ )
+ )
engine.failed_tests += 1
# Change to other project Padmin
- res = engine.test("Change to user U1 project Padmin", "POST", "/admin/v1/tokens", headers_json,
- {"username": "U1", "password": "pw1", "project_id": "Padmin"}, (200, 201),
- r_header_json, "json")
+ res = engine.test(
+ "Change to user U1 project Padmin",
+ "POST",
+ "/admin/v1/tokens",
+ headers_json,
+ {"username": "U1", "password": "pw1", "project_id": "Padmin"},
+ (200, 201),
+ r_header_json,
+ "json",
+ )
if res:
response = res.json()
engine.set_header({"Authorization": "Bearer {}".format(response["id"])})
# list vnfds
- res = engine.test("List VNFD descriptors for Padmin", "GET", "/vnfpkgm/v1/vnf_packages",
- headers_json, None, 200, r_header_json, "json")
+ res = engine.test(
+ "List VNFD descriptors for Padmin",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages",
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
response = res.json()
if len(response) != 0:
- logger.error("Only 0 vnfds should be present for project Padmin. {} listed".format(len(response)))
+ logger.error(
+ "Only 0 vnfds should be present for project Padmin. {} listed".format(
+ len(response)
+ )
+ )
engine.failed_tests += 1
# list Public vnfds
- res = engine.test("List VNFD public descriptors", "GET", "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
- headers_json, None, 200, r_header_json, "json")
+ res = engine.test(
+ "List VNFD public descriptors",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
response = res.json()
if len(response) != 1:
- logger.error("Only 1 vnfds should be present for project Padmin. {} listed".format(len(response)))
+ logger.error(
+ "Only 1 vnfds should be present for project Padmin. {} listed".format(
+ len(response)
+ )
+ )
engine.failed_tests += 1
# list vnfds belonging to project "admin"
- res = engine.test("List VNFD of admin project", "GET",
- "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
- headers_json, None, 200, r_header_json, "json")
+ res = engine.test(
+ "List VNFD of admin project",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if res:
response = res.json()
if len(response) != 3:
- logger.error("Only 3 vnfds should be present for project Padmin. {} listed".format(len(response)))
+ logger.error(
+ "Only 3 vnfds should be present for project Padmin. {} listed".format(
+ len(response)
+ )
+ )
engine.failed_tests += 1
# Get Public vnfds
- engine.test("Get VNFD public descriptors", "GET", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
- headers_json, None, 200, r_header_json, "json")
+ engine.test(
+ "Get VNFD public descriptors",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
# Edit not owned vnfd
- engine.test("Edit VNFD ", "PATCH", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
- headers_yaml, '{name: pepe}', 404, r_header_yaml, "yaml")
+ engine.test(
+ "Edit VNFD ",
+ "PATCH",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+ headers_yaml,
+ "{name: pepe}",
+ 404,
+ r_header_yaml,
+ "yaml",
+ )
# Add to my catalog
- engine.test("Add VNFD id2 to my catalog", "PATCH", "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".
- format(vnfd_ids[1]), headers_json, None, 204, None, 0)
+ engine.test(
+ "Add VNFD id2 to my catalog",
+ "PATCH",
+ "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".format(vnfd_ids[1]),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
# Add a new vnfd
- engine.test("Onboard VNFD id4", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id4", headers_yaml,
- TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard VNFD id4",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content?id=id4",
+ headers_yaml,
+ TestDescriptors.vnfd_empty,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
vnfd_ids.append(engine.last_id)
# list vnfds
- res = engine.test("List VNFD public descriptors", "GET", "/vnfpkgm/v1/vnf_packages",
- headers_json, None, 200, r_header_json, "json")
+ res = engine.test(
+ "List VNFD public descriptors",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages",
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
response = res.json()
if len(response) != 2:
- logger.error("Only 2 vnfds should be present for project Padmin. {} listed".format(len(response)))
+ logger.error(
+ "Only 2 vnfds should be present for project Padmin. {} listed".format(
+ len(response)
+ )
+ )
engine.failed_tests += 1
if manual_check:
- input('VNFDs have been omboarded. Perform manual check and press enter to resume')
-
- test_rest.test("Delete VNFD id2", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
- headers_yaml, None, 204, None, 0)
+ input(
+ "VNFDs have been omboarded. Perform manual check and press enter to resume"
+ )
+
+ test_rest.test(
+ "Delete VNFD id2",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
# change to admin project
- engine.remove_authorization() # To force get authorization
+ engine.remove_authorization() # To force get authorization
engine.get_autorization()
- test_rest.test("Delete VNFD id1", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
- headers_yaml, None, 204, None, 0)
- test_rest.test("Delete VNFD id2", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
- headers_yaml, None, 204, None, 0)
- test_rest.test("Delete VNFD id3", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
- headers_yaml, None, 204, None, 0)
- test_rest.test("Delete VNFD id4", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
- headers_yaml, None, 404, r_header_yaml, "yaml")
- test_rest.test("Delete VNFD id4", "DELETE", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
- headers_yaml, None, 204, None, 0)
+ test_rest.test(
+ "Delete VNFD id1",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
+ test_rest.test(
+ "Delete VNFD id2",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
+ test_rest.test(
+ "Delete VNFD id3",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
+ test_rest.test(
+ "Delete VNFD id4",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ )
+ test_rest.test(
+ "Delete VNFD id4",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
# Get Public vnfds
- engine.test("Get VNFD deleted id1", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
- headers_json, None, 404, r_header_json, "json")
- engine.test("Get VNFD deleted id2", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
- headers_json, None, 404, r_header_json, "json")
- engine.test("Get VNFD deleted id3", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
- headers_json, None, 404, r_header_json, "json")
- engine.test("Get VNFD deleted id4", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
- headers_json, None, 404, r_header_json, "json")
+ engine.test(
+ "Get VNFD deleted id1",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
+ headers_json,
+ None,
+ 404,
+ r_header_json,
+ "json",
+ )
+ engine.test(
+ "Get VNFD deleted id2",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
+ headers_json,
+ None,
+ 404,
+ r_header_json,
+ "json",
+ )
+ engine.test(
+ "Get VNFD deleted id3",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
+ headers_json,
+ None,
+ 404,
+ r_header_json,
+ "json",
+ )
+ engine.test(
+ "Get VNFD deleted id4",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+ headers_json,
+ None,
+ 404,
+ r_header_json,
+ "json",
+ )
- engine.test("Delete user U1", "DELETE", "/admin/v1/users/U1", headers_json, None, 204, None, None)
- engine.test("Delete project Padmin", "DELETE", "/admin/v1/projects/Padmin", headers_json, None, 204, None, None)
- engine.test("Delete project P2", "DELETE", "/admin/v1/projects/P2", headers_json, None, 204, None, None)
- engine.test("Delete project P3", "DELETE", "/admin/v1/projects/P3", headers_json, None, 204, None, None)
+ engine.test(
+ "Delete user U1",
+ "DELETE",
+ "/admin/v1/users/U1",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
+ engine.test(
+ "Delete project Padmin",
+ "DELETE",
+ "/admin/v1/projects/Padmin",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
+ engine.test(
+ "Delete project P2",
+ "DELETE",
+ "/admin/v1/projects/P2",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
+ engine.test(
+ "Delete project P3",
+ "DELETE",
+ "/admin/v1/projects/P3",
+ headers_json,
+ None,
+ 204,
+ None,
+ None,
+ )
class TestFakeVim:
"vim_tenant_name": "vimTenant",
"vim_user": "user",
"vim_password": "password",
- "config": {"config_param": 1}
+ "config": {"config_param": 1},
}
self.sdn = {
"name": "sdn-name",
"type": "opendaylight",
"version": "3.5.6",
"user": "user",
- "password": "passwd"
+ "password": "passwd",
}
self.port_mapping = [
- {"compute_node": "compute node 1",
- "ports": [{"pci": "0000:81:00.0", "switch_port": "port-2/1", "switch_mac": "52:54:00:94:21:21"},
- {"pci": "0000:81:00.1", "switch_port": "port-2/2", "switch_mac": "52:54:00:94:21:22"}
- ]},
- {"compute_node": "compute node 2",
- "ports": [{"pci": "0000:81:00.0", "switch_port": "port-2/3", "switch_mac": "52:54:00:94:21:23"},
- {"pci": "0000:81:00.1", "switch_port": "port-2/4", "switch_mac": "52:54:00:94:21:24"}
- ]}
+ {
+ "compute_node": "compute node 1",
+ "ports": [
+ {
+ "pci": "0000:81:00.0",
+ "switch_port": "port-2/1",
+ "switch_mac": "52:54:00:94:21:21",
+ },
+ {
+ "pci": "0000:81:00.1",
+ "switch_port": "port-2/2",
+ "switch_mac": "52:54:00:94:21:22",
+ },
+ ],
+ },
+ {
+ "compute_node": "compute node 2",
+ "ports": [
+ {
+ "pci": "0000:81:00.0",
+ "switch_port": "port-2/3",
+ "switch_mac": "52:54:00:94:21:23",
+ },
+ {
+ "pci": "0000:81:00.1",
+ "switch_port": "port-2/4",
+ "switch_mac": "52:54:00:94:21:24",
+ },
+ ],
+ },
]
def run(self, engine, test_osm, manual_check, test_params=None):
engine.set_test_name("FakeVim")
engine.get_autorization()
- engine.test("Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, (201, 202),
- {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json")
+ engine.test(
+ "Create VIM",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_json,
+ self.vim,
+ (201, 202),
+ {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
+ "json",
+ )
vim_id = engine.last_id
- engine.test("Create VIM without name, bad schema", "POST", "/admin/v1/vim_accounts", headers_json,
- vim_bad, 422, None, headers_json)
- engine.test("Create VIM name repeated", "POST", "/admin/v1/vim_accounts", headers_json, self.vim,
- 409, None, headers_json)
- engine.test("Show VIMs", "GET", "/admin/v1/vim_accounts", headers_yaml, None, 200, r_header_yaml,
- "yaml")
- engine.test("Show VIM", "GET", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml, None, 200,
- r_header_yaml, "yaml")
+ engine.test(
+ "Create VIM without name, bad schema",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_json,
+ vim_bad,
+ 422,
+ None,
+ headers_json,
+ )
+ engine.test(
+ "Create VIM name repeated",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_json,
+ self.vim,
+ 409,
+ None,
+ headers_json,
+ )
+ engine.test(
+ "Show VIMs",
+ "GET",
+ "/admin/v1/vim_accounts",
+ headers_yaml,
+ None,
+ 200,
+ r_header_yaml,
+ "yaml",
+ )
+ engine.test(
+ "Show VIM",
+ "GET",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_yaml,
+ None,
+ 200,
+ r_header_yaml,
+ "yaml",
+ )
if not test_osm:
# delete with FORCE
- engine.test("Delete VIM", "DELETE", "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id), headers_yaml,
- None, 202, None, 0)
- engine.test("Check VIM is deleted", "GET", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml, None,
- 404, r_header_yaml, "yaml")
+ engine.test(
+ "Delete VIM",
+ "DELETE",
+ "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
+ headers_yaml,
+ None,
+ 202,
+ None,
+ 0,
+ )
+ engine.test(
+ "Check VIM is deleted",
+ "GET",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ )
else:
# delete and wait until is really deleted
- engine.test("Delete VIM", "DELETE", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml, None, 202,
- None, 0)
- engine.wait_until_delete("/admin/v1/vim_accounts/{}".format(vim_id), timeout)
+ engine.test(
+ "Delete VIM",
+ "DELETE",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_yaml,
+ None,
+ 202,
+ None,
+ 0,
+ )
+ engine.wait_until_delete(
+ "/admin/v1/vim_accounts/{}".format(vim_id), timeout
+ )
class TestVIMSDN(TestFakeVim):
"wim_url": "http://localhost:/wim",
"user": "user",
"password": "password",
- "config": {"config_param": 1}
+ "config": {"config_param": 1},
}
def run(self, engine, test_osm, manual_check, test_params=None):
engine.set_test_name("VimSdn")
engine.get_autorization()
# Added SDN
- engine.test("Create SDN", "POST", "/admin/v1/sdns", headers_json, self.sdn, (201, 202),
- {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"}, "json")
+ engine.test(
+ "Create SDN",
+ "POST",
+ "/admin/v1/sdns",
+ headers_json,
+ self.sdn,
+ (201, 202),
+ {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"},
+ "json",
+ )
sdnc_id = engine.last_id
# sleep(5)
# Edit SDN
- engine.test("Edit SDN", "PATCH", "/admin/v1/sdns/{}".format(sdnc_id), headers_json, {"name": "new_sdn_name"},
- (202, 204), None, None)
+ engine.test(
+ "Edit SDN",
+ "PATCH",
+ "/admin/v1/sdns/{}".format(sdnc_id),
+ headers_json,
+ {"name": "new_sdn_name"},
+ (202, 204),
+ None,
+ None,
+ )
# sleep(5)
# VIM with SDN
self.vim["config"]["sdn-controller"] = sdnc_id
self.vim["config"]["sdn-port-mapping"] = self.port_mapping
- engine.test("Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, (200, 202, 201),
- {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json"),
+ engine.test(
+ "Create VIM",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_json,
+ self.vim,
+ (200, 202, 201),
+ {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
+ "json",
+ ),
vim_id = engine.last_id
self.port_mapping[0]["compute_node"] = "compute node XX"
- engine.test("Edit VIM change port-mapping", "PUT", "/admin/v1/vim_accounts/{}".format(vim_id), headers_json,
- {"config": {"sdn-port-mapping": self.port_mapping}}, (202, 204), None, None)
- engine.test("Edit VIM remove port-mapping", "PUT", "/admin/v1/vim_accounts/{}".format(vim_id), headers_json,
- {"config": {"sdn-port-mapping": None}}, (202, 204), None, None)
+ engine.test(
+ "Edit VIM change port-mapping",
+ "PUT",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_json,
+ {"config": {"sdn-port-mapping": self.port_mapping}},
+ (202, 204),
+ None,
+ None,
+ )
+ engine.test(
+ "Edit VIM remove port-mapping",
+ "PUT",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_json,
+ {"config": {"sdn-port-mapping": None}},
+ (202, 204),
+ None,
+ None,
+ )
- engine.test("Create WIM", "POST", "/admin/v1/wim_accounts", headers_json, self.wim, (200, 202, 201),
- {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"}, "json"),
+ engine.test(
+ "Create WIM",
+ "POST",
+ "/admin/v1/wim_accounts",
+ headers_json,
+ self.wim,
+ (200, 202, 201),
+ {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"},
+ "json",
+ ),
wim_id = engine.last_id
if not test_osm:
# delete with FORCE
- engine.test("Delete VIM remove port-mapping", "DELETE",
- "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id), headers_json, None, 202, None, 0)
- engine.test("Delete SDNC", "DELETE", "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id), headers_json, None,
- 202, None, 0)
-
- engine.test("Delete WIM", "DELETE",
- "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id), headers_json, None, 202, None, 0)
- engine.test("Check VIM is deleted", "GET", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml,
- None, 404, r_header_yaml, "yaml")
- engine.test("Check SDN is deleted", "GET", "/admin/v1/sdns/{}".format(sdnc_id), headers_yaml, None,
- 404, r_header_yaml, "yaml")
- engine.test("Check WIM is deleted", "GET", "/admin/v1/wim_accounts/{}".format(wim_id), headers_yaml,
- None, 404, r_header_yaml, "yaml")
+ engine.test(
+ "Delete VIM remove port-mapping",
+ "DELETE",
+ "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
+ headers_json,
+ None,
+ 202,
+ None,
+ 0,
+ )
+ engine.test(
+ "Delete SDNC",
+ "DELETE",
+ "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id),
+ headers_json,
+ None,
+ 202,
+ None,
+ 0,
+ )
+
+ engine.test(
+ "Delete WIM",
+ "DELETE",
+ "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id),
+ headers_json,
+ None,
+ 202,
+ None,
+ 0,
+ )
+ engine.test(
+ "Check VIM is deleted",
+ "GET",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ )
+ engine.test(
+ "Check SDN is deleted",
+ "GET",
+ "/admin/v1/sdns/{}".format(sdnc_id),
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ )
+ engine.test(
+ "Check WIM is deleted",
+ "GET",
+ "/admin/v1/wim_accounts/{}".format(wim_id),
+ headers_yaml,
+ None,
+ 404,
+ r_header_yaml,
+ "yaml",
+ )
else:
if manual_check:
- input('VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume')
+ input(
+ "VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume"
+ )
# delete and wait until is really deleted
- engine.test("Delete VIM remove port-mapping", "DELETE", "/admin/v1/vim_accounts/{}".format(vim_id),
- headers_json, None, (202, 201, 204), None, 0)
- engine.test("Delete SDN", "DELETE", "/admin/v1/sdns/{}".format(sdnc_id), headers_json, None,
- (202, 201, 204), None, 0)
- engine.test("Delete VIM", "DELETE", "/admin/v1/wim_accounts/{}".format(wim_id),
- headers_json, None, (202, 201, 204), None, 0)
- engine.wait_until_delete("/admin/v1/vim_accounts/{}".format(vim_id), timeout)
+ engine.test(
+ "Delete VIM remove port-mapping",
+ "DELETE",
+ "/admin/v1/vim_accounts/{}".format(vim_id),
+ headers_json,
+ None,
+ (202, 201, 204),
+ None,
+ 0,
+ )
+ engine.test(
+ "Delete SDN",
+ "DELETE",
+ "/admin/v1/sdns/{}".format(sdnc_id),
+ headers_json,
+ None,
+ (202, 201, 204),
+ None,
+ 0,
+ )
+ engine.test(
+ "Delete VIM",
+ "DELETE",
+ "/admin/v1/wim_accounts/{}".format(wim_id),
+ headers_json,
+ None,
+ (202, 201, 204),
+ None,
+ 0,
+ )
+ engine.wait_until_delete(
+ "/admin/v1/vim_accounts/{}".format(vim_id), timeout
+ )
engine.wait_until_delete("/admin/v1/sdns/{}".format(sdnc_id), timeout)
- engine.wait_until_delete("/admin/v1/wim_accounts/{}".format(wim_id), timeout)
+ engine.wait_until_delete(
+ "/admin/v1/wim_accounts/{}".format(wim_id), timeout
+ )
class TestDeploy:
self.vim_id = None
self.ns_id = None
self.vnfds_id = []
- self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+ self.descriptor_url = (
+ "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+ )
self.vnfd_filenames = ("cirros_vnf.tar.gz",)
self.nsd_filename = "cirros_2vnf_ns.tar.gz"
self.descriptor_edit = None
if "/" in vnfd_filename:
vnfd_filename_path = vnfd_filename
if not os.path.exists(vnfd_filename_path):
- raise TestException("File '{}' does not exist".format(vnfd_filename_path))
+ raise TestException(
+ "File '{}' does not exist".format(vnfd_filename_path)
+ )
else:
vnfd_filename_path = temp_dir + vnfd_filename
if not os.path.exists(vnfd_filename_path):
with open(vnfd_filename_path, "wb") as file:
response = requests.get(self.descriptor_url + vnfd_filename)
if response.status_code >= 300:
- raise TestException("Error downloading descriptor from '{}': {}".format(
- self.descriptor_url + vnfd_filename, response.status_code))
+ raise TestException(
+ "Error downloading descriptor from '{}': {}".format(
+ self.descriptor_url + vnfd_filename,
+ response.status_code,
+ )
+ )
file.write(response.content)
if vnfd_filename_path.endswith(".yaml"):
headers = headers_yaml
headers = headers_zip_yaml
if randint(0, 1) == 0:
# vnfd CREATE AND UPLOAD in one step:
- engine.test("Onboard VNFD in one step", "POST",
- "/vnfpkgm/v1/vnf_packages_content" + self.qforce, headers, "@b" + vnfd_filename_path, 201,
- r_headers_yaml_location_vnfd,
- "yaml")
+ engine.test(
+ "Onboard VNFD in one step",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content" + self.qforce,
+ headers,
+ "@b" + vnfd_filename_path,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
self.vnfds_id.append(engine.last_id)
else:
# vnfd CREATE AND UPLOAD ZIP
- engine.test("Onboard VNFD step 1", "POST", "/vnfpkgm/v1/vnf_packages",
- headers_json, None, 201,
- {"Location": "/vnfpkgm/v1/vnf_packages/", "Content-Type": "application/json"}, "json")
+ engine.test(
+ "Onboard VNFD step 1",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages",
+ headers_json,
+ None,
+ 201,
+ {
+ "Location": "/vnfpkgm/v1/vnf_packages/",
+ "Content-Type": "application/json",
+ },
+ "json",
+ )
self.vnfds_id.append(engine.last_id)
- engine.test("Onboard VNFD step 2 as ZIP", "PUT",
- "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
- headers, "@b" + vnfd_filename_path, 204, None, 0)
+ engine.test(
+ "Onboard VNFD step 2 as ZIP",
+ "PUT",
+ "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
+ headers,
+ "@b" + vnfd_filename_path,
+ 204,
+ None,
+ 0,
+ )
if self.descriptor_edit:
if "vnfd{}".format(vnfd_index) in self.descriptor_edit:
# Modify VNFD
- engine.test("Edit VNFD ", "PATCH",
- "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
- headers_yaml, self.descriptor_edit["vnfd{}".format(vnfd_index)], 204, None, None)
+ engine.test(
+ "Edit VNFD ",
+ "PATCH",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
+ headers_yaml,
+ self.descriptor_edit["vnfd{}".format(vnfd_index)],
+ 204,
+ None,
+ None,
+ )
if "/" in self.nsd_filename:
nsd_filename_path = self.nsd_filename
if not os.path.exists(nsd_filename_path):
- raise TestException("File '{}' does not exist".format(nsd_filename_path))
+ raise TestException(
+ "File '{}' does not exist".format(nsd_filename_path)
+ )
else:
nsd_filename_path = temp_dir + self.nsd_filename
if not os.path.exists(nsd_filename_path):
with open(nsd_filename_path, "wb") as file:
response = requests.get(self.descriptor_url + self.nsd_filename)
if response.status_code >= 300:
- raise TestException("Error downloading descriptor from '{}': {}".format(
- self.descriptor_url + self.nsd_filename, response.status_code))
+ raise TestException(
+ "Error downloading descriptor from '{}': {}".format(
+ self.descriptor_url + self.nsd_filename,
+ response.status_code,
+ )
+ )
file.write(response.content)
if nsd_filename_path.endswith(".yaml"):
headers = headers_yaml
if randint(0, 1) == 0:
# nsd CREATE AND UPLOAD in one step:
- engine.test("Onboard NSD in one step", "POST",
- "/nsd/v1/ns_descriptors_content" + self.qforce, headers, "@b" + nsd_filename_path, 201,
- r_headers_yaml_location_nsd, yaml)
+ engine.test(
+ "Onboard NSD in one step",
+ "POST",
+ "/nsd/v1/ns_descriptors_content" + self.qforce,
+ headers,
+ "@b" + nsd_filename_path,
+ 201,
+ r_headers_yaml_location_nsd,
+ yaml,
+ )
self.nsd_id = engine.last_id
else:
# nsd CREATE AND UPLOAD ZIP
- engine.test("Onboard NSD step 1", "POST", "/nsd/v1/ns_descriptors",
- headers_json, None, 201,
- {"Location": "/nsd/v1/ns_descriptors/", "Content-Type": "application/json"}, "json")
+ engine.test(
+ "Onboard NSD step 1",
+ "POST",
+ "/nsd/v1/ns_descriptors",
+ headers_json,
+ None,
+ 201,
+ {
+ "Location": "/nsd/v1/ns_descriptors/",
+ "Content-Type": "application/json",
+ },
+ "json",
+ )
self.nsd_id = engine.last_id
- engine.test("Onboard NSD step 2 as ZIP", "PUT",
- "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
- headers, "@b" + nsd_filename_path, 204, None, 0)
+ engine.test(
+ "Onboard NSD step 2 as ZIP",
+ "PUT",
+ "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
+ headers,
+ "@b" + nsd_filename_path,
+ 204,
+ None,
+ 0,
+ )
if self.descriptor_edit and "nsd" in self.descriptor_edit:
# Modify NSD
- engine.test("Edit NSD ", "PATCH",
- "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
- headers_yaml, self.descriptor_edit["nsd"], 204, None, None)
+ engine.test(
+ "Edit NSD ",
+ "PATCH",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+ headers_yaml,
+ self.descriptor_edit["nsd"],
+ 204,
+ None,
+ None,
+ )
def delete_descriptors(self, engine):
# delete descriptors
- engine.test("Delete NSSD SOL005", "DELETE",
- "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
- headers_yaml, None, 204, None, 0)
+ engine.test(
+ "Delete NSSD SOL005",
+ "DELETE",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
for vnfd_id in self.vnfds_id:
- engine.test("Delete VNFD SOL005", "DELETE",
- "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), headers_yaml, None, 204, None, 0)
+ engine.test(
+ "Delete VNFD SOL005",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
def instantiate(self, engine, ns_data):
ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256)
# create NS Two steps
- r = engine.test("Create NS step 1", "POST", "/nslcm/v1/ns_instances",
- headers_yaml, ns_data_text, (201, 202),
- {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"}, "yaml")
+ r = engine.test(
+ "Create NS step 1",
+ "POST",
+ "/nslcm/v1/ns_instances",
+ headers_yaml,
+ ns_data_text,
+ (201, 202),
+ {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"},
+ "yaml",
+ )
if not r:
return
self.ns_id = engine.last_id
- engine.test("Instantiate NS step 2", "POST",
- "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id), headers_yaml, ns_data_text,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ engine.test(
+ "Instantiate NS step 2",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id),
+ headers_yaml,
+ ns_data_text,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop_id = engine.last_id
if test_osm:
def terminate(self, engine):
# remove deployment
if test_osm:
- engine.test("Terminate NS", "POST", "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id), headers_yaml,
- None, (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ engine.test(
+ "Terminate NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id),
+ headers_yaml,
+ None,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_id = engine.last_id
# Wait until status is Ok
engine.wait_operation_ready("ns", nslcmop2_id, timeout_deploy)
- engine.test("Delete NS", "DELETE", "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_yaml, None,
- 204, None, 0)
+ engine.test(
+ "Delete NS",
+ "DELETE",
+ "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
else:
- engine.test("Delete NS with FORCE", "DELETE", "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
- headers_yaml, None, 204, None, 0)
+ engine.test(
+ "Delete NS with FORCE",
+ "DELETE",
+ "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
# check all it is deleted
- engine.test("Check NS is deleted", "GET", "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_yaml, None,
- 404, None, "yaml")
- r = engine.test("Check NSLCMOPs are deleted", "GET",
- "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id), headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Check NS is deleted",
+ "GET",
+ "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+ headers_yaml,
+ None,
+ 404,
+ None,
+ "yaml",
+ )
+ r = engine.test(
+ "Check NSLCMOPs are deleted",
+ "GET",
+ "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if not r:
return
nslcmops = r.json()
if not isinstance(nslcmops, list) or nslcmops:
- raise TestException("NS {} deleted but with ns_lcm_op_occ active: {}".format(self.ns_id, nslcmops))
-
- def test_ns(self, engine, test_osm, commands=None, users=None, passwds=None, keys=None, timeout=0):
-
- r = engine.test("GET VNFR IDs", "GET",
- "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_json, None,
- 200, r_header_json, "json")
+ raise TestException(
+ "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+ self.ns_id, nslcmops
+ )
+ )
+
+ def test_ns(
+ self,
+ engine,
+ test_osm,
+ commands=None,
+ users=None,
+ passwds=None,
+ keys=None,
+ timeout=0,
+ ):
+
+ r = engine.test(
+ "GET VNFR IDs",
+ "GET",
+ "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if not r:
return
ns_data = r.json()
- vnfr_list = ns_data['constituent-vnfr-ref']
+ vnfr_list = ns_data["constituent-vnfr-ref"]
time = 0
_commands = commands if commands is not None else self.commands
_users = users if users is not None else self.users
# vnfr_list=[d8272263-6bd3-4680-84ca-6a4be23b3f2d, 88b22e2f-994a-4b61-94fd-4a3c90de3dc4]
for vnfr_id in vnfr_list:
- r = engine.test("Get VNFR to get IP_ADDRESS", "GET",
- "/nslcm/v1/vnfrs/{}".format(vnfr_id), headers_json, None,
- 200, r_header_json, "json")
+ r = engine.test(
+ "Get VNFR to get IP_ADDRESS",
+ "GET",
+ "/nslcm/v1/vnfrs/{}".format(vnfr_id),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if not r:
continue
vnfr_data = r.json()
vnf_index = str(vnfr_data["member-vnf-index-ref"])
ip_address = self.get_vnfr_ip(engine, vnf_index)
- description = "Exec command='{}' at VNFR={} IP={}".format(_commands.get(vnf_index)[0], vnf_index,
- ip_address)
+ description = "Exec command='{}' at VNFR={} IP={}".format(
+ _commands.get(vnf_index)[0], vnf_index, ip_address
+ )
engine.step += 1
- test_description = "{}{} {}".format(engine.test_name, engine.step, description)
+ test_description = "{}{} {}".format(
+ engine.test_name, engine.step, description
+ )
logger.warning(test_description)
while _timeout >= time:
- result, message = self.do_checks([ip_address],
- vnf_index=vnfr_data["member-vnf-index-ref"],
- commands=_commands.get(vnf_index), user=_users.get(vnf_index),
- passwd=_passwds.get(vnf_index), key=_keys.get(vnf_index))
+ result, message = self.do_checks(
+ [ip_address],
+ vnf_index=vnfr_data["member-vnf-index-ref"],
+ commands=_commands.get(vnf_index),
+ user=_users.get(vnf_index),
+ passwd=_passwds.get(vnf_index),
+ key=_keys.get(vnf_index),
+ )
if result == 1:
engine.passed_tests += 1
logger.debug(message)
logger.error(message)
else:
engine.failed_tests += 1
- logger.error("VNFR {} has not mgmt address. Check failed".format(vnf_index))
+ logger.error(
+ "VNFR {} has not mgmt address. Check failed".format(vnf_index)
+ )
def do_checks(self, ip, vnf_index, commands=[], user=None, passwd=None, key=None):
try:
from pssh.utils import load_private_key
from ssh2 import exceptions as ssh2Exception
except ImportError as e:
- logger.critical("Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
- "parallel-ssh urllib3': {}".format(e))
+ logger.critical(
+ "Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
+ "parallel-ssh urllib3': {}".format(e)
+ )
return -1, "install needed packages 'pip3 install parallel-ssh urllib3'"
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
try:
else:
pkey = None
- client = ParallelSSHClient(ip, user=user, password=passwd, pkey=pkey, proxy_host=p_host,
- proxy_user=p_user, proxy_password=p_password, timeout=10, num_retries=0)
+ client = ParallelSSHClient(
+ ip,
+ user=user,
+ password=passwd,
+ pkey=pkey,
+ proxy_host=p_host,
+ proxy_user=p_user,
+ proxy_password=p_password,
+ timeout=10,
+ num_retries=0,
+ )
for cmd in commands:
output = client.run_command(cmd)
client.join(output)
if output[ip[0]].exit_code:
- return -1, "VNFR {} command '{}' returns error: '{}'".format(ip[0], cmd,
- "\n".join(output[ip[0]].stderr))
+ return -1, "VNFR {} command '{}' returns error: '{}'".format(
+ ip[0], cmd, "\n".join(output[ip[0]].stderr)
+ )
else:
return 1, "VNFR {} command '{}' successful".format(ip[0], cmd)
- except (ssh2Exception.ChannelFailure, ssh2Exception.SocketDisconnectError, ssh2Exception.SocketTimeout,
- ssh2Exception.SocketRecvError) as e:
+ except (
+ ssh2Exception.ChannelFailure,
+ ssh2Exception.SocketDisconnectError,
+ ssh2Exception.SocketTimeout,
+ ssh2Exception.SocketRecvError,
+ ) as e:
return 0, "Timeout accessing the VNFR {}: {}".format(ip[0], str(e))
except Exception as e:
return -1, "ERROR checking the VNFR {}: {}".format(ip[0], str(e))
# create real VIM if not exist
self.vim_id = engine.get_create_vim(test_osm)
- ns_data = {"nsDescription": "default description", "nsName": nsname, "nsdId": self.nsd_id,
- "vimAccountId": self.vim_id}
+ ns_data = {
+ "nsDescription": "default description",
+ "nsName": nsname,
+ "nsdId": self.nsd_id,
+ "vimAccountId": self.vim_id,
+ }
if self.ns_params:
ns_data.update(self.ns_params)
if test_params and test_params.get("ns-config"):
self.instantiate(engine, ns_data)
if manual_check:
- input('NS has been deployed. Perform manual check and press enter to resume')
+ input(
+ "NS has been deployed. Perform manual check and press enter to resume"
+ )
if test_osm and self.commands:
self.test_ns(engine, test_osm)
self.additional_operations(engine, test_osm, manual_check)
def get_vnfr_ip(self, engine, vnfr_index_wanted):
# If the IP address list has been obtained before, it has been stored in 'vnfr_ip_list'
ip = self.vnfr_ip_list.get(vnfr_index_wanted, "")
- if (ip):
+ if ip:
return self.get_first_ip(ip)
- r = engine.test("Get VNFR to get IP_ADDRESS", "GET",
- "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
- vnfr_index_wanted, self.ns_id), headers_json, None,
- 200, r_header_json, "json")
+ r = engine.test(
+ "Get VNFR to get IP_ADDRESS",
+ "GET",
+ "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
+ vnfr_index_wanted, self.ns_id
+ ),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if not r:
return ""
vnfr_data = r.json()
self.test_name = "CIRROS"
self.vnfd_filenames = ("cirros_vnf.tar.gz",)
self.nsd_filename = "cirros_2vnf_ns.tar.gz"
- self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
- self.users = {'1': "cirros", '2': "cirros"}
- self.passwords = {'1': "cubswin:)", '2': "cubswin:)"}
+ self.commands = {
+ "1": [
+ "ls -lrt",
+ ],
+ "2": [
+ "ls -lrt",
+ ],
+ }
+ self.users = {"1": "cirros", "2": "cirros"}
+ self.passwords = {"1": "cubswin:)", "2": "cubswin:)"}
def terminate(self, engine):
# Make a delete in one step, overriding the normal two step of TestDeploy that launched terminate and delete
if test_osm:
- engine.test("Terminate and delete NS in one step", "DELETE", "/nslcm/v1/ns_instances_content/{}".
- format(self.ns_id), headers_yaml, None, 202, None, "yaml")
-
- engine .wait_until_delete("/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy)
+ engine.test(
+ "Terminate and delete NS in one step",
+ "DELETE",
+ "/nslcm/v1/ns_instances_content/{}".format(self.ns_id),
+ headers_yaml,
+ None,
+ 202,
+ None,
+ "yaml",
+ )
+
+ engine.wait_until_delete(
+ "/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy
+ )
else:
- engine.test("Delete NS with FORCE", "DELETE", "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
- headers_yaml, None, 204, None, 0)
+ engine.test(
+ "Delete NS with FORCE",
+ "DELETE",
+ "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
# check all it is deleted
- engine.test("Check NS is deleted", "GET", "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_yaml, None,
- 404, None, "yaml")
- r = engine.test("Check NSLCMOPs are deleted", "GET",
- "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id), headers_json, None,
- 200, None, "json")
+ engine.test(
+ "Check NS is deleted",
+ "GET",
+ "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+ headers_yaml,
+ None,
+ 404,
+ None,
+ "yaml",
+ )
+ r = engine.test(
+ "Check NSLCMOPs are deleted",
+ "GET",
+ "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+ headers_json,
+ None,
+ 200,
+ None,
+ "json",
+ )
if not r:
return
nslcmops = r.json()
if not isinstance(nslcmops, list) or nslcmops:
- raise TestException("NS {} deleted but with ns_lcm_op_occ active: {}".format(self.ns_id, nslcmops))
+ raise TestException(
+ "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+ self.ns_id, nslcmops
+ )
+ )
class TestDeployHackfest1(TestDeploy):
class TestDeployHackfestCirrosScaling(TestDeploy):
- description = "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
+ description = (
+ "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
+ )
def __init__(self):
super().__init__()
# Modify VNFD to add scaling and count=2
self.descriptor_edit = {
"vnfd0": {
- "vdu": {
- "$id: 'cirros_vnfd-VM'": {"count": 2}
- },
- "scaling-group-descriptor": [{
- "name": "scale_cirros",
- "max-instance-count": 2,
- "vdu": [{
- "vdu-id-ref": "cirros_vnfd-VM",
- "count": 2
- }]
- }]
+ "vdu": {"$id: 'cirros_vnfd-VM'": {"count": 2}},
+ "scaling-group-descriptor": [
+ {
+ "name": "scale_cirros",
+ "max-instance-count": 2,
+ "vdu": [{"vdu-id-ref": "cirros_vnfd-VM", "count": 2}],
+ }
+ ],
}
}
if not test_osm:
return
# 2 perform scale out twice
- payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
- '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+ payload = (
+ "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
+ '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+ )
for i in range(0, 2):
- engine.test("Execute scale action over NS", "POST",
- "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ engine.test(
+ "Execute scale action over NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+ headers_yaml,
+ payload,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_scale_out = engine.last_id
engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
if manual_check:
- input('NS scale out done. Check that two more vdus are there')
+ input("NS scale out done. Check that two more vdus are there")
# TODO check automatic
# 2 perform scale in
- payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
- '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+ payload = (
+ "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
+ '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+ )
for i in range(0, 2):
- engine.test("Execute scale IN action over NS", "POST",
- "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ engine.test(
+ "Execute scale IN action over NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+ headers_yaml,
+ payload,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_scale_in = engine.last_id
engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
if manual_check:
- input('NS scale in done. Check that two less vdus are there')
+ input("NS scale in done. Check that two less vdus are there")
# TODO check automatic
# perform scale in that must fail as reached limit
- engine.test("Execute scale IN out of limit action over NS", "POST",
- "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ engine.test(
+ "Execute scale IN out of limit action over NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+ headers_yaml,
+ payload,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_scale_in = engine.last_id
- engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True)
+ engine.wait_operation_ready(
+ "ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True
+ )
class TestDeployIpMac(TestDeploy):
def __init__(self):
super().__init__()
self.test_name = "SetIpMac"
- self.vnfd_filenames = ("vnfd_2vdu_set_ip_mac2.yaml", "vnfd_2vdu_set_ip_mac.yaml")
+ self.vnfd_filenames = (
+ "vnfd_2vdu_set_ip_mac2.yaml",
+ "vnfd_2vdu_set_ip_mac.yaml",
+ )
self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml"
- self.descriptor_url = \
- "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
- self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
- self.users = {'1': "osm", '2': "osm"}
- self.passwords = {'1': "osm4u", '2': "osm4u"}
+ self.descriptor_url = "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
+ self.commands = {
+ "1": [
+ "ls -lrt",
+ ],
+ "2": [
+ "ls -lrt",
+ ],
+ }
+ self.users = {"1": "osm", "2": "osm"}
+ self.passwords = {"1": "osm4u", "2": "osm4u"}
self.timeout = 360
def run(self, engine, test_osm, manual_check, test_params=None):
"member-vnf-index": "1",
"internal-vld": [
{
- "name": "internal_vld1", # net_internal
+ "name": "internal_vld1", # net_internal
"ip-profile": {
"ip-version": "ipv4",
"subnet-address": "10.9.8.0/24",
- "dhcp-params": {"count": 100, "start-address": "10.9.8.100"}
+ "dhcp-params": {
+ "count": 100,
+ "start-address": "10.9.8.100",
+ },
},
"internal-connection-point": [
{
{
"id-ref": "eth3",
"ip-address": "10.9.8.3",
- }
- ]
+ },
+ ],
},
],
-
"vdu": [
{
"id": "VM1",
# "name": "iface11",
# "floating-ip-required": True,
# },
- {
- "name": "iface13",
- "mac-address": "52:33:44:55:66:13"
- },
+ {"name": "iface13", "mac-address": "52:33:44:55:66:13"},
],
},
{
{
"name": "iface21",
"ip-address": "10.31.31.22",
- "mac-address": "52:33:44:55:66:21"
+ "mac-address": "52:33:44:55:66:21",
},
],
},
- ]
+ ],
},
]
}
- super().run(engine, test_osm, manual_check, test_params={"ns-config": instantiation_params})
+ super().run(
+ engine,
+ test_osm,
+ manual_check,
+ test_params={"ns-config": instantiation_params},
+ )
class TestDeployHackfest4(TestDeploy):
self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",)
self.nsd_filename = "hackfest_4_nsd.tar.gz"
self.uses_configuration = True
- self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
- self.users = {'1': "ubuntu", '2': "ubuntu"}
- self.passwords = {'1': "osm4u", '2': "osm4u"}
+ self.commands = {
+ "1": [
+ "ls -lrt",
+ ],
+ "2": [
+ "ls -lrt",
+ ],
+ }
+ self.users = {"1": "ubuntu", "2": "ubuntu"}
+ self.passwords = {"1": "osm4u", "2": "osm4u"}
# Modify VNFD to add scaling
# self.descriptor_edit = {
# "vnfd0": {
self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
self.uses_configuration = True
- self.commands = {'1': ['ls -lrt /home/ubuntu/first-touch'], '2': ['ls -lrt /home/ubuntu/first-touch']}
- self.users = {'1': "ubuntu", '2': "ubuntu"}
- self.passwords = {'1': "osm4u", '2': "osm4u"}
+ self.commands = {
+ "1": ["ls -lrt /home/ubuntu/first-touch"],
+ "2": ["ls -lrt /home/ubuntu/first-touch"],
+ }
+ self.users = {"1": "ubuntu", "2": "ubuntu"}
+ self.passwords = {"1": "osm4u", "2": "osm4u"}
self.descriptor_edit = {
"vnfd0": yaml.safe_load(
"""
parameter:
- name: filename
value: '/home/ubuntu/last-touch2'
- """)
+ """
+ )
}
def additional_operations(self, engine, test_osm, manual_check):
# 1 perform action
vnfr_index_selected = "2"
payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
- engine.test("Exec service primitive over NS", "POST",
- "/nslcm/v1/ns_instances/{}/action".format(self.ns_id), headers_yaml, payload,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ engine.test(
+ "Exec service primitive over NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/action".format(self.ns_id),
+ headers_yaml,
+ payload,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_action = engine.last_id
# Wait until status is Ok
engine.wait_operation_ready("ns", nslcmop2_action, timeout_deploy)
if manual_check:
input(
"NS service primitive has been executed."
- "Check that file /home/ubuntu/OSMTESTNBI is present at {}".
- format(vnfr_ip))
+ "Check that file /home/ubuntu/OSMTESTNBI is present at {}".format(
+ vnfr_ip
+ )
+ )
if test_osm:
- commands = {'1': [''], '2': ['ls -lrt /home/ubuntu/OSMTESTNBI', ]}
+ commands = {
+ "1": [""],
+ "2": [
+ "ls -lrt /home/ubuntu/OSMTESTNBI",
+ ],
+ }
self.test_ns(engine, test_osm, commands=commands)
# # 2 perform scale out
class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
- description = "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in " \
- "ids and member-vnf-index."
+ description = (
+ "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in "
+ "ids and member-vnf-index."
+ )
def __init__(self):
super().__init__()
"vnfd0": {
"vdu": {
"$[0]": {
- "interface": {"$[0]": {"external-connection-point-ref": "pdu-mgmt"}}
+ "interface": {
+ "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
+ }
},
- "$[1]": None
+ "$[1]": None,
},
"vnf-configuration": None,
"connection-point": {
"$[0]": {
"id": "pdu-mgmt",
"name": "pdu-mgmt",
- "short-name": "pdu-mgmt"
+ "short-name": "pdu-mgmt",
},
- "$[1]": None
+ "$[1]": None,
},
"mgmt-interface": {"cp": "pdu-mgmt"},
"description": "A vnf single vdu to be used as PDU",
"name": "pdu_internal",
"internal-connection-point": {"$[1]": None},
"short-name": "pdu_internal",
- "type": "ELAN"
+ "type": "ELAN",
}
- }
+ },
},
-
# Modify NSD accordingly
"nsd": {
"constituent-vnfd": {
"vnfd-connection-point-ref": "pdu-mgmt",
"vnfd-id-ref": "vdu-as-pdu",
},
- "$[1]": None
+ "$[1]": None,
},
- "type": "ELAN"
+ "type": "ELAN",
},
"$[1]": None,
- }
- }
+ },
+ },
}
def __init__(self):
super().__init__()
self.test_name = "HACKFEST3v3-"
- self.commands = {'1': ['ls -lrt /home/ubuntu/first-touch-1'], '2': ['ls -lrt /home/ubuntu/first-touch-2']}
+ self.commands = {
+ "1": ["ls -lrt /home/ubuntu/first-touch-1"],
+ "2": ["ls -lrt /home/ubuntu/first-touch-2"],
+ }
self.descriptor_edit = {
"vnfd0": yaml.load(
"""
"$[0]":
default-value: "<touch_filename2>"
""",
- Loader=yaml.Loader)
+ Loader=yaml.Loader,
+ )
}
self.ns_params = {
"additionalParamsForVnf": [
- {"member-vnf-index": "1", "additionalParams": {"touch_filename": "/home/ubuntu/first-touch-1",
- "touch_filename2": "/home/ubuntu/second-touch-1"}},
- {"member-vnf-index": "2", "additionalParams": {"touch_filename": "/home/ubuntu/first-touch-2",
- "touch_filename2": "/home/ubuntu/second-touch-2"}},
+ {
+ "member-vnf-index": "1",
+ "additionalParams": {
+ "touch_filename": "/home/ubuntu/first-touch-1",
+ "touch_filename2": "/home/ubuntu/second-touch-1",
+ },
+ },
+ {
+ "member-vnf-index": "2",
+ "additionalParams": {
+ "touch_filename": "/home/ubuntu/first-touch-2",
+ "touch_filename2": "/home/ubuntu/second-touch-2",
+ },
+ },
]
}
return
# 2 perform scale out
- payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
- '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
- engine.test("Execute scale action over NS", "POST",
- "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ payload = (
+ "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
+ '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+ )
+ engine.test(
+ "Execute scale action over NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+ headers_yaml,
+ payload,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_scale_out = engine.last_id
engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
if manual_check:
- input('NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created')
+ input(
+ "NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created"
+ )
if test_osm:
- commands = {'1': ['ls -lrt /home/ubuntu/second-touch-1', ]}
+ commands = {
+ "1": [
+ "ls -lrt /home/ubuntu/second-touch-1",
+ ]
+ }
self.test_ns(engine, test_osm, commands=commands)
# TODO check automatic connection to scaled VM
# 2 perform scale in
- payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
- '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
- engine.test("Execute scale action over NS", "POST",
- "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
- (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+ payload = (
+ "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
+ '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+ )
+ engine.test(
+ "Execute scale action over NS",
+ "POST",
+ "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+ headers_yaml,
+ payload,
+ (201, 202),
+ r_headers_yaml_location_nslcmop,
+ "yaml",
+ )
nslcmop2_scale_in = engine.last_id
engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
if manual_check:
- input('NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted')
+ input(
+ "NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted"
+ )
# TODO check automatic
def __init__(self):
super().__init__()
self.test_name = "HACKFEST-SIMPLE"
- self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
+ self.descriptor_url = (
+ "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
+ )
self.vnfd_filenames = ("hackfest_simplecharm_vnf.tar.gz",)
self.nsd_filename = "hackfest_simplecharm_ns.tar.gz"
self.uses_configuration = True
- self.commands = {'1': [''], '2': ['ls -lrt /home/ubuntu/first-touch', ]}
- self.users = {'1': "ubuntu", '2': "ubuntu"}
- self.passwords = {'1': "osm4u", '2': "osm4u"}
+ self.commands = {
+ "1": [""],
+ "2": [
+ "ls -lrt /home/ubuntu/first-touch",
+ ],
+ }
+ self.users = {"1": "ubuntu", "2": "ubuntu"}
+ self.passwords = {"1": "osm4u", "2": "osm4u"}
class TestDeploySimpleCharm2(TestDeploySimpleCharm):
- description = "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots on ids and " \
- "vnf-member-index"
+ description = (
+ "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots on ids and "
+ "vnf-member-index"
+ )
def __init__(self):
super().__init__()
self.test_name = "HACKFEST-SIMPLE2-"
self.qforce = "?FORCE=True"
self.descriptor_edit = {
- "vnfd0": {
- "id": "hackfest.simplecharm.vnf"
- },
-
+ "vnfd0": {"id": "hackfest.simplecharm.vnf"},
"nsd": {
"id": "hackfest.simplecharm.ns",
"constituent-vnfd": {
- "$[0]": {"vnfd-id-ref": "hackfest.simplecharm.vnf", "member-vnf-index": "$1"},
- "$[1]": {"vnfd-id-ref": "hackfest.simplecharm.vnf", "member-vnf-index": "$2"},
+ "$[0]": {
+ "vnfd-id-ref": "hackfest.simplecharm.vnf",
+ "member-vnf-index": "$1",
+ },
+ "$[1]": {
+ "vnfd-id-ref": "hackfest.simplecharm.vnf",
+ "member-vnf-index": "$2",
+ },
},
"vld": {
"$[0]": {
- "vnfd-connection-point-ref": {"$[0]": {"member-vnf-index-ref": "$1",
- "vnfd-id-ref": "hackfest.simplecharm.vnf"},
- "$[1]": {"member-vnf-index-ref": "$2",
- "vnfd-id-ref": "hackfest.simplecharm.vnf"}},
+ "vnfd-connection-point-ref": {
+ "$[0]": {
+ "member-vnf-index-ref": "$1",
+ "vnfd-id-ref": "hackfest.simplecharm.vnf",
+ },
+ "$[1]": {
+ "member-vnf-index-ref": "$2",
+ "vnfd-id-ref": "hackfest.simplecharm.vnf",
+ },
+ },
},
"$[1]": {
- "vnfd-connection-point-ref": {"$[0]": {"member-vnf-index-ref": "$1",
- "vnfd-id-ref": "hackfest.simplecharm.vnf"},
- "$[1]": {"member-vnf-index-ref": "$2",
- "vnfd-id-ref": "hackfest.simplecharm.vnf"}},
+ "vnfd-connection-point-ref": {
+ "$[0]": {
+ "member-vnf-index-ref": "$1",
+ "vnfd-id-ref": "hackfest.simplecharm.vnf",
+ },
+ "$[1]": {
+ "member-vnf-index-ref": "$2",
+ "vnfd-id-ref": "hackfest.simplecharm.vnf",
+ },
+ },
},
- }
- }
+ },
+ },
}
class TestDeploySingleVdu(TestDeployHackfest3Charmed):
- description = "Generate a single VDU base on editing Hackfest3Charmed descriptors and deploy"
+ description = (
+ "Generate a single VDU base on editing Hackfest3Charmed descriptors and deploy"
+ )
def __init__(self):
super().__init__()
"vnfd0": {
"vdu": {
"$[0]": {
- "interface": {"$[0]": {"external-connection-point-ref": "pdu-mgmt"}}
+ "interface": {
+ "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
+ }
},
- "$[1]": None
+ "$[1]": None,
},
"vnf-configuration": None,
"connection-point": {
"$[0]": {
"id": "pdu-mgmt",
"name": "pdu-mgmt",
- "short-name": "pdu-mgmt"
+ "short-name": "pdu-mgmt",
},
- "$[1]": None
+ "$[1]": None,
},
"mgmt-interface": {"cp": "pdu-mgmt"},
"description": "A vnf single vdu to be used as PDU",
"name": "pdu_internal",
"internal-connection-point": {"$[1]": None},
"short-name": "pdu_internal",
- "type": "ELAN"
+ "type": "ELAN",
}
- }
+ },
},
-
# Modify NSD accordingly
"nsd": {
"constituent-vnfd": {
"vnfd-connection-point-ref": "pdu-mgmt",
"vnfd-id-ref": "vdu-as-pdu",
},
- "$[1]": None
+ "$[1]": None,
},
- "type": "ELAN"
+ "type": "ELAN",
},
"$[1]": None,
- }
- }
+ },
+ },
}
class TestDeployHnfd(TestDeployHackfest3Charmed):
- description = "Generate a HNFD base on editing Hackfest3Charmed descriptors and deploy"
+ description = (
+ "Generate a HNFD base on editing Hackfest3Charmed descriptors and deploy"
+ )
def __init__(self):
super().__init__()
"mac-address": "mac_address",
"vim-network-name": "pdu_internal", # OSMNBITEST-PDU-pdu_internal
},
- ]
+ ],
}
- self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz", "hackfest_3charmed_vnfd.tar.gz")
+ self.vnfd_filenames = (
+ "hackfest_3charmed_vnfd.tar.gz",
+ "hackfest_3charmed_vnfd.tar.gz",
+ )
self.descriptor_edit = {
"vnfd0": {
"interface": {
"$[0]": {"name": "mgmt-iface"},
"$[1]": {"name": "pdu-iface-internal"},
- }
+ },
}
- }
+ },
},
"nsd": {
- "constituent-vnfd": {
- "$[1]": {"vnfd-id-ref": "hfnd1"}
- },
+ "constituent-vnfd": {"$[1]": {"vnfd-id-ref": "hfnd1"}},
"vld": {
- "$[0]": {"vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}},
- "$[1]": {"vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}}
- }
- }
+ "$[0]": {
+ "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
+ },
+ "$[1]": {
+ "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
+ },
+ },
+ },
}
def create_descriptors(self, engine):
# TODO get vim-network-name from vnfr.vld.name
self.pdu_descriptor["interfaces"][1]["vim-network-name"] = "{}-{}-{}".format(
os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST"),
- "PDU", self.pdu_descriptor["interfaces"][1]["vim-network-name"])
- engine.test("Onboard PDU descriptor", "POST", "/pdu/v1/pdu_descriptors",
- {"Location": "/pdu/v1/pdu_descriptors/", "Content-Type": "application/yaml"}, self.pdu_descriptor,
- 201, r_header_yaml, "yaml")
+ "PDU",
+ self.pdu_descriptor["interfaces"][1]["vim-network-name"],
+ )
+ engine.test(
+ "Onboard PDU descriptor",
+ "POST",
+ "/pdu/v1/pdu_descriptors",
+ {
+ "Location": "/pdu/v1/pdu_descriptors/",
+ "Content-Type": "application/yaml",
+ },
+ self.pdu_descriptor,
+ 201,
+ r_header_yaml,
+ "yaml",
+ )
self.pdu_id = engine.last_id
def run(self, engine, test_osm, manual_check, test_params=None):
self.vim_id = engine.get_create_vim(test_osm)
# instantiate PDU
self.pduDeploy.create_descriptors(engine)
- self.pduDeploy.instantiate(engine, {"nsDescription": "to be used as PDU", "nsName": nsname + "-PDU",
- "nsdId": self.pduDeploy.nsd_id, "vimAccountId": self.vim_id})
+ self.pduDeploy.instantiate(
+ engine,
+ {
+ "nsDescription": "to be used as PDU",
+ "nsName": nsname + "-PDU",
+ "nsdId": self.pduDeploy.nsd_id,
+ "vimAccountId": self.vim_id,
+ },
+ )
if manual_check:
- input('VNF to be used as PDU has been deployed. Perform manual check and press enter to resume')
+ input(
+ "VNF to be used as PDU has been deployed. Perform manual check and press enter to resume"
+ )
if test_osm:
self.pduDeploy.test_ns(engine, test_osm)
if test_osm:
- r = engine.test("Get VNFR to obtain IP_ADDRESS", "GET",
- "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id), headers_json, None,
- 200, r_header_json, "json")
+ r = engine.test(
+ "Get VNFR to obtain IP_ADDRESS",
+ "GET",
+ "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
if not r:
return
vnfr_data = r.json()
# print(vnfr_data)
- self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][0].get("ip-address")
- self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][1].get("ip-address")
- self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][0].get("mac-address")
- self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][1].get("mac-address")
+ self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+ 0
+ ].get("ip-address")
+ self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+ 1
+ ].get("ip-address")
+ self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+ 0
+ ].get("mac-address")
+ self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+ 1
+ ].get("mac-address")
if not self.pdu_interface_0["ip-address"]:
raise TestException("Vnfr has not managment ip address")
else:
self.create_descriptors(engine)
- ns_data = {"nsDescription": "default description", "nsName": nsname, "nsdId": self.nsd_id,
- "vimAccountId": self.vim_id}
+ ns_data = {
+ "nsDescription": "default description",
+ "nsName": nsname,
+ "nsdId": self.nsd_id,
+ "vimAccountId": self.vim_id,
+ }
if test_params and test_params.get("ns-config"):
if isinstance(test_params["ns-config"], str):
ns_data.update(yaml.load(test_params["ns-config"]), Loader=yaml.Loader)
self.instantiate(engine, ns_data)
if manual_check:
- input('NS has been deployed. Perform manual check and press enter to resume')
+ input(
+ "NS has been deployed. Perform manual check and press enter to resume"
+ )
if test_osm:
self.test_ns(engine, test_osm)
self.additional_operations(engine, test_osm, manual_check)
def delete_descriptors(self, engine):
super().delete_descriptors(engine)
# delete pdu
- engine.test("Delete PDU SOL005", "DELETE",
- "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
- headers_yaml, None, 204, None, 0)
+ engine.test(
+ "Delete PDU SOL005",
+ "DELETE",
+ "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
class TestDescriptors:
def __init__(self):
self.vnfd_filename = "hackfest_3charmed_vnfd.tar.gz"
self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
- self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+ self.descriptor_url = (
+ "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+ )
self.vnfd_id = None
self.nsd_id = None
with open(filename_path, "wb") as file:
response = requests.get(self.descriptor_url + filename)
if response.status_code >= 300:
- raise TestException("Error downloading descriptor from '{}': {}".format(
- self.descriptor_url + filename, response.status_code))
+ raise TestException(
+ "Error downloading descriptor from '{}': {}".format(
+ self.descriptor_url + filename, response.status_code
+ )
+ )
file.write(response.content)
vnfd_filename_path = temp_dir + self.vnfd_filename
nsd_filename_path = temp_dir + self.nsd_filename
- engine.test("Onboard empty VNFD in one step", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
- self.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard empty VNFD in one step",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_yaml,
+ self.vnfd_empty,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
self.vnfd_id = engine.last_id
# test bug 605
- engine.test("Upload invalid VNFD ", "PUT", "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
- headers_yaml, self.vnfd_prova, 422, r_header_yaml, "yaml")
-
- engine.test("Upload VNFD {}".format(self.vnfd_filename), "PUT",
- "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id), headers_zip_yaml,
- "@b" + vnfd_filename_path, 204, None, 0)
-
- queries = ["mgmt-interface.cp=mgmt", "vdu.0.interface.0.external-connection-point-ref=mgmt",
- "vdu.0.interface.1.internal-connection-point-ref=internal",
- "internal-vld.0.internal-connection-point.0.id-ref=internal",
- # Detection of duplicated VLD names in VNF Descriptors
- # URL: internal-vld=[
- # {id: internal1, name: internal, type:ELAN,
- # internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
- # {id: internal2, name: internal, type:ELAN,
- # internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
- # ]
- "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
- "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
- "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
- "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
- "id-ref%3A%20dataVM-internal%7D%5D%7D%5D"
- ]
+ engine.test(
+ "Upload invalid VNFD ",
+ "PUT",
+ "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+ headers_yaml,
+ self.vnfd_prova,
+ 422,
+ r_header_yaml,
+ "yaml",
+ )
+
+ engine.test(
+ "Upload VNFD {}".format(self.vnfd_filename),
+ "PUT",
+ "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+ headers_zip_yaml,
+ "@b" + vnfd_filename_path,
+ 204,
+ None,
+ 0,
+ )
+
+ queries = [
+ "mgmt-interface.cp=mgmt",
+ "vdu.0.interface.0.external-connection-point-ref=mgmt",
+ "vdu.0.interface.1.internal-connection-point-ref=internal",
+ "internal-vld.0.internal-connection-point.0.id-ref=internal",
+ # Detection of duplicated VLD names in VNF Descriptors
+ # URL: internal-vld=[
+ # {id: internal1, name: internal, type:ELAN,
+ # internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
+ # {id: internal2, name: internal, type:ELAN,
+ # internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
+ # ]
+ "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
+ "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
+ "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
+ "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
+ "id-ref%3A%20dataVM-internal%7D%5D%7D%5D",
+ ]
for query in queries:
- engine.test("Upload invalid VNFD ", "PUT",
- "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(self.vnfd_id, query),
- headers_zip_yaml, "@b" + vnfd_filename_path, 422, r_header_yaml, "yaml")
+ engine.test(
+ "Upload invalid VNFD ",
+ "PUT",
+ "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(
+ self.vnfd_id, query
+ ),
+ headers_zip_yaml,
+ "@b" + vnfd_filename_path,
+ 422,
+ r_header_yaml,
+ "yaml",
+ )
# test bug 605
- engine.test("Upload invalid VNFD ", "PUT", "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
- headers_yaml, self.vnfd_prova, 422, r_header_yaml, "yaml")
+ engine.test(
+ "Upload invalid VNFD ",
+ "PUT",
+ "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+ headers_yaml,
+ self.vnfd_prova,
+ 422,
+ r_header_yaml,
+ "yaml",
+ )
# get vnfd descriptor
- engine.test("Get VNFD descriptor", "GET", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
- headers_yaml, None, 200, r_header_yaml, "yaml")
+ engine.test(
+ "Get VNFD descriptor",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
+ headers_yaml,
+ None,
+ 200,
+ r_header_yaml,
+ "yaml",
+ )
# get vnfd file descriptor
- engine.test("Get VNFD file descriptor", "GET", "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
- headers_text, None, 200, r_header_text, "text", temp_dir+"vnfd-yaml")
+ engine.test(
+ "Get VNFD file descriptor",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
+ headers_text,
+ None,
+ 200,
+ r_header_text,
+ "text",
+ temp_dir + "vnfd-yaml",
+ )
# TODO compare files: diff vnfd-yaml hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml
# get vnfd zip file package
- engine.test("Get VNFD zip package", "GET",
- "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id), headers_zip, None, 200,
- r_header_zip, "zip", temp_dir+"vnfd-zip")
+ engine.test(
+ "Get VNFD zip package",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+ headers_zip,
+ None,
+ 200,
+ r_header_zip,
+ "zip",
+ temp_dir + "vnfd-zip",
+ )
# TODO compare files: diff vnfd-zip hackfest_3charmed_vnfd.tar.gz
# get vnfd artifact
- engine.test("Get VNFD artifact package", "GET",
- "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id), headers_zip, None, 200,
- r_header_octect, "octet-string", temp_dir+"vnfd-icon")
+ engine.test(
+ "Get VNFD artifact package",
+ "GET",
+ "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id),
+ headers_zip,
+ None,
+ 200,
+ r_header_octect,
+ "octet-string",
+ temp_dir + "vnfd-icon",
+ )
# TODO compare files: diff vnfd-icon hackfest_3charmed_vnfd/icons/osm.png
# nsd CREATE AND UPLOAD in one step:
- engine.test("Onboard NSD in one step", "POST", "/nsd/v1/ns_descriptors_content", headers_zip_yaml,
- "@b" + nsd_filename_path, 201, r_headers_yaml_location_nsd, "yaml")
+ engine.test(
+ "Onboard NSD in one step",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_zip_yaml,
+ "@b" + nsd_filename_path,
+ 201,
+ r_headers_yaml_location_nsd,
+ "yaml",
+ )
self.nsd_id = engine.last_id
queries = ["vld.0.vnfd-connection-point-ref.0.vnfd-id-ref=hf"]
for query in queries:
- engine.test("Upload invalid NSD ", "PUT",
- "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
- headers_zip_yaml, "@b" + nsd_filename_path, 422, r_header_yaml, "yaml")
+ engine.test(
+ "Upload invalid NSD ",
+ "PUT",
+ "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
+ headers_zip_yaml,
+ "@b" + nsd_filename_path,
+ 422,
+ r_header_yaml,
+ "yaml",
+ )
# get nsd descriptor
- engine.test("Get NSD descriptor", "GET", "/nsd/v1/ns_descriptors/{}".format(self.nsd_id), headers_yaml,
- None, 200, r_header_yaml, "yaml")
+ engine.test(
+ "Get NSD descriptor",
+ "GET",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+ headers_yaml,
+ None,
+ 200,
+ r_header_yaml,
+ "yaml",
+ )
# get nsd file descriptor
- engine.test("Get NSD file descriptor", "GET", "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id), headers_text,
- None, 200, r_header_text, "text", temp_dir+"nsd-yaml")
+ engine.test(
+ "Get NSD file descriptor",
+ "GET",
+ "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id),
+ headers_text,
+ None,
+ 200,
+ r_header_text,
+ "text",
+ temp_dir + "nsd-yaml",
+ )
# TODO compare files: diff nsd-yaml hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml
# get nsd zip file package
- engine.test("Get NSD zip package", "GET", "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
- headers_zip, None, 200, r_header_zip, "zip", temp_dir+"nsd-zip")
+ engine.test(
+ "Get NSD zip package",
+ "GET",
+ "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
+ headers_zip,
+ None,
+ 200,
+ r_header_zip,
+ "zip",
+ temp_dir + "nsd-zip",
+ )
# TODO compare files: diff nsd-zip hackfest_3charmed_nsd.tar.gz
# get nsd artifact
- engine.test("Get NSD artifact package", "GET",
- "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id), headers_zip, None, 200,
- r_header_octect, "octet-string", temp_dir+"nsd-icon")
+ engine.test(
+ "Get NSD artifact package",
+ "GET",
+ "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id),
+ headers_zip,
+ None,
+ 200,
+ r_header_octect,
+ "octet-string",
+ temp_dir + "nsd-icon",
+ )
# TODO compare files: diff nsd-icon hackfest_3charmed_nsd/icons/osm.png
# vnfd DELETE
- test_rest.test("Delete VNFD conflict", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
- headers_yaml, None, 409, None, None)
+ test_rest.test(
+ "Delete VNFD conflict",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
+ headers_yaml,
+ None,
+ 409,
+ None,
+ None,
+ )
- test_rest.test("Delete VNFD force", "DELETE", "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
- headers_yaml, None, 204, None, 0)
+ test_rest.test(
+ "Delete VNFD force",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
# nsd DELETE
- test_rest.test("Delete NSD", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_id), headers_yaml, None, 204,
- None, 0)
+ test_rest.test(
+ "Delete NSD",
+ "DELETE",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
class TestNetSliceTemplates:
description = "Upload a NST to OSM"
def __init__(self):
- self.vnfd_filename = ("@./slice_shared/vnfd/slice_shared_vnfd.yaml")
- self.vnfd_filename_middle = ("@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml")
- self.nsd_filename = ("@./slice_shared/nsd/slice_shared_nsd.yaml")
- self.nsd_filename_middle = ("@./slice_shared/nsd/slice_shared_middle_nsd.yaml")
- self.nst_filenames = ("@./slice_shared/slice_shared_nstd.yaml")
+ self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+ self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+ self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+ self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+ self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
def run(self, engine, test_osm, manual_check, test_params=None):
# nst CREATE
os.makedirs(temp_dir)
# Onboard VNFDs
- engine.test("Onboard edge VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
- self.vnfd_filename, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard edge VNFD",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_yaml,
+ self.vnfd_filename,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
self.vnfd_edge_id = engine.last_id
- engine.test("Onboard middle VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
- self.vnfd_filename_middle, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard middle VNFD",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_yaml,
+ self.vnfd_filename_middle,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
self.vnfd_middle_id = engine.last_id
# Onboard NSDs
- engine.test("Onboard NSD edge", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
- self.nsd_filename, 201, r_headers_yaml_location_nsd, "yaml")
+ engine.test(
+ "Onboard NSD edge",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_yaml,
+ self.nsd_filename,
+ 201,
+ r_headers_yaml_location_nsd,
+ "yaml",
+ )
self.nsd_edge_id = engine.last_id
- engine.test("Onboard NSD middle", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
- self.nsd_filename_middle, 201, r_headers_yaml_location_nsd, "yaml")
+ engine.test(
+ "Onboard NSD middle",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_yaml,
+ self.nsd_filename_middle,
+ 201,
+ r_headers_yaml_location_nsd,
+ "yaml",
+ )
self.nsd_middle_id = engine.last_id
# Onboard NST
- engine.test("Onboard NST", "POST", "/nst/v1/netslice_templates_content", headers_yaml, self.nst_filenames,
- 201, r_headers_yaml_location_nst, "yaml")
+ engine.test(
+ "Onboard NST",
+ "POST",
+ "/nst/v1/netslice_templates_content",
+ headers_yaml,
+ self.nst_filenames,
+ 201,
+ r_headers_yaml_location_nst,
+ "yaml",
+ )
nst_id = engine.last_id
# nstd SHOW OSM format
- engine.test("Show NSTD OSM format", "GET", "/nst/v1/netslice_templates/{}".format(nst_id), headers_json, None,
- 200, r_header_json, "json")
+ engine.test(
+ "Show NSTD OSM format",
+ "GET",
+ "/nst/v1/netslice_templates/{}".format(nst_id),
+ headers_json,
+ None,
+ 200,
+ r_header_json,
+ "json",
+ )
# nstd DELETE
- engine.test("Delete NSTD", "DELETE", "/nst/v1/netslice_templates/{}".format(nst_id), headers_json, None,
- 204, None, 0)
+ engine.test(
+ "Delete NSTD",
+ "DELETE",
+ "/nst/v1/netslice_templates/{}".format(nst_id),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
# NSDs DELETE
- test_rest.test("Delete NSD middle", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
- headers_json, None, 204, None, 0)
+ test_rest.test(
+ "Delete NSD middle",
+ "DELETE",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
- test_rest.test("Delete NSD edge", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id), headers_json,
- None, 204, None, 0)
+ test_rest.test(
+ "Delete NSD edge",
+ "DELETE",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
# VNFDs DELETE
- test_rest.test("Delete VNFD edge", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
- headers_yaml, None, 204, None, 0)
+ test_rest.test(
+ "Delete VNFD edge",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
- test_rest.test("Delete VNFD middle", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
- headers_yaml, None, 204, None, 0)
+ test_rest.test(
+ "Delete VNFD middle",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
class TestNetSliceInstances:
- '''
+ """
Test procedure:
1. Populate databases with VNFD, NSD, NST with the following scenario
+-----------------management-----------------+
Manual check - All cleaned correctly?
NSI-2 and NSI-3 were terminated and deleted
14. Cleanup database
- '''
+ """
description = "Upload a NST to OSM"
def __init__(self):
self.vim_id = None
- self.vnfd_filename = ("@./slice_shared/vnfd/slice_shared_vnfd.yaml")
- self.vnfd_filename_middle = ("@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml")
- self.nsd_filename = ("@./slice_shared/nsd/slice_shared_nsd.yaml")
- self.nsd_filename_middle = ("@./slice_shared/nsd/slice_shared_middle_nsd.yaml")
- self.nst_filenames = ("@./slice_shared/slice_shared_nstd.yaml")
+ self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+ self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+ self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+ self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+ self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
def create_slice(self, engine, nsi_data, name):
ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
- r = engine.test(name, "POST", "/nsilcm/v1/netslice_instances",
- headers_yaml, ns_data_text, (201, 202),
- {"Location": "nsilcm/v1/netslice_instances/", "Content-Type": "application/yaml"}, "yaml")
+ r = engine.test(
+ name,
+ "POST",
+ "/nsilcm/v1/netslice_instances",
+ headers_yaml,
+ ns_data_text,
+ (201, 202),
+ {
+ "Location": "nsilcm/v1/netslice_instances/",
+ "Content-Type": "application/yaml",
+ },
+ "yaml",
+ )
return r
def instantiate_slice(self, engine, nsi_data, nsi_id, name):
ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
- engine.test(name, "POST",
- "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id), headers_yaml, ns_data_text,
- (201, 202), r_headers_yaml_location_nsilcmop, "yaml")
+ engine.test(
+ name,
+ "POST",
+ "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id),
+ headers_yaml,
+ ns_data_text,
+ (201, 202),
+ r_headers_yaml_location_nsilcmop,
+ "yaml",
+ )
def terminate_slice(self, engine, nsi_id, name):
- engine.test(name, "POST", "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
- headers_yaml, None, (201, 202), r_headers_yaml_location_nsilcmop, "yaml")
+ engine.test(
+ name,
+ "POST",
+ "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
+ headers_yaml,
+ None,
+ (201, 202),
+ r_headers_yaml_location_nsilcmop,
+ "yaml",
+ )
def delete_slice(self, engine, nsi_id, name):
- engine.test(name, "DELETE", "/nsilcm/v1/netslice_instances/{}".format(nsi_id), headers_yaml, None,
- 204, None, 0)
+ engine.test(
+ name,
+ "DELETE",
+ "/nsilcm/v1/netslice_instances/{}".format(nsi_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
def run(self, engine, test_osm, manual_check, test_params=None):
# nst CREATE
engine.get_autorization()
# Onboard VNFDs
- engine.test("Onboard edge VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
- self.vnfd_filename, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard edge VNFD",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_yaml,
+ self.vnfd_filename,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
self.vnfd_edge_id = engine.last_id
- engine.test("Onboard middle VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
- self.vnfd_filename_middle, 201, r_headers_yaml_location_vnfd, "yaml")
+ engine.test(
+ "Onboard middle VNFD",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_yaml,
+ self.vnfd_filename_middle,
+ 201,
+ r_headers_yaml_location_vnfd,
+ "yaml",
+ )
self.vnfd_middle_id = engine.last_id
# Onboard NSDs
- engine.test("Onboard NSD edge", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
- self.nsd_filename, 201, r_headers_yaml_location_nsd, "yaml")
+ engine.test(
+ "Onboard NSD edge",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_yaml,
+ self.nsd_filename,
+ 201,
+ r_headers_yaml_location_nsd,
+ "yaml",
+ )
self.nsd_edge_id = engine.last_id
- engine.test("Onboard NSD middle", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
- self.nsd_filename_middle, 201, r_headers_yaml_location_nsd, "yaml")
+ engine.test(
+ "Onboard NSD middle",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_yaml,
+ self.nsd_filename_middle,
+ 201,
+ r_headers_yaml_location_nsd,
+ "yaml",
+ )
self.nsd_middle_id = engine.last_id
# Onboard NST
- engine.test("Onboard NST", "POST", "/nst/v1/netslice_templates_content", headers_yaml, self.nst_filenames,
- 201, r_headers_yaml_location_nst, "yaml")
+ engine.test(
+ "Onboard NST",
+ "POST",
+ "/nst/v1/netslice_templates_content",
+ headers_yaml,
+ self.nst_filenames,
+ 201,
+ r_headers_yaml_location_nst,
+ "yaml",
+ )
nst_id = engine.last_id
self.vim_id = engine.get_create_vim(test_osm)
# CREATE NSI-1
- ns_data = {'nsiName': 'Deploy-NSI-1', 'vimAccountId': self.vim_id, 'nstId': nst_id, 'nsiDescription': 'default'}
+ ns_data = {
+ "nsiName": "Deploy-NSI-1",
+ "vimAccountId": self.vim_id,
+ "nstId": nst_id,
+ "nsiDescription": "default",
+ }
r = self.create_slice(engine, ns_data, "Create NSI-1 step 1")
if not r:
return
self.nsi_id1 = engine.last_id
# INSTANTIATE NSI-1
- self.instantiate_slice(engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2")
+ self.instantiate_slice(
+ engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2"
+ )
nsilcmop_id1 = engine.last_id
# Waiting for NSI-1
engine.wait_operation_ready("nsi", nsilcmop_id1, timeout_deploy)
# CREATE NSI-2
- ns_data = {'nsiName': 'Deploy-NSI-2', 'vimAccountId': self.vim_id, 'nstId': nst_id, 'nsiDescription': 'default'}
+ ns_data = {
+ "nsiName": "Deploy-NSI-2",
+ "vimAccountId": self.vim_id,
+ "nstId": nst_id,
+ "nsiDescription": "default",
+ }
r = self.create_slice(engine, ns_data, "Create NSI-2 step 1")
if not r:
return
self.nsi_id2 = engine.last_id
# INSTANTIATE NSI-2
- self.instantiate_slice(engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2")
+ self.instantiate_slice(
+ engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2"
+ )
nsilcmop_id2 = engine.last_id
# Waiting for NSI-2
engine.wait_operation_ready("nsi", nsilcmop_id2, timeout_deploy)
if manual_check:
- input('NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume')
+ input(
+ "NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume"
+ )
# TERMINATE NSI-1
if test_osm:
self.delete_slice(engine, self.nsi_id1, "Delete NS")
if manual_check:
- input('NSI-1 has been deleted. Perform manual check and press enter to resume')
+ input(
+ "NSI-1 has been deleted. Perform manual check and press enter to resume"
+ )
# CREATE NSI-3
- ns_data = {'nsiName': 'Deploy-NSI-3', 'vimAccountId': self.vim_id, 'nstId': nst_id, 'nsiDescription': 'default'}
+ ns_data = {
+ "nsiName": "Deploy-NSI-3",
+ "vimAccountId": self.vim_id,
+ "nstId": nst_id,
+ "nsiDescription": "default",
+ }
r = self.create_slice(engine, ns_data, "Create NSI-3 step 1")
if not r:
self.nsi_id3 = engine.last_id
# INSTANTIATE NSI-3
- self.instantiate_slice(engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2")
+ self.instantiate_slice(
+ engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2"
+ )
nsilcmop_id3 = engine.last_id
# Wait Instantiate NSI-3
engine.wait_operation_ready("nsi", nsilcmop_id3, timeout_deploy)
if manual_check:
- input('NSI-3 has been deployed. Perform manual check and press enter to resume')
+ input(
+ "NSI-3 has been deployed. Perform manual check and press enter to resume"
+ )
# TERMINATE NSI-2
if test_osm:
# Wait terminate NSI-2
engine.wait_operation_ready("nsi", nsilcmop2_id, timeout_deploy)
-
+
# DELETE NSI-2
self.delete_slice(engine, self.nsi_id2, "DELETE NSI-2")
# TERMINATE NSI-3
if test_osm:
- self. terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
+ self.terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
nsilcmop3_id = engine.last_id
# Wait terminate NSI-3
self.delete_slice(engine, self.nsi_id3, "DELETE NSI-3")
if manual_check:
- input('NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume')
+ input(
+ "NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume"
+ )
# nstd DELETE
- engine.test("Delete NSTD", "DELETE", "/nst/v1/netslice_templates/{}".format(nst_id), headers_json, None,
- 204, None, 0)
+ engine.test(
+ "Delete NSTD",
+ "DELETE",
+ "/nst/v1/netslice_templates/{}".format(nst_id),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
# NSDs DELETE
- test_rest.test("Delete NSD middle", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
- headers_json, None, 204, None, 0)
+ test_rest.test(
+ "Delete NSD middle",
+ "DELETE",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
- test_rest.test("Delete NSD edge", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id), headers_json,
- None, 204, None, 0)
+ test_rest.test(
+ "Delete NSD edge",
+ "DELETE",
+ "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+ headers_json,
+ None,
+ 204,
+ None,
+ 0,
+ )
# VNFDs DELETE
- test_rest.test("Delete VNFD edge", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
- headers_yaml, None, 204, None, 0)
+ test_rest.test(
+ "Delete VNFD edge",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
- test_rest.test("Delete VNFD middle", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
- headers_yaml, None, 204, None, 0)
+ test_rest.test(
+ "Delete VNFD middle",
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+ headers_yaml,
+ None,
+ 204,
+ None,
+ 0,
+ )
class TestAuthentication:
engine.get_autorization()
# GET
- engine.test("Get tokens", "GET", "/admin/v1/tokens", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
- engine.test("Get projects", "GET", "/admin/v1/projects", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
- engine.test("Get users", "GET", "/admin/v1/users", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
- engine.test("Get roles", "GET", "/admin/v1/roles", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
- res = engine.test("Get admin project", "GET", "/admin/v1/projects?name=admin", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ engine.test(
+ "Get tokens",
+ "GET",
+ "/admin/v1/tokens",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
+ engine.test(
+ "Get projects",
+ "GET",
+ "/admin/v1/projects",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
+ engine.test(
+ "Get users",
+ "GET",
+ "/admin/v1/users",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
+ engine.test(
+ "Get roles",
+ "GET",
+ "/admin/v1/roles",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
+ res = engine.test(
+ "Get admin project",
+ "GET",
+ "/admin/v1/projects?name=admin",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
admin_project_id = res.json()[0]["_id"] if res else None
- res = engine.test("Get project admin role", "GET", "/admin/v1/roles?name=project_admin", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Get project admin role",
+ "GET",
+ "/admin/v1/roles?name=project_admin",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
project_admin_role_id = res.json()[0]["_id"] if res else None
- res = engine.test("Get project user role", "GET", "/admin/v1/roles?name=project_user", headers_json, {},
- (200), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Get project user role",
+ "GET",
+ "/admin/v1/roles?name=project_user",
+ headers_json,
+ {},
+ (200),
+ {"Content-Type": "application/json"},
+ "json",
+ )
project_user_role_id = res.json()[0]["_id"] if res else None
# POST
- res = engine.test("Create test project", "POST", "/admin/v1/projects", headers_json, {"name": "test"},
- (201), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create test project",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {"name": "test"},
+ (201),
+ {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+ "json",
+ )
test_project_id = engine.last_id if res else None
- res = engine.test("Create role without permissions", "POST", "/admin/v1/roles", headers_json, {"name": "empty"},
- (201), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create role without permissions",
+ "POST",
+ "/admin/v1/roles",
+ headers_json,
+ {"name": "empty"},
+ (201),
+ {"Content-Type": "application/json"},
+ "json",
+ )
empty_role_id = engine.last_id if res else None
- res = engine.test("Create role with default permissions", "POST", "/admin/v1/roles", headers_json,
- {"name": "default", "permissions": {"default": True}},
- (201), {"Location": "/admin/v1/roles/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create role with default permissions",
+ "POST",
+ "/admin/v1/roles",
+ headers_json,
+ {"name": "default", "permissions": {"default": True}},
+ (201),
+ {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+ "json",
+ )
default_role_id = engine.last_id if res else None
- res = engine.test("Create role with token permissions", "POST", "/admin/v1/roles", headers_json,
- {"name": "tokens", "permissions": {"tokens": True}}, # is default required ?
- (201), {"Location": "/admin/v1/roles/", "Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create role with token permissions",
+ "POST",
+ "/admin/v1/roles",
+ headers_json,
+ {
+ "name": "tokens",
+ "permissions": {"tokens": True},
+            },  # TODO(review): confirm whether the "default" permission must also be set alongside "tokens"
+ (201),
+ {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+ "json",
+ )
token_role_id = engine.last_id if res else None
pr = "project-role mappings"
- res = engine.test("Create user without "+pr, "POST", "/admin/v1/users", headers_json,
- {"username": "empty", "password": "empty"},
- 201, {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create user without " + pr,
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ {"username": "empty", "password": "empty"},
+ 201,
+ {"Content-Type": "application/json"},
+ "json",
+ )
empty_user_id = engine.last_id if res else None
- if admin_project_id and test_project_id and project_admin_role_id and project_user_role_id:
+ if (
+ admin_project_id
+ and test_project_id
+ and project_admin_role_id
+ and project_user_role_id
+ ):
data = {"username": "test", "password": "test"}
data["project_role_mappings"] = [
{"project": test_project_id, "role": project_admin_role_id},
- {"project": admin_project_id, "role": project_user_role_id}
+ {"project": admin_project_id, "role": project_user_role_id},
]
- res = engine.test("Create user with "+pr, "POST", "/admin/v1/users", headers_json, data,
- (201), {"Content-Type": "application/json"}, "json")
+ res = engine.test(
+ "Create user with " + pr,
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ data,
+ (201),
+ {"Content-Type": "application/json"},
+ "json",
+ )
test_user_id = engine.last_id if res else None
# PUT
if test_user_id:
- engine.test("Modify test user's password", "PUT", "/admin/v1/users/"+test_user_id, headers_json,
- {"password": "password"},
- (204), {}, 0)
- if empty_user_id and admin_project_id and test_project_id and project_admin_role_id and project_user_role_id:
- data = {"project_role_mappings": [
- {"project": test_project_id, "role": project_admin_role_id},
- {"project": admin_project_id, "role": project_user_role_id}
- ]}
- engine.test("Modify empty user's "+pr, "PUT", "/admin/v1/users/"+empty_user_id,
- headers_json,
- data,
- (204), {}, 0)
+ engine.test(
+ "Modify test user's password",
+ "PUT",
+ "/admin/v1/users/" + test_user_id,
+ headers_json,
+ {"password": "password"},
+ (204),
+ {},
+ 0,
+ )
+ if (
+ empty_user_id
+ and admin_project_id
+ and test_project_id
+ and project_admin_role_id
+ and project_user_role_id
+ ):
+ data = {
+ "project_role_mappings": [
+ {"project": test_project_id, "role": project_admin_role_id},
+ {"project": admin_project_id, "role": project_user_role_id},
+ ]
+ }
+ engine.test(
+ "Modify empty user's " + pr,
+ "PUT",
+ "/admin/v1/users/" + empty_user_id,
+ headers_json,
+ data,
+ (204),
+ {},
+ 0,
+ )
# DELETE
if empty_user_id:
- engine.test("Delete empty user", "DELETE", "/admin/v1/users/"+empty_user_id, headers_json, {},
- (204), {}, 0)
+ engine.test(
+ "Delete empty user",
+ "DELETE",
+ "/admin/v1/users/" + empty_user_id,
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if test_user_id:
- engine.test("Delete test user", "DELETE", "/admin/v1/users/"+test_user_id, headers_json, {},
- (204), {}, 0)
+ engine.test(
+ "Delete test user",
+ "DELETE",
+ "/admin/v1/users/" + test_user_id,
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if empty_role_id:
- engine.test("Delete empty role", "DELETE", "/admin/v1/roles/"+empty_role_id, headers_json, {},
- (204), {}, 0)
+ engine.test(
+ "Delete empty role",
+ "DELETE",
+ "/admin/v1/roles/" + empty_role_id,
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if default_role_id:
- engine.test("Delete default role", "DELETE", "/admin/v1/roles/"+default_role_id, headers_json, {},
- (204), {}, 0)
+ engine.test(
+ "Delete default role",
+ "DELETE",
+ "/admin/v1/roles/" + default_role_id,
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if token_role_id:
- engine.test("Delete token role", "DELETE", "/admin/v1/roles/"+token_role_id, headers_json, {},
- (204), {}, 0)
+ engine.test(
+ "Delete token role",
+ "DELETE",
+ "/admin/v1/roles/" + token_role_id,
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if test_project_id:
- engine.test("Delete test project", "DELETE", "/admin/v1/projects/"+test_project_id, headers_json, {},
- (204), {}, 0)
+ engine.test(
+ "Delete test project",
+ "DELETE",
+ "/admin/v1/projects/" + test_project_id,
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
# END Tests
- engine.remove_authorization() # To finish
+ engine.remove_authorization() # To finish
-class TestNbiQuotas():
+class TestNbiQuotas:
description = "Test NBI Quotas"
@staticmethod
admin_token = engine.last_id
# Check that test project,user do not exist
- res1 = engine.test("Check that test project doesn't exist", "GET", "/admin/v1/projects/"+test_project,
- headers_json, {}, (404), {}, True)
- res2 = engine.test("Check that test user doesn't exist", "GET", "/admin/v1/users/"+test_username,
- headers_json, {}, (404), {}, True)
+ res1 = engine.test(
+ "Check that test project doesn't exist",
+ "GET",
+ "/admin/v1/projects/" + test_project,
+ headers_json,
+ {},
+ (404),
+ {},
+ True,
+ )
+ res2 = engine.test(
+ "Check that test user doesn't exist",
+ "GET",
+ "/admin/v1/users/" + test_username,
+ headers_json,
+ {},
+ (404),
+ {},
+ True,
+ )
if None in [res1, res2]:
engine.remove_authorization()
logger.error("Test project and/or user already exist")
return
# Create test project&user
- res = engine.test("Create test project", "POST", "/admin/v1/projects", headers_json,
- {"name": test_username,
- "quotas": {
- "vnfds": 2,
- "nsds": 2,
- "nsts": 1,
- "pdus": 1,
- "nsrs": 2,
- "nsis": 1,
- "vim_accounts": 1,
- "wim_accounts": 1,
- "sdns": 1,
- }
- },
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test project",
+ "POST",
+ "/admin/v1/projects",
+ headers_json,
+ {
+ "name": test_username,
+ "quotas": {
+ "vnfds": 2,
+ "nsds": 2,
+ "nsts": 1,
+ "pdus": 1,
+ "nsrs": 2,
+ "nsis": 1,
+ "vim_accounts": 1,
+ "wim_accounts": 1,
+ "sdns": 1,
+ },
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_project_id = engine.last_id if res else None
- res = engine.test("Create test user", "POST", "/admin/v1/users", headers_json,
- {"username": test_username, "password": test_password,
- "project_role_mappings": [{"project": test_project, "role": "project_admin"}]},
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test user",
+ "POST",
+ "/admin/v1/users",
+ headers_json,
+ {
+ "username": test_username,
+ "password": test_password,
+ "project_role_mappings": [
+ {"project": test_project, "role": "project_admin"}
+ ],
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_user_id = engine.last_id if res else None
if test_project_id and test_user_id:
user_token = engine.last_id
# Create test VIM
- res = engine.test("Create test VIM", "POST", "/admin/v1/vim_accounts", headers_json,
- {"name": test_vim,
- "vim_type": "openvim",
- "vim_user": test_username,
- "vim_password": test_password,
- "vim_tenant_name": test_project,
- "vim_url": "https://0.0.0.0:0/v0.0",
- },
- (202), r_header_json, "json")
+ res = engine.test(
+ "Create test VIM",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_json,
+ {
+ "name": test_vim,
+ "vim_type": "openvim",
+ "vim_user": test_username,
+ "vim_password": test_password,
+ "vim_tenant_name": test_project,
+ "vim_url": "https://0.0.0.0:0/v0.0",
+ },
+ (202),
+ r_header_json,
+ "json",
+ )
test_vim_ids += [engine.last_id if res else None]
- res = engine.test("Try to create second test VIM", "POST", "/admin/v1/vim_accounts", headers_json,
- {"name": test_vim + "_2",
- "vim_type": "openvim",
- "vim_user": test_username,
- "vim_password": test_password,
- "vim_tenant_name": test_project,
- "vim_url": "https://0.0.0.0:0/v0.0",
- },
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create second test VIM",
+ "POST",
+ "/admin/v1/vim_accounts",
+ headers_json,
+ {
+ "name": test_vim + "_2",
+ "vim_type": "openvim",
+ "vim_user": test_username,
+ "vim_password": test_password,
+ "vim_tenant_name": test_project,
+ "vim_url": "https://0.0.0.0:0/v0.0",
+ },
+ (422),
+ r_header_json,
+ "json",
+ )
test_vim_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create second test VIM with FORCE",
- "POST", "/admin/v1/vim_accounts?FORCE", headers_json,
- {"name": test_vim + "_3",
- "vim_type": "openvim",
- "vim_user": test_username,
- "vim_password": test_password,
- "vim_tenant_name": test_project,
- "vim_url": "https://0.0.0.0:0/v0.0",
- },
- (202), r_header_json, "json")
+ res = engine.test(
+ "Try to create second test VIM with FORCE",
+ "POST",
+ "/admin/v1/vim_accounts?FORCE",
+ headers_json,
+ {
+ "name": test_vim + "_3",
+ "vim_type": "openvim",
+ "vim_user": test_username,
+ "vim_password": test_password,
+ "vim_tenant_name": test_project,
+ "vim_url": "https://0.0.0.0:0/v0.0",
+ },
+ (202),
+ r_header_json,
+ "json",
+ )
test_vim_ids += [engine.last_id if res else None]
if test_vim_ids[0]:
# Download descriptor files (if required)
- test_dir = "/tmp/"+test_username+"/"
+ test_dir = "/tmp/" + test_username + "/"
test_url = "https://osm-download.etsi.org/ftp/osm-6.0-six/7th-hackfest/packages/"
- vnfd_filenames = ["slice_hackfest_vnfd.tar.gz", "slice_hackfest_middle_vnfd.tar.gz"]
- nsd_filenames = ["slice_hackfest_nsd.tar.gz", "slice_hackfest_middle_nsd.tar.gz"]
+ vnfd_filenames = [
+ "slice_hackfest_vnfd.tar.gz",
+ "slice_hackfest_middle_vnfd.tar.gz",
+ ]
+ nsd_filenames = [
+ "slice_hackfest_nsd.tar.gz",
+ "slice_hackfest_middle_nsd.tar.gz",
+ ]
nst_filenames = ["slice_hackfest_nstd.yaml"]
pdu_filenames = ["PDU_router.yaml"]
- desc_filenames = vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
+ desc_filenames = (
+ vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
+ )
if not os.path.exists(test_dir):
os.makedirs(test_dir)
for filename in desc_filenames:
- if not os.path.exists(test_dir+filename):
- res = requests.get(test_url+filename)
+ if not os.path.exists(test_dir + filename):
+ res = requests.get(test_url + filename)
if res.status_code < 300:
- with open(test_dir+filename, "wb") as file:
+ with open(test_dir + filename, "wb") as file:
file.write(res.content)
- if all([os.path.exists(test_dir+p) for p in desc_filenames]):
+ if all([os.path.exists(test_dir + p) for p in desc_filenames]):
# Test VNFD Quotas
- res = engine.test("Create test VNFD #1", "POST", "/vnfpkgm/v1/vnf_packages_content",
- headers_zip_json, "@b"+test_dir+vnfd_filenames[0],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test VNFD #1",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_zip_json,
+ "@b" + test_dir + vnfd_filenames[0],
+ (201),
+ r_header_json,
+ "json",
+ )
test_vnfd_ids += [engine.last_id if res else None]
- res = engine.test("Create test VNFD #2", "POST", "/vnfpkgm/v1/vnf_packages_content",
- headers_zip_json, "@b"+test_dir+vnfd_filenames[1],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test VNFD #2",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_zip_json,
+ "@b" + test_dir + vnfd_filenames[1],
+ (201),
+ r_header_json,
+ "json",
+ )
test_vnfd_ids += [engine.last_id if res else None]
- res = engine.test("Try to create extra test VNFD", "POST",
- "/vnfpkgm/v1/vnf_packages_content",
- headers_zip_json, "@b"+test_dir+vnfd_filenames[0],
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test VNFD",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content",
+ headers_zip_json,
+ "@b" + test_dir + vnfd_filenames[0],
+ (422),
+ r_header_json,
+ "json",
+ )
test_vnfd_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create extra test VNFD with FORCE",
- "POST", "/vnfpkgm/v1/vnf_packages_content?FORCE",
- headers_zip_json, "@b"+test_dir+vnfd_filenames[0],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test VNFD with FORCE",
+ "POST",
+ "/vnfpkgm/v1/vnf_packages_content?FORCE",
+ headers_zip_json,
+ "@b" + test_dir + vnfd_filenames[0],
+ (201),
+ r_header_json,
+ "json",
+ )
test_vnfd_ids += [engine.last_id if res else None]
# Remove extra VNFDs to prevent further errors
for i in [2, 3]:
if test_vnfd_ids[i]:
- res = engine.test("Delete test VNFD #" + str(i), "DELETE",
- "/vnfpkgm/v1/vnf_packages_content/"+test_vnfd_ids[i]+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ res = engine.test(
+ "Delete test VNFD #" + str(i),
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages_content/"
+ + test_vnfd_ids[i]
+ + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if res:
test_vnfd_ids[i] = None
if test_vnfd_ids[0] and test_vnfd_ids[1]:
# Test NSD Quotas
- res = engine.test("Create test NSD #1", "POST", "/nsd/v1/ns_descriptors_content",
- headers_zip_json, "@b"+test_dir+nsd_filenames[0],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test NSD #1",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_zip_json,
+ "@b" + test_dir + nsd_filenames[0],
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsd_ids += [engine.last_id if res else None]
- res = engine.test("Create test NSD #2", "POST", "/nsd/v1/ns_descriptors_content",
- headers_zip_json, "@b"+test_dir+nsd_filenames[1],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test NSD #2",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_zip_json,
+ "@b" + test_dir + nsd_filenames[1],
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsd_ids += [engine.last_id if res else None]
- res = engine.test("Try to create extra test NSD", "POST", "/nsd/v1/ns_descriptors_content",
- headers_zip_json, "@b"+test_dir+nsd_filenames[0],
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test NSD",
+ "POST",
+ "/nsd/v1/ns_descriptors_content",
+ headers_zip_json,
+ "@b" + test_dir + nsd_filenames[0],
+ (422),
+ r_header_json,
+ "json",
+ )
test_nsd_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create extra test NSD with FORCE",
- "POST", "/nsd/v1/ns_descriptors_content?FORCE",
- headers_zip_json, "@b"+test_dir+nsd_filenames[0],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test NSD with FORCE",
+ "POST",
+ "/nsd/v1/ns_descriptors_content?FORCE",
+ headers_zip_json,
+ "@b" + test_dir + nsd_filenames[0],
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsd_ids += [engine.last_id if res else None]
# Remove extra NSDs to prevent further errors
for i in [2, 3]:
if test_nsd_ids[i]:
- res = engine.test("Delete test NSD #" + str(i), "DELETE",
- "/nsd/v1/ns_descriptors_content/"+test_nsd_ids[i]+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ res = engine.test(
+ "Delete test NSD #" + str(i),
+ "DELETE",
+ "/nsd/v1/ns_descriptors_content/"
+ + test_nsd_ids[i]
+ + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if res:
test_nsd_ids[i] = None
if test_nsd_ids[0] and test_nsd_ids[1]:
# Test NSR Quotas
- res = engine.test("Create test NSR #1", "POST", "/nslcm/v1/ns_instances_content",
- headers_json,
- {"nsName": test_username+"_1",
- "nsdId": test_nsd_ids[0],
- "vimAccountId": test_vim_ids[0],
- },
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test NSR #1",
+ "POST",
+ "/nslcm/v1/ns_instances_content",
+ headers_json,
+ {
+ "nsName": test_username + "_1",
+ "nsdId": test_nsd_ids[0],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsr_ids += [engine.last_id if res else None]
- res = engine.test("Create test NSR #2", "POST", "/nslcm/v1/ns_instances_content",
- headers_json,
- {"nsName": test_username+"_2",
- "nsdId": test_nsd_ids[1],
- "vimAccountId": test_vim_ids[0],
- },
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test NSR #2",
+ "POST",
+ "/nslcm/v1/ns_instances_content",
+ headers_json,
+ {
+ "nsName": test_username + "_2",
+ "nsdId": test_nsd_ids[1],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsr_ids += [engine.last_id if res else None]
- res = engine.test("Try to create extra test NSR", "POST", "/nslcm/v1/ns_instances_content",
- headers_json,
- {"nsName": test_username+"_3",
- "nsdId": test_nsd_ids[0],
- "vimAccountId": test_vim_ids[0],
- },
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test NSR",
+ "POST",
+ "/nslcm/v1/ns_instances_content",
+ headers_json,
+ {
+ "nsName": test_username + "_3",
+ "nsdId": test_nsd_ids[0],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (422),
+ r_header_json,
+ "json",
+ )
test_nsr_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create test NSR with FORCE", "POST",
- "/nslcm/v1/ns_instances_content?FORCE", headers_json,
- {"nsName": test_username+"_4",
- "nsdId": test_nsd_ids[0],
- "vimAccountId": test_vim_ids[0],
- },
- (201), r_header_json, "json")
+ res = engine.test(
+ "Try to create test NSR with FORCE",
+ "POST",
+ "/nslcm/v1/ns_instances_content?FORCE",
+ headers_json,
+ {
+ "nsName": test_username + "_4",
+ "nsdId": test_nsd_ids[0],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsr_ids += [engine.last_id if res else None]
# Test NST Quotas
- res = engine.test("Create test NST", "POST", "/nst/v1/netslice_templates_content",
- headers_txt_json, "@b"+test_dir+nst_filenames[0],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test NST",
+ "POST",
+ "/nst/v1/netslice_templates_content",
+ headers_txt_json,
+ "@b" + test_dir + nst_filenames[0],
+ (201),
+ r_header_json,
+ "json",
+ )
test_nst_ids += [engine.last_id if res else None]
- res = engine.test("Try to create extra test NST", "POST",
- "/nst/v1/netslice_templates_content",
- headers_txt_json, "@b"+test_dir+nst_filenames[0],
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test NST",
+ "POST",
+ "/nst/v1/netslice_templates_content",
+ headers_txt_json,
+ "@b" + test_dir + nst_filenames[0],
+ (422),
+ r_header_json,
+ "json",
+ )
test_nst_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create extra test NST with FORCE", "POST",
- "/nst/v1/netslice_templates_content?FORCE",
- headers_txt_json, "@b"+test_dir+nst_filenames[0],
- (201), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test NST with FORCE",
+ "POST",
+ "/nst/v1/netslice_templates_content?FORCE",
+ headers_txt_json,
+ "@b" + test_dir + nst_filenames[0],
+ (201),
+ r_header_json,
+ "json",
+ )
test_nst_ids += [engine.last_id if res else None]
if test_nst_ids[0]:
# Remove NSR Quota
- engine.set_header({"Authorization": "Bearer {}".format(admin_token)})
- res = engine.test("Remove NSR Quota", "PUT", "/admin/v1/projects/"+test_project_id,
- headers_json,
- {"quotas": {"nsrs": None}},
- (204), {}, 0)
- engine.set_header({"Authorization": "Bearer {}".format(user_token)})
+ engine.set_header(
+ {"Authorization": "Bearer {}".format(admin_token)}
+ )
+ res = engine.test(
+ "Remove NSR Quota",
+ "PUT",
+ "/admin/v1/projects/" + test_project_id,
+ headers_json,
+ {"quotas": {"nsrs": None}},
+ (204),
+ {},
+ 0,
+ )
+ engine.set_header(
+ {"Authorization": "Bearer {}".format(user_token)}
+ )
if res:
# Test NSI Quotas
- res = engine.test("Create test NSI", "POST",
- "/nsilcm/v1/netslice_instances_content", headers_json,
- {"nsiName": test_username,
- "nstId": test_nst_ids[0],
- "vimAccountId": test_vim_ids[0],
- },
- (201), r_header_json, "json")
+ res = engine.test(
+ "Create test NSI",
+ "POST",
+ "/nsilcm/v1/netslice_instances_content",
+ headers_json,
+ {
+ "nsiName": test_username,
+ "nstId": test_nst_ids[0],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsi_ids += [engine.last_id if res else None]
- res = engine.test("Try to create extra test NSI", "POST",
- "/nsilcm/v1/netslice_instances_content", headers_json,
- {"nsiName": test_username,
- "nstId": test_nst_ids[0],
- "vimAccountId": test_vim_ids[0],
- },
- (400), r_header_json, "json")
- test_nsi_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create extra test NSI with FORCE", "POST",
- "/nsilcm/v1/netslice_instances_content?FORCE", headers_json,
- {"nsiName": test_username,
- "nstId": test_nst_ids[0],
- "vimAccountId": test_vim_ids[0],
- },
- (201), r_header_json, "json")
+ res = engine.test(
+ "Try to create extra test NSI",
+ "POST",
+ "/nsilcm/v1/netslice_instances_content",
+ headers_json,
+ {
+ "nsiName": test_username,
+ "nstId": test_nst_ids[0],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (400),
+ r_header_json,
+ "json",
+ )
+ test_nsi_ids += [
+ engine.last_id if res is None else None
+ ]
+ res = engine.test(
+ "Try to create extra test NSI with FORCE",
+ "POST",
+ "/nsilcm/v1/netslice_instances_content?FORCE",
+ headers_json,
+ {
+ "nsiName": test_username,
+ "nstId": test_nst_ids[0],
+ "vimAccountId": test_vim_ids[0],
+ },
+ (201),
+ r_header_json,
+ "json",
+ )
test_nsi_ids += [engine.last_id if res else None]
# Test PDU Quotas
- with open(test_dir+pdu_filenames[0], "rb") as file:
- pdu_text = re.sub(r"ip-address: *\[[^\]]*\]", "ip-address: '0.0.0.0'",
- file.read().decode("utf-8"))
- with open(test_dir+pdu_filenames[0], "wb") as file:
+ with open(test_dir + pdu_filenames[0], "rb") as file:
+ pdu_text = re.sub(
+ r"ip-address: *\[[^\]]*\]",
+ "ip-address: '0.0.0.0'",
+ file.read().decode("utf-8"),
+ )
+ with open(test_dir + pdu_filenames[0], "wb") as file:
file.write(pdu_text.encode("utf-8"))
- res = engine.test("Create test PDU", "POST", "/pdu/v1/pdu_descriptors",
- headers_yaml, "@b"+test_dir+pdu_filenames[0],
- (201), r_header_yaml, "yaml")
+ res = engine.test(
+ "Create test PDU",
+ "POST",
+ "/pdu/v1/pdu_descriptors",
+ headers_yaml,
+ "@b" + test_dir + pdu_filenames[0],
+ (201),
+ r_header_yaml,
+ "yaml",
+ )
test_pdu_ids += [engine.last_id if res else None]
- res = engine.test("Try to create extra test PDU", "POST", "/pdu/v1/pdu_descriptors",
- headers_yaml, "@b"+test_dir+pdu_filenames[0],
- (422), r_header_yaml, "yaml")
+ res = engine.test(
+ "Try to create extra test PDU",
+ "POST",
+ "/pdu/v1/pdu_descriptors",
+ headers_yaml,
+ "@b" + test_dir + pdu_filenames[0],
+ (422),
+ r_header_yaml,
+ "yaml",
+ )
test_pdu_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create extra test PDU with FORCE", "POST",
- "/pdu/v1/pdu_descriptors?FORCE",
- headers_yaml, "@b"+test_dir+pdu_filenames[0],
- (201), r_header_yaml, "yaml")
+ res = engine.test(
+ "Try to create extra test PDU with FORCE",
+ "POST",
+ "/pdu/v1/pdu_descriptors?FORCE",
+ headers_yaml,
+ "@b" + test_dir + pdu_filenames[0],
+ (201),
+ r_header_yaml,
+ "yaml",
+ )
test_pdu_ids += [engine.last_id if res else None]
# Cleanup
for i, id in enumerate(test_nsi_ids):
if id:
- engine.test("Delete test NSI #"+str(i), "DELETE",
- "/nsilcm/v1/netslice_instances_content/"+id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test NSI #" + str(i),
+ "DELETE",
+ "/nsilcm/v1/netslice_instances_content/"
+ + id
+ + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
for i, id in enumerate(test_nsr_ids):
if id:
- engine.test("Delete test NSR #"+str(i), "DELETE",
- "/nslcm/v1/ns_instances_content/"+id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test NSR #" + str(i),
+ "DELETE",
+ "/nslcm/v1/ns_instances_content/" + id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
for i, id in enumerate(test_nst_ids):
if id:
- engine.test("Delete test NST #"+str(i), "DELETE",
- "/nst/v1/netslice_templates_content/"+id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test NST #" + str(i),
+ "DELETE",
+ "/nst/v1/netslice_templates_content/" + id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
for i, id in enumerate(test_nsd_ids):
if id:
- engine.test("Delete test NSD #"+str(i), "DELETE",
- "/nsd/v1/ns_descriptors_content/"+id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test NSD #" + str(i),
+ "DELETE",
+ "/nsd/v1/ns_descriptors_content/" + id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
for i, id in enumerate(test_vnfd_ids):
if id:
- engine.test("Delete test VNFD #"+str(i), "DELETE",
- "/vnfpkgm/v1/vnf_packages_content/"+id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test VNFD #" + str(i),
+ "DELETE",
+ "/vnfpkgm/v1/vnf_packages_content/" + id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
for i, id in enumerate(test_pdu_ids):
if id:
- engine.test("Delete test PDU #"+str(i), "DELETE",
- "/pdu/v1/pdu_descriptors/"+id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test PDU #" + str(i),
+ "DELETE",
+ "/pdu/v1/pdu_descriptors/" + id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
# END Test NBI Quotas
# Test WIM Quotas
- res = engine.test("Create test WIM", "POST", "/admin/v1/wim_accounts", headers_json,
- {"name": test_wim,
- "wim_type": "onos",
- "wim_url": "https://0.0.0.0:0/v0.0",
- },
- (202), r_header_json, "json")
+ res = engine.test(
+ "Create test WIM",
+ "POST",
+ "/admin/v1/wim_accounts",
+ headers_json,
+ {
+ "name": test_wim,
+ "wim_type": "onos",
+ "wim_url": "https://0.0.0.0:0/v0.0",
+ },
+ (202),
+ r_header_json,
+ "json",
+ )
test_wim_ids += [engine.last_id if res else None]
- res = engine.test("Try to create second test WIM", "POST", "/admin/v1/wim_accounts", headers_json,
- {"name": test_wim + "_2",
- "wim_type": "onos",
- "wim_url": "https://0.0.0.0:0/v0.0",
- },
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create second test WIM",
+ "POST",
+ "/admin/v1/wim_accounts",
+ headers_json,
+ {
+ "name": test_wim + "_2",
+ "wim_type": "onos",
+ "wim_url": "https://0.0.0.0:0/v0.0",
+ },
+ (422),
+ r_header_json,
+ "json",
+ )
test_wim_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create second test WIM with FORCE", "POST", "/admin/v1/wim_accounts?FORCE",
- headers_json,
- {"name": test_wim + "_3",
- "wim_type": "onos",
- "wim_url": "https://0.0.0.0:0/v0.0",
- },
- (202), r_header_json, "json")
+ res = engine.test(
+ "Try to create second test WIM with FORCE",
+ "POST",
+ "/admin/v1/wim_accounts?FORCE",
+ headers_json,
+ {
+ "name": test_wim + "_3",
+ "wim_type": "onos",
+ "wim_url": "https://0.0.0.0:0/v0.0",
+ },
+ (202),
+ r_header_json,
+ "json",
+ )
test_wim_ids += [engine.last_id if res else None]
# Test SDN Quotas
- res = engine.test("Create test SDN", "POST", "/admin/v1/sdns", headers_json,
- {"name": test_sdn,
- "type": "onos",
- "ip": "0.0.0.0",
- "port": 9999,
- "dpid": "00:00:00:00:00:00:00:00",
- },
- (202), r_header_json, "json")
+ res = engine.test(
+ "Create test SDN",
+ "POST",
+ "/admin/v1/sdns",
+ headers_json,
+ {
+ "name": test_sdn,
+ "type": "onos",
+ "ip": "0.0.0.0",
+ "port": 9999,
+ "dpid": "00:00:00:00:00:00:00:00",
+ },
+ (202),
+ r_header_json,
+ "json",
+ )
test_sdn_ids += [engine.last_id if res else None]
- res = engine.test("Try to create second test SDN", "POST", "/admin/v1/sdns", headers_json,
- {"name": test_sdn + "_2",
- "type": "onos",
- "ip": "0.0.0.0",
- "port": 9999,
- "dpid": "00:00:00:00:00:00:00:00",
- },
- (422), r_header_json, "json")
+ res = engine.test(
+ "Try to create second test SDN",
+ "POST",
+ "/admin/v1/sdns",
+ headers_json,
+ {
+ "name": test_sdn + "_2",
+ "type": "onos",
+ "ip": "0.0.0.0",
+ "port": 9999,
+ "dpid": "00:00:00:00:00:00:00:00",
+ },
+ (422),
+ r_header_json,
+ "json",
+ )
test_sdn_ids += [engine.last_id if res is None else None]
- res = engine.test("Try to create second test SDN with FORCE", "POST", "/admin/v1/sdns?FORCE", headers_json,
- {"name": test_sdn + "_3",
- "type": "onos",
- "ip": "0.0.0.0",
- "port": 9999,
- "dpid": "00:00:00:00:00:00:00:00",
- },
- (202), r_header_json, "json")
+ res = engine.test(
+ "Try to create second test SDN with FORCE",
+ "POST",
+ "/admin/v1/sdns?FORCE",
+ headers_json,
+ {
+ "name": test_sdn + "_3",
+ "type": "onos",
+ "ip": "0.0.0.0",
+ "port": 9999,
+ "dpid": "00:00:00:00:00:00:00:00",
+ },
+ (202),
+ r_header_json,
+ "json",
+ )
test_sdn_ids += [engine.last_id if res else None]
# Cleanup
for i, id in enumerate(test_vim_ids):
if id:
- engine.test("Delete test VIM #"+str(i), "DELETE", "/admin/v1/vim_accounts/"+id+"?FORCE",
- headers_json, {}, (202), {}, 0)
+ engine.test(
+ "Delete test VIM #" + str(i),
+ "DELETE",
+ "/admin/v1/vim_accounts/" + id + "?FORCE",
+ headers_json,
+ {},
+ (202),
+ {},
+ 0,
+ )
for i, id in enumerate(test_wim_ids):
if id:
- engine.test("Delete test WIM #"+str(i), "DELETE", "/admin/v1/wim_accounts/"+id+"?FORCE",
- headers_json, {}, (202), {}, 0)
+ engine.test(
+ "Delete test WIM #" + str(i),
+ "DELETE",
+ "/admin/v1/wim_accounts/" + id + "?FORCE",
+ headers_json,
+ {},
+ (202),
+ {},
+ 0,
+ )
for i, id in enumerate(test_sdn_ids):
if id:
- engine.test("Delete test SDN #"+str(i), "DELETE", "/admin/v1/sdns/"+id+"?FORCE",
- headers_json, {}, (202), {}, 0)
+ engine.test(
+ "Delete test SDN #" + str(i),
+ "DELETE",
+ "/admin/v1/sdns/" + id + "?FORCE",
+ headers_json,
+ {},
+ (202),
+ {},
+ 0,
+ )
# Release user access
engine.remove_authorization()
engine.project = admin_project
engine.get_autorization()
if test_user_id:
- engine.test("Delete test user", "DELETE", "/admin/v1/users/"+test_user_id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test user",
+ "DELETE",
+ "/admin/v1/users/" + test_user_id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
if test_project_id:
- engine.test("Delete test project", "DELETE", "/admin/v1/projects/"+test_project_id+"?FORCE",
- headers_json, {}, (204), {}, 0)
+ engine.test(
+ "Delete test project",
+ "DELETE",
+ "/admin/v1/projects/" + test_project_id + "?FORCE",
+ headers_json,
+ {},
+ (204),
+ {},
+ 0,
+ )
engine.remove_authorization()
# END class TestNbiQuotas
requests.packages.urllib3.disable_warnings()
try:
logging.basicConfig(format="%(levelname)s %(message)s", level=logging.ERROR)
- logger = logging.getLogger('NBI')
+ logger = logging.getLogger("NBI")
# load parameters and configuration
- opts, args = getopt.getopt(sys.argv[1:], "hvu:p:",
- ["url=", "user=", "password=", "help", "version", "verbose", "no-verbose",
- "project=", "insecure", "timeout", "timeout-deploy", "timeout-configure",
- "test=", "list", "test-osm", "manual-check", "params=", 'fail-fast'])
+ opts, args = getopt.getopt(
+ sys.argv[1:],
+ "hvu:p:",
+ [
+ "url=",
+ "user=",
+ "password=",
+ "help",
+ "version",
+ "verbose",
+ "no-verbose",
+ "project=",
+ "insecure",
+ "timeout",
+ "timeout-deploy",
+ "timeout-configure",
+ "test=",
+ "list",
+ "test-osm",
+ "manual-check",
+ "params=",
+ "fail-fast",
+ ],
+ )
url = "https://localhost:9999/osm"
user = password = project = "admin"
test_osm = False
for o, a in opts:
# print("parameter:", o, a)
if o == "--version":
- print("test version " + __version__ + ' ' + version_date)
+ print("test version " + __version__ + " " + version_date)
exit()
elif o == "--list":
for test, test_class in sorted(test_classes.items()):
elif o == "--test":
for _test in a.split(","):
if _test not in test_classes:
- print("Invalid test name '{}'. Use option '--list' to show available tests".format(_test),
- file=sys.stderr)
+ print(
+ "Invalid test name '{}'. Use option '--list' to show available tests".format(
+ _test
+ ),
+ file=sys.stderr,
+ )
exit(1)
test_to_do.append(_test)
elif o == "--params":
break
text_index += 1
test_class = test_classes[test]
- test_class().run(test_rest, test_osm, manual_check, test_params.get(text_index))
+ test_class().run(
+ test_rest, test_osm, manual_check, test_params.get(text_index)
+ )
else:
for test, test_class in sorted(test_classes.items()):
if fail_fast and test_rest.failed_tests:
exit(0)
if len(sys.argv) != 4:
- print("missing parameters. Type --help for more information", file=sys.stderr)
+ print(
+ "missing parameters. Type --help for more information", file=sys.stderr
+ )
exit(1)
topic, key, message = sys.argv[1:]
host = getenv("OSMNBI_HOST", "localhost")
port = getenv("OSMNBI_PORT", "9999")
- url = "https://{host}:{port}/osm/test/message/{topic}".format(host=host, port=port, topic=topic)
+ url = "https://{host}:{port}/osm/test/message/{topic}".format(
+ host=host, port=port, topic=topic
+ )
print(url)
data = {key: message}
def norm(str):
"""Normalize string for checking"""
- return ' '.join(str.strip().split()).lower()
+ return " ".join(str.strip().split()).lower()
class TestVcaTopic(TestCase):
self.db.get_list.assert_called_with(
"vim_accounts",
- {"vca": _id, '_admin.projects_read.cont': 'project-id'},
+ {"vca": _id, "_admin.projects_read.cont": "project-id"},
)
mock_check_conflict_on_del.assert_called_with(session, _id, db_content)
context.exception,
EngineException(
"There is at least one VIM account using this vca",
- http_code=HTTPStatus.CONFLICT
- )
+ http_code=HTTPStatus.CONFLICT,
+ ),
)
self.db.get_list.assert_called_with(
"vim_accounts",
- {"vca": _id, '_admin.projects_read.cont': 'project-id'},
+ {"vca": _id, "_admin.projects_read.cont": "project-id"},
)
mock_check_conflict_on_del.assert_not_called()
class Test_ProjectTopicAuth(TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_name = "test-project-topic"
self.msg = Mock(msgbase.MsgBase())
self.auth = Mock(authconn.Authconn(None, None, None))
self.topic = ProjectTopicAuth(self.db, self.fs, self.msg, self.auth)
- self.fake_session = {"username": self.test_name, "project_id": (test_pid,), "method": None,
- "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+ self.fake_session = {
+ "username": self.test_name,
+ "project_id": (test_pid,),
+ "method": None,
+ "admin": True,
+ "force": False,
+ "public": False,
+ "allow_show_user_project_role": True,
+ }
self.topic.check_quota = Mock(return_value=None) # skip quota
def test_new_project(self):
pid1 = str(uuid4())
self.auth.get_project_list.return_value = []
self.auth.create_project.return_value = pid1
- pid2, oid = self.topic.new(rollback, self.fake_session, {"name": self.test_name, "quotas": {}})
+ pid2, oid = self.topic.new(
+ rollback, self.fake_session, {"name": self.test_name, "quotas": {}}
+ )
self.assertEqual(len(rollback), 1, "Wrong rollback length")
self.assertEqual(pid2, pid1, "Wrong project identifier")
content = self.auth.create_project.call_args[0][0]
self.assertEqual(content["name"], self.test_name, "Wrong project name")
self.assertEqual(content["quotas"], {}, "Wrong quotas")
self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
- self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+ self.assertEqual(
+ content["_admin"]["modified"],
+ content["_admin"]["created"],
+ "Wrong modification time",
+ )
with self.subTest(i=2):
rollback = []
with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
- self.topic.new(rollback, self.fake_session, {"name": "other-project-name", "quotas": {"baditems": 10}})
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {"name": "other-project-name", "quotas": {"baditems": 10}},
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'"
- .format("baditems"), norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'".format(
+ "baditems"
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_edit_project(self):
now = time()
pid = str(uuid4())
- proj = {"_id": pid, "name": self.test_name, "_admin": {"created": now, "modified": now}}
+ proj = {
+ "_id": pid,
+ "name": self.test_name,
+ "_admin": {"created": now, "modified": now},
+ }
with self.subTest(i=1):
self.auth.get_project_list.side_effect = [[proj], []]
new_name = "new-project-name"
quotas = {"vnfds": randint(0, 100), "nsds": randint(0, 100)}
- self.topic.edit(self.fake_session, pid, {"name": new_name, "quotas": quotas})
+ self.topic.edit(
+ self.fake_session, pid, {"name": new_name, "quotas": quotas}
+ )
_id, content = self.auth.update_project.call_args[0]
self.assertEqual(_id, pid, "Wrong project identifier")
self.assertEqual(content["_id"], pid, "Wrong project identifier")
self.assertEqual(content["_admin"]["created"], now, "Wrong creation time")
- self.assertGreater(content["_admin"]["modified"], now, "Wrong modification time")
+ self.assertGreater(
+ content["_admin"]["modified"], now, "Wrong modification time"
+ )
self.assertEqual(content["name"], new_name, "Wrong project name")
self.assertEqual(content["quotas"], quotas, "Wrong quotas")
with self.subTest(i=2):
quotas = {"baditems": randint(0, 100)}
self.auth.get_project_list.side_effect = [[proj], []]
with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
- self.topic.edit(self.fake_session, pid, {"name": new_name, "quotas": quotas})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'"
- .format("baditems"), norm(str(e.exception)), "Wrong exception text")
+ self.topic.edit(
+ self.fake_session, pid, {"name": new_name, "quotas": quotas}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'".format(
+ "baditems"
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_new(self):
with self.subTest(i=1):
rollback = []
pid = str(uuid4())
- with self.assertRaises(EngineException, msg="Accepted uuid as project name") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted uuid as project name"
+ ) as e:
self.topic.new(rollback, self.fake_session, {"name": pid})
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("project name '{}' cannot have an uuid format".format(pid),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "project name '{}' cannot have an uuid format".format(pid),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=2):
rollback = []
- self.auth.get_project_list.return_value = [{"_id": test_pid, "name": self.test_name}]
- with self.assertRaises(EngineException, msg="Accepted existing project name") as e:
+ self.auth.get_project_list.return_value = [
+ {"_id": test_pid, "name": self.test_name}
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing project name"
+ ) as e:
self.topic.new(rollback, self.fake_session, {"name": self.test_name})
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("project '{}' exists".format(self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "project '{}' exists".format(self.test_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_edit(self):
with self.subTest(i=1):
- self.auth.get_project_list.return_value = [{"_id": test_pid, "name": self.test_name}]
+ self.auth.get_project_list.return_value = [
+ {"_id": test_pid, "name": self.test_name}
+ ]
new_name = str(uuid4())
- with self.assertRaises(EngineException, msg="Accepted uuid as project name") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted uuid as project name"
+ ) as e:
self.topic.edit(self.fake_session, test_pid, {"name": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("project name '{}' cannot have an uuid format".format(new_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "project name '{}' cannot have an uuid format".format(new_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=2):
pid = str(uuid4())
self.auth.get_project_list.return_value = [{"_id": pid, "name": "admin"}]
- with self.assertRaises(EngineException, msg="Accepted renaming of project 'admin'") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted renaming of project 'admin'"
+ ) as e:
self.topic.edit(self.fake_session, pid, {"name": "new-name"})
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("you cannot rename project 'admin'",
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "you cannot rename project 'admin'",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=3):
new_name = "new-project-name"
- self.auth.get_project_list.side_effect = [[{"_id": test_pid, "name": self.test_name}],
- [{"_id": str(uuid4()), "name": new_name}]]
- with self.assertRaises(EngineException, msg="Accepted existing project name") as e:
+ self.auth.get_project_list.side_effect = [
+ [{"_id": test_pid, "name": self.test_name}],
+ [{"_id": str(uuid4()), "name": new_name}],
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing project name"
+ ) as e:
self.topic.edit(self.fake_session, pid, {"name": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("project '{}' is already used".format(new_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "project '{}' is already used".format(new_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_delete_project(self):
with self.subTest(i=1):
pid = str(uuid4())
- self.auth.get_project.return_value = {"_id": pid, "name": "other-project-name"}
+ self.auth.get_project.return_value = {
+ "_id": pid,
+ "name": "other-project-name",
+ }
self.auth.delete_project.return_value = {"deleted": 1}
self.auth.get_user_list.return_value = []
self.db.get_list.return_value = []
rc = self.topic.delete(self.fake_session, pid)
self.assertEqual(rc, {"deleted": 1}, "Wrong project deletion return info")
- self.assertEqual(self.auth.get_project.call_args[0][0], pid, "Wrong project identifier")
- self.assertEqual(self.auth.delete_project.call_args[0][0], pid, "Wrong project identifier")
+ self.assertEqual(
+ self.auth.get_project.call_args[0][0], pid, "Wrong project identifier"
+ )
+ self.assertEqual(
+ self.auth.delete_project.call_args[0][0],
+ pid,
+ "Wrong project identifier",
+ )
def test_conflict_on_del(self):
with self.subTest(i=1):
- self.auth.get_project.return_value = {"_id": test_pid, "name": self.test_name}
- with self.assertRaises(EngineException, msg="Accepted deletion of own project") as e:
+ self.auth.get_project.return_value = {
+ "_id": test_pid,
+ "name": self.test_name,
+ }
+ with self.assertRaises(
+ EngineException, msg="Accepted deletion of own project"
+ ) as e:
self.topic.delete(self.fake_session, self.test_name)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("you cannot delete your own project", norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "you cannot delete your own project",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=2):
self.auth.get_project.return_value = {"_id": str(uuid4()), "name": "admin"}
- with self.assertRaises(EngineException, msg="Accepted deletion of project 'admin'") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted deletion of project 'admin'"
+ ) as e:
self.topic.delete(self.fake_session, "admin")
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("you cannot delete project 'admin'", norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "you cannot delete project 'admin'",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=3):
pid = str(uuid4())
name = "other-project-name"
self.auth.get_project.return_value = {"_id": pid, "name": name}
- self.auth.get_user_list.return_value = [{"_id": str(uuid4()), "username": self.test_name,
- "project_role_mappings": [{"project": pid, "role": str(uuid4())}]}]
- with self.assertRaises(EngineException, msg="Accepted deletion of used project") as e:
+ self.auth.get_user_list.return_value = [
+ {
+ "_id": str(uuid4()),
+ "username": self.test_name,
+ "project_role_mappings": [{"project": pid, "role": str(uuid4())}],
+ }
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted deletion of used project"
+ ) as e:
self.topic.delete(self.fake_session, pid)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("project '{}' ({}) is being used by user '{}'".format(name, pid, self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "project '{}' ({}) is being used by user '{}'".format(
+ name, pid, self.test_name
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=4):
self.auth.get_user_list.return_value = []
- self.db.get_list.return_value = [{"_id": str(uuid4()), "id": self.test_name,
- "_admin": {"projects_read": [pid], "projects_write": []}}]
- with self.assertRaises(EngineException, msg="Accepted deletion of used project") as e:
+ self.db.get_list.return_value = [
+ {
+ "_id": str(uuid4()),
+ "id": self.test_name,
+ "_admin": {"projects_read": [pid], "projects_write": []},
+ }
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted deletion of used project"
+ ) as e:
self.topic.delete(self.fake_session, pid)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("project '{}' ({}) is being used by {} '{}'"
- .format(name, pid, "vnf descriptor", self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "project '{}' ({}) is being used by {} '{}'".format(
+ name, pid, "vnf descriptor", self.test_name
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
class Test_RoleTopicAuth(TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_name = "test-role-topic"
self.auth = Mock(authconn.Authconn(None, None, None))
self.auth.role_permissions = self.test_operations
self.topic = RoleTopicAuth(self.db, self.fs, self.msg, self.auth)
- self.fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
- "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+ self.fake_session = {
+ "username": test_name,
+ "project_id": (test_pid,),
+ "method": None,
+ "admin": True,
+ "force": False,
+ "public": False,
+ "allow_show_user_project_role": True,
+ }
self.topic.check_quota = Mock(return_value=None) # skip quota
def test_new_role(self):
perms_out = {"default": False, "admin": False, "tokens": True}
self.auth.get_role_list.return_value = []
self.auth.create_role.return_value = rid1
- rid2, oid = self.topic.new(rollback, self.fake_session, {"name": self.test_name, "permissions": perms_in})
+ rid2, oid = self.topic.new(
+ rollback,
+ self.fake_session,
+ {"name": self.test_name, "permissions": perms_in},
+ )
self.assertEqual(len(rollback), 1, "Wrong rollback length")
self.assertEqual(rid2, rid1, "Wrong project identifier")
content = self.auth.create_role.call_args[0][0]
self.assertEqual(content["name"], self.test_name, "Wrong role name")
self.assertEqual(content["permissions"], perms_out, "Wrong permissions")
self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
- self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+ self.assertEqual(
+ content["_admin"]["modified"],
+ content["_admin"]["created"],
+ "Wrong modification time",
+ )
with self.subTest(i=2):
rollback = []
- with self.assertRaises(EngineException, msg="Accepted wrong permissions") as e:
- self.topic.new(rollback, self.fake_session,
- {"name": "other-role-name", "permissions": {"projects": True}})
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong permissions"
+ ) as e:
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {"name": "other-role-name", "permissions": {"projects": True}},
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("invalid permission '{}'".format("projects"),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "invalid permission '{}'".format("projects"),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_edit_role(self):
now = time()
rid = str(uuid4())
- role = {"_id": rid, "name": self.test_name, "permissions": {"tokens": True},
- "_admin": {"created": now, "modified": now}}
+ role = {
+ "_id": rid,
+ "name": self.test_name,
+ "permissions": {"tokens": True},
+ "_admin": {"created": now, "modified": now},
+ }
with self.subTest(i=1):
self.auth.get_role_list.side_effect = [[role], []]
self.auth.get_role.return_value = role
new_name = "new-role-name"
perms_in = {"tokens": False, "tokens:get": True}
- perms_out = {"default": False, "admin": False, "tokens": False, "tokens:get": True}
- self.topic.edit(self.fake_session, rid, {"name": new_name, "permissions": perms_in})
+ perms_out = {
+ "default": False,
+ "admin": False,
+ "tokens": False,
+ "tokens:get": True,
+ }
+ self.topic.edit(
+ self.fake_session, rid, {"name": new_name, "permissions": perms_in}
+ )
content = self.auth.update_role.call_args[0][0]
self.assertEqual(content["_id"], rid, "Wrong role identifier")
self.assertEqual(content["_admin"]["created"], now, "Wrong creation time")
- self.assertGreater(content["_admin"]["modified"], now, "Wrong modification time")
+ self.assertGreater(
+ content["_admin"]["modified"], now, "Wrong modification time"
+ )
self.assertEqual(content["name"], new_name, "Wrong role name")
self.assertEqual(content["permissions"], perms_out, "Wrong permissions")
with self.subTest(i=2):
new_name = "other-role-name"
perms_in = {"tokens": False, "tokens:post": True}
self.auth.get_role_list.side_effect = [[role], []]
- with self.assertRaises(EngineException, msg="Accepted wrong permissions") as e:
- self.topic.edit(self.fake_session, rid, {"name": new_name, "permissions": perms_in})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("invalid permission '{}'".format("tokens:post"),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong permissions"
+ ) as e:
+ self.topic.edit(
+ self.fake_session, rid, {"name": new_name, "permissions": perms_in}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "invalid permission '{}'".format("tokens:post"),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_delete_role(self):
with self.subTest(i=1):
self.auth.get_user_list.return_value = []
rc = self.topic.delete(self.fake_session, rid)
self.assertEqual(rc, {"deleted": 1}, "Wrong role deletion return info")
- self.assertEqual(self.auth.get_role_list.call_args[0][0]["_id"], rid, "Wrong role identifier")
- self.assertEqual(self.auth.get_role.call_args[0][0], rid, "Wrong role identifier")
- self.assertEqual(self.auth.delete_role.call_args[0][0], rid, "Wrong role identifier")
+ self.assertEqual(
+ self.auth.get_role_list.call_args[0][0]["_id"],
+ rid,
+ "Wrong role identifier",
+ )
+ self.assertEqual(
+ self.auth.get_role.call_args[0][0], rid, "Wrong role identifier"
+ )
+ self.assertEqual(
+ self.auth.delete_role.call_args[0][0], rid, "Wrong role identifier"
+ )
def test_conflict_on_new(self):
with self.subTest(i=1):
rollback = []
rid = str(uuid4())
- with self.assertRaises(EngineException, msg="Accepted uuid as role name") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted uuid as role name"
+ ) as e:
self.topic.new(rollback, self.fake_session, {"name": rid})
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("role name '{}' cannot have an uuid format".format(rid),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "role name '{}' cannot have an uuid format".format(rid),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=2):
rollback = []
- self.auth.get_role_list.return_value = [{"_id": str(uuid4()), "name": self.test_name}]
- with self.assertRaises(EngineException, msg="Accepted existing role name") as e:
+ self.auth.get_role_list.return_value = [
+ {"_id": str(uuid4()), "name": self.test_name}
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing role name"
+ ) as e:
self.topic.new(rollback, self.fake_session, {"name": self.test_name})
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("role name '{}' exists".format(self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "role name '{}' exists".format(self.test_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_edit(self):
rid = str(uuid4())
with self.subTest(i=1):
- self.auth.get_role_list.return_value = [{"_id": rid, "name": self.test_name, "permissions": {}}]
+ self.auth.get_role_list.return_value = [
+ {"_id": rid, "name": self.test_name, "permissions": {}}
+ ]
new_name = str(uuid4())
- with self.assertRaises(EngineException, msg="Accepted uuid as role name") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted uuid as role name"
+ ) as e:
self.topic.edit(self.fake_session, rid, {"name": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("role name '{}' cannot have an uuid format".format(new_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "role name '{}' cannot have an uuid format".format(new_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
for i, role_name in enumerate(["system_admin", "project_admin"], start=2):
with self.subTest(i=i):
rid = str(uuid4())
- self.auth.get_role.return_value = {"_id": rid, "name": role_name, "permissions": {}}
- with self.assertRaises(EngineException, msg="Accepted renaming of role '{}'".format(role_name)) as e:
+ self.auth.get_role.return_value = {
+ "_id": rid,
+ "name": role_name,
+ "permissions": {},
+ }
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted renaming of role '{}'".format(role_name),
+ ) as e:
self.topic.edit(self.fake_session, rid, {"name": "new-name"})
- self.assertEqual(e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code")
- self.assertIn("you cannot rename role '{}'".format(role_name),
- norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=i+1):
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.FORBIDDEN,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "you cannot rename role '{}'".format(role_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=i + 1):
new_name = "new-role-name"
- self.auth.get_role_list.side_effect = [[{"_id": rid, "name": self.test_name, "permissions": {}}],
- [{"_id": str(uuid4()), "name": new_name, "permissions": {}}]]
- self.auth.get_role.return_value = {"_id": rid, "name": self.test_name, "permissions": {}}
- with self.assertRaises(EngineException, msg="Accepted existing role name") as e:
+ self.auth.get_role_list.side_effect = [
+ [{"_id": rid, "name": self.test_name, "permissions": {}}],
+ [{"_id": str(uuid4()), "name": new_name, "permissions": {}}],
+ ]
+ self.auth.get_role.return_value = {
+ "_id": rid,
+ "name": self.test_name,
+ "permissions": {},
+ }
+ with self.assertRaises(
+ EngineException, msg="Accepted existing role name"
+ ) as e:
self.topic.edit(self.fake_session, rid, {"name": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("role name '{}' exists".format(new_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "role name '{}' exists".format(new_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_del(self):
for i, role_name in enumerate(["system_admin", "project_admin"], start=1):
role = {"_id": rid, "name": role_name}
self.auth.get_role_list.return_value = [role]
self.auth.get_role.return_value = role
- with self.assertRaises(EngineException, msg="Accepted deletion of role '{}'".format(role_name)) as e:
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted deletion of role '{}'".format(role_name),
+ ) as e:
self.topic.delete(self.fake_session, rid)
- self.assertEqual(e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code")
- self.assertIn("you cannot delete role '{}'".format(role_name),
- norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=i+1):
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.FORBIDDEN,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "you cannot delete role '{}'".format(role_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=i + 1):
rid = str(uuid4())
name = "other-role-name"
role = {"_id": rid, "name": name}
self.auth.get_role_list.return_value = [role]
self.auth.get_role.return_value = role
- self.auth.get_user_list.return_value = [{"_id": str(uuid4()), "username": self.test_name,
- "project_role_mappings": [{"project": str(uuid4()), "role": rid}]}]
- with self.assertRaises(EngineException, msg="Accepted deletion of used role") as e:
+ self.auth.get_user_list.return_value = [
+ {
+ "_id": str(uuid4()),
+ "username": self.test_name,
+ "project_role_mappings": [{"project": str(uuid4()), "role": rid}],
+ }
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted deletion of used role"
+ ) as e:
self.topic.delete(self.fake_session, rid)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("role '{}' ({}) is being used by user '{}'".format(name, rid, self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "role '{}' ({}) is being used by user '{}'".format(
+ name, rid, self.test_name
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
class Test_UserTopicAuth(TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_name = "test-user-topic"
self.msg = Mock(msgbase.MsgBase())
self.auth = Mock(authconn.Authconn(None, None, None))
self.topic = UserTopicAuth(self.db, self.fs, self.msg, self.auth)
- self.fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
- "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+ self.fake_session = {
+ "username": test_name,
+ "project_id": (test_pid,),
+ "method": None,
+ "admin": True,
+ "force": False,
+ "public": False,
+ "allow_show_user_project_role": True,
+ }
self.topic.check_quota = Mock(return_value=None) # skip quota
def test_new_user(self):
self.auth.get_role.return_value = {"_id": rid, "name": "some_role"}
prms_in = [{"project": "some_project", "role": "some_role"}]
prms_out = [{"project": pid, "role": rid}]
- uid2, oid = self.topic.new(rollback, self.fake_session, {"username": self.test_name,
- "password": self.test_name,
- "project_role_mappings": prms_in
- })
+ uid2, oid = self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": self.test_name,
+ "password": self.test_name,
+ "project_role_mappings": prms_in,
+ },
+ )
self.assertEqual(len(rollback), 1, "Wrong rollback length")
self.assertEqual(uid2, uid1, "Wrong project identifier")
content = self.auth.create_user.call_args[0][0]
self.assertEqual(content["username"], self.test_name, "Wrong project name")
self.assertEqual(content["password"], self.test_name, "Wrong password")
- self.assertEqual(content["project_role_mappings"], prms_out, "Wrong project-role mappings")
+ self.assertEqual(
+ content["project_role_mappings"],
+ prms_out,
+ "Wrong project-role mappings",
+ )
self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
- self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+ self.assertEqual(
+ content["_admin"]["modified"],
+ content["_admin"]["created"],
+ "Wrong modification time",
+ )
with self.subTest(i=2):
rollback = []
def_rid = str(uuid4())
self.auth.get_role.return_value = def_role
self.auth.get_role_list.return_value = [def_role]
prms_out = [{"project": pid, "role": def_rid}]
- uid2, oid = self.topic.new(rollback, self.fake_session, {"username": self.test_name,
- "password": self.test_name,
- "projects": ["some_project"]
- })
+ uid2, oid = self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": self.test_name,
+ "password": self.test_name,
+ "projects": ["some_project"],
+ },
+ )
self.assertEqual(len(rollback), 1, "Wrong rollback length")
self.assertEqual(uid2, uid1, "Wrong project identifier")
content = self.auth.create_user.call_args[0][0]
self.assertEqual(content["username"], self.test_name, "Wrong project name")
self.assertEqual(content["password"], self.test_name, "Wrong password")
- self.assertEqual(content["project_role_mappings"], prms_out, "Wrong project-role mappings")
+ self.assertEqual(
+ content["project_role_mappings"],
+ prms_out,
+ "Wrong project-role mappings",
+ )
self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
- self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+ self.assertEqual(
+ content["_admin"]["modified"],
+ content["_admin"]["created"],
+ "Wrong modification time",
+ )
with self.subTest(i=3):
rollback = []
- with self.assertRaises(EngineException, msg="Accepted wrong project-role mappings") as e:
- self.topic.new(rollback, self.fake_session, {"username": "other-project-name",
- "password": "other-password",
- "project_role_mappings": [{}]
- })
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong project-role mappings"
+ ) as e:
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": "other-project-name",
+ "password": "other-password",
+ "project_role_mappings": [{}],
+ },
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("format error at '{}' '{}'"
- .format("project_role_mappings:{}", "'{}' is a required property").format(0, "project"),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "format error at '{}' '{}'".format(
+ "project_role_mappings:{}", "'{}' is a required property"
+ ).format(0, "project"),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=4):
rollback = []
with self.assertRaises(EngineException, msg="Accepted wrong projects") as e:
- self.topic.new(rollback, self.fake_session, {"username": "other-project-name",
- "password": "other-password",
- "projects": []
- })
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": "other-project-name",
+ "password": "other-password",
+ "projects": [],
+ },
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("format error at '{}' '{}'" .format("projects", "{} is too short").format([]),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "format error at '{}' '{}'".format(
+ "projects", "{} is too short"
+ ).format([]),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_edit_user(self):
now = time()
uid = str(uuid4())
pid1 = str(uuid4())
rid1 = str(uuid4())
- prms = [{"project": pid1, "project_name": "project-1", "role": rid1, "role_name": "role-1"}]
- user = {"_id": uid, "username": self.test_name, "project_role_mappings": prms,
- "_admin": {"created": now, "modified": now}}
+ prms = [
+ {
+ "project": pid1,
+ "project_name": "project-1",
+ "role": rid1,
+ "role_name": "role-1",
+ }
+ ]
+ user = {
+ "_id": uid,
+ "username": self.test_name,
+ "project_role_mappings": prms,
+ "_admin": {"created": now, "modified": now},
+ }
with self.subTest(i=1):
self.auth.get_user_list.side_effect = [[user], []]
self.auth.get_user.return_value = user
pid2 = str(uuid4())
rid2 = str(uuid4())
- self.auth.get_project.side_effect = [{"_id": pid2, "name": "project-2"},
- {"_id": pid1, "name": "project-1"}]
- self.auth.get_role.side_effect = [{"_id": rid2, "name": "role-2"},
- {"_id": rid1, "name": "role-1"}]
+ self.auth.get_project.side_effect = [
+ {"_id": pid2, "name": "project-2"},
+ {"_id": pid1, "name": "project-1"},
+ ]
+ self.auth.get_role.side_effect = [
+ {"_id": rid2, "name": "role-2"},
+ {"_id": rid1, "name": "role-1"},
+ ]
new_name = "new-user-name"
new_pasw = "new-password"
add_prms = [{"project": pid2, "role": rid2}]
rem_prms = [{"project": pid1, "role": rid1}]
- self.topic.edit(self.fake_session, uid, {"username": new_name, "password": new_pasw,
- "add_project_role_mappings": add_prms,
- "remove_project_role_mappings": rem_prms
- })
+ self.topic.edit(
+ self.fake_session,
+ uid,
+ {
+ "username": new_name,
+ "password": new_pasw,
+ "add_project_role_mappings": add_prms,
+ "remove_project_role_mappings": rem_prms,
+ },
+ )
content = self.auth.update_user.call_args[0][0]
self.assertEqual(content["_id"], uid, "Wrong user identifier")
self.assertEqual(content["username"], new_name, "Wrong user name")
self.assertEqual(content["password"], new_pasw, "Wrong user password")
- self.assertEqual(content["add_project_role_mappings"], add_prms, "Wrong project-role mappings to add")
- self.assertEqual(content["remove_project_role_mappings"], prms, "Wrong project-role mappings to remove")
+ self.assertEqual(
+ content["add_project_role_mappings"],
+ add_prms,
+ "Wrong project-role mappings to add",
+ )
+ self.assertEqual(
+ content["remove_project_role_mappings"],
+ prms,
+ "Wrong project-role mappings to remove",
+ )
with self.subTest(i=2):
new_name = "other-user-name"
new_prms = [{}]
self.auth.get_role_list.side_effect = [[user], []]
self.auth.get_user_list.side_effect = [[user]]
- with self.assertRaises(EngineException, msg="Accepted wrong project-role mappings") as e:
- self.topic.edit(self.fake_session, uid, {"username": new_name, "project_role_mappings": new_prms})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("format error at '{}' '{}'"
- .format("project_role_mappings:{}", "'{}' is a required property").format(0, "project"),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong project-role mappings"
+ ) as e:
+ self.topic.edit(
+ self.fake_session,
+ uid,
+ {"username": new_name, "project_role_mappings": new_prms},
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "format error at '{}' '{}'".format(
+ "project_role_mappings:{}", "'{}' is a required property"
+ ).format(0, "project"),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_delete_user(self):
with self.subTest(i=1):
uid = str(uuid4())
self.fake_session["username"] = self.test_name
- user = user = {"_id": uid, "username": "other-user-name", "project_role_mappings": []}
+ user = user = {
+ "_id": uid,
+ "username": "other-user-name",
+ "project_role_mappings": [],
+ }
self.auth.get_user.return_value = user
self.auth.delete_user.return_value = {"deleted": 1}
rc = self.topic.delete(self.fake_session, uid)
self.assertEqual(rc, {"deleted": 1}, "Wrong user deletion return info")
- self.assertEqual(self.auth.get_user.call_args[0][0], uid, "Wrong user identifier")
- self.assertEqual(self.auth.delete_user.call_args[0][0], uid, "Wrong user identifier")
+ self.assertEqual(
+ self.auth.get_user.call_args[0][0], uid, "Wrong user identifier"
+ )
+ self.assertEqual(
+ self.auth.delete_user.call_args[0][0], uid, "Wrong user identifier"
+ )
def test_conflict_on_new(self):
with self.subTest(i=1):
rollback = []
uid = str(uuid4())
- with self.assertRaises(EngineException, msg="Accepted uuid as username") as e:
- self.topic.new(rollback, self.fake_session, {"username": uid, "password": self.test_name,
- "projects": [test_pid]})
+ with self.assertRaises(
+ EngineException, msg="Accepted uuid as username"
+ ) as e:
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": uid,
+ "password": self.test_name,
+ "projects": [test_pid],
+ },
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("username '{}' cannot have a uuid format".format(uid),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "username '{}' cannot have a uuid format".format(uid),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=2):
rollback = []
- self.auth.get_user_list.return_value = [{"_id": str(uuid4()), "username": self.test_name}]
- with self.assertRaises(EngineException, msg="Accepted existing username") as e:
- self.topic.new(rollback, self.fake_session, {"username": self.test_name, "password": self.test_name,
- "projects": [test_pid]})
+ self.auth.get_user_list.return_value = [
+ {"_id": str(uuid4()), "username": self.test_name}
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing username"
+ ) as e:
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": self.test_name,
+ "password": self.test_name,
+ "projects": [test_pid],
+ },
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("username '{}' is already used".format(self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "username '{}' is already used".format(self.test_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=3):
rollback = []
self.auth.get_user_list.return_value = []
self.auth.get_role_list.side_effect = [[], []]
- with self.assertRaises(AuthconnNotFoundException, msg="Accepted user without default role") as e:
- self.topic.new(rollback, self.fake_session, {"username": self.test_name, "password": self.test_name,
- "projects": [str(uuid4())]})
+ with self.assertRaises(
+ AuthconnNotFoundException, msg="Accepted user without default role"
+ ) as e:
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "username": self.test_name,
+ "password": self.test_name,
+ "projects": [str(uuid4())],
+ },
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
- self.assertIn("can't find default role for user '{}'".format(self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "can't find default role for user '{}'".format(self.test_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_edit(self):
uid = str(uuid4())
with self.subTest(i=1):
- self.auth.get_user_list.return_value = [{"_id": uid, "username": self.test_name}]
+ self.auth.get_user_list.return_value = [
+ {"_id": uid, "username": self.test_name}
+ ]
new_name = str(uuid4())
- with self.assertRaises(EngineException, msg="Accepted uuid as username") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted uuid as username"
+ ) as e:
self.topic.edit(self.fake_session, uid, {"username": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("username '{}' cannot have an uuid format".format(new_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "username '{}' cannot have an uuid format".format(new_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=2):
- self.auth.get_user_list.return_value = [{"_id": uid, "username": self.test_name}]
+ self.auth.get_user_list.return_value = [
+ {"_id": uid, "username": self.test_name}
+ ]
self.auth.get_role_list.side_effect = [[], []]
- with self.assertRaises(AuthconnNotFoundException, msg="Accepted user without default role") as e:
+ with self.assertRaises(
+ AuthconnNotFoundException, msg="Accepted user without default role"
+ ) as e:
self.topic.edit(self.fake_session, uid, {"projects": [str(uuid4())]})
- self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
- self.assertIn("can't find a default role for user '{}'".format(self.test_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "can't find a default role for user '{}'".format(self.test_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=3):
admin_uid = str(uuid4())
- self.auth.get_user_list.return_value = [{"_id": admin_uid, "username": "admin"}]
- with self.assertRaises(EngineException, msg="Accepted removing system_admin role from admin user") as e:
- self.topic.edit(self.fake_session, admin_uid,
- {"remove_project_role_mappings": [{"project": "admin", "role": "system_admin"}]})
- self.assertEqual(e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code")
- self.assertIn("you cannot remove system_admin role from admin user",
- norm(str(e.exception)), "Wrong exception text")
+ self.auth.get_user_list.return_value = [
+ {"_id": admin_uid, "username": "admin"}
+ ]
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted removing system_admin role from admin user",
+ ) as e:
+ self.topic.edit(
+ self.fake_session,
+ admin_uid,
+ {
+ "remove_project_role_mappings": [
+ {"project": "admin", "role": "system_admin"}
+ ]
+ },
+ )
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "you cannot remove system_admin role from admin user",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
with self.subTest(i=4):
new_name = "new-user-name"
- self.auth.get_user_list.side_effect = [[{"_id": uid, "name": self.test_name}],
- [{"_id": str(uuid4()), "name": new_name}]]
- with self.assertRaises(EngineException, msg="Accepted existing username") as e:
+ self.auth.get_user_list.side_effect = [
+ [{"_id": uid, "name": self.test_name}],
+ [{"_id": str(uuid4()), "name": new_name}],
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing username"
+ ) as e:
self.topic.edit(self.fake_session, uid, {"username": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("username '{}' is already used".format(new_name),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "username '{}' is already used".format(new_name),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_del(self):
with self.subTest(i=1):
uid = str(uuid4())
self.fake_session["username"] = self.test_name
- user = user = {"_id": uid, "username": self.test_name, "project_role_mappings": []}
+ user = {
+ "_id": uid,
+ "username": self.test_name,
+ "project_role_mappings": [],
+ }
self.auth.get_user.return_value = user
- with self.assertRaises(EngineException, msg="Accepted deletion of own user") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted deletion of own user"
+ ) as e:
self.topic.delete(self.fake_session, uid)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("you cannot delete your own login user", norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "you cannot delete your own login user",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
class Test_CommonVimWimSdn(TestCase):
-
@classmethod
def setUpClass(cls):
- cls.test_name = "test-cim-topic" # CIM = Common Infrastructure Manager
+ cls.test_name = "test-cim-topic" # CIM = Common Infrastructure Manager
def setUp(self):
self.db = Mock(dbbase.DbBase())
self.topic.topic = "wims"
self.topic.schema_new = validation.wim_account_new_schema
self.topic.schema_edit = validation.wim_account_edit_schema
- self.fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
- "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+ self.fake_session = {
+ "username": test_name,
+ "project_id": (test_pid,),
+ "method": None,
+ "admin": True,
+ "force": False,
+ "public": False,
+ "allow_show_user_project_role": True,
+ }
self.topic.check_quota = Mock(return_value=None) # skip quota
def test_new_cvws(self):
test_type = "fake"
self.db.get_one.return_value = None
self.db.create.side_effect = lambda self, content: content["_id"]
- cid, oid = self.topic.new(rollback, self.fake_session,
- {"name": self.test_name, "wim_url": test_url, "wim_type": test_type})
+ cid, oid = self.topic.new(
+ rollback,
+ self.fake_session,
+ {"name": self.test_name, "wim_url": test_url, "wim_type": test_type},
+ )
self.assertEqual(len(rollback), 1, "Wrong rollback length")
args = self.db.create.call_args[0]
content = args[1]
self.assertEqual(content["schema_version"], "1.11", "Wrong schema version")
self.assertEqual(content["op_id"], oid, "Wrong operation identifier")
self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
- self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
- self.assertEqual(content["_admin"]["operationalState"], "PROCESSING", "Wrong operational state")
- self.assertEqual(content["_admin"]["projects_read"], [test_pid], "Wrong read-only projects")
- self.assertEqual(content["_admin"]["projects_write"], [test_pid], "Wrong read/write projects")
- self.assertIsNone(content["_admin"]["current_operation"], "Wrong current operation")
- self.assertEqual(len(content["_admin"]["operations"]), 1, "Wrong number of operations")
+ self.assertEqual(
+ content["_admin"]["modified"],
+ content["_admin"]["created"],
+ "Wrong modification time",
+ )
+ self.assertEqual(
+ content["_admin"]["operationalState"],
+ "PROCESSING",
+ "Wrong operational state",
+ )
+ self.assertEqual(
+ content["_admin"]["projects_read"],
+ [test_pid],
+ "Wrong read-only projects",
+ )
+ self.assertEqual(
+ content["_admin"]["projects_write"],
+ [test_pid],
+ "Wrong read/write projects",
+ )
+ self.assertIsNone(
+ content["_admin"]["current_operation"], "Wrong current operation"
+ )
+ self.assertEqual(
+ len(content["_admin"]["operations"]), 1, "Wrong number of operations"
+ )
operation = content["_admin"]["operations"][0]
- self.assertEqual(operation["lcmOperationType"], "create", "Wrong operation type")
- self.assertEqual(operation["operationState"], "PROCESSING", "Wrong operation state")
- self.assertGreater(operation["startTime"], content["_admin"]["created"], "Wrong operation start time")
- self.assertGreater(operation["statusEnteredTime"], content["_admin"]["created"],
- "Wrong operation status enter time")
- self.assertEqual(operation["detailed-status"], "", "Wrong operation detailed status info")
- self.assertIsNone(operation["operationParams"], "Wrong operation parameters")
+ self.assertEqual(
+ operation["lcmOperationType"], "create", "Wrong operation type"
+ )
+ self.assertEqual(
+ operation["operationState"], "PROCESSING", "Wrong operation state"
+ )
+ self.assertGreater(
+ operation["startTime"],
+ content["_admin"]["created"],
+ "Wrong operation start time",
+ )
+ self.assertGreater(
+ operation["statusEnteredTime"],
+ content["_admin"]["created"],
+ "Wrong operation status enter time",
+ )
+ self.assertEqual(
+ operation["detailed-status"], "", "Wrong operation detailed status info"
+ )
+ self.assertIsNone(
+ operation["operationParams"], "Wrong operation parameters"
+ )
# This test is disabled. From Feature 8030 we admit all WIM/SDN types
# with self.subTest(i=2):
# rollback = []
test_url = "http://0.0.0.0:0"
test_type = "fake"
self.db.get_one.return_value = {"_id": str(uuid4()), "name": self.test_name}
- with self.assertRaises(EngineException, msg="Accepted existing CIM name") as e:
- self.topic.new(rollback, self.fake_session,
- {"name": self.test_name, "wim_url": test_url, "wim_type": test_type})
+ with self.assertRaises(
+ EngineException, msg="Accepted existing CIM name"
+ ) as e:
+ self.topic.new(
+ rollback,
+ self.fake_session,
+ {
+ "name": self.test_name,
+ "wim_url": test_url,
+ "wim_type": test_type,
+ },
+ )
self.assertEqual(len(rollback), 0, "Wrong rollback length")
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("name '{}' already exists for {}".format(self.test_name, self.topic.topic),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "name '{}' already exists for {}".format(
+ self.test_name, self.topic.topic
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_edit_cvws(self):
now = time()
cid = str(uuid4())
test_url = "http://0.0.0.0:0"
test_type = "fake"
- cvws = {"_id": cid, "name": self.test_name, "wim_url": test_url, "wim_type": test_type,
- "_admin": {"created": now, "modified": now, "operations": [{"lcmOperationType": "create"}]}}
+ cvws = {
+ "_id": cid,
+ "name": self.test_name,
+ "wim_url": test_url,
+ "wim_type": test_type,
+ "_admin": {
+ "created": now,
+ "modified": now,
+ "operations": [{"lcmOperationType": "create"}],
+ },
+ }
with self.subTest(i=1):
new_name = "new-cim-name"
new_url = "https://1.1.1.1:1"
self.db.get_one.side_effect = [cvws, None]
self.db.replace.return_value = {"updated": 1}
# self.db.encrypt.side_effect = [b64str(), b64str()]
- self.topic.edit(self.fake_session, cid, {"name": new_name, "wim_url": new_url, "wim_type": new_type})
+ self.topic.edit(
+ self.fake_session,
+ cid,
+ {"name": new_name, "wim_url": new_url, "wim_type": new_type},
+ )
args = self.db.replace.call_args[0]
content = args[2]
self.assertEqual(args[0], self.topic.topic, "Wrong topic")
self.assertEqual(content["wim_type"], new_type, "Wrong CIM type")
self.assertEqual(content["wim_url"], new_url, "Wrong URL")
self.assertEqual(content["_admin"]["created"], now, "Wrong creation time")
- self.assertGreater(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
- self.assertEqual(len(content["_admin"]["operations"]), 2, "Wrong number of operations")
+ self.assertGreater(
+ content["_admin"]["modified"],
+ content["_admin"]["created"],
+ "Wrong modification time",
+ )
+ self.assertEqual(
+ len(content["_admin"]["operations"]), 2, "Wrong number of operations"
+ )
operation = content["_admin"]["operations"][1]
- self.assertEqual(operation["lcmOperationType"], "edit", "Wrong operation type")
- self.assertEqual(operation["operationState"], "PROCESSING", "Wrong operation state")
- self.assertGreater(operation["startTime"], content["_admin"]["modified"], "Wrong operation start time")
- self.assertGreater(operation["statusEnteredTime"], content["_admin"]["modified"],
- "Wrong operation status enter time")
- self.assertEqual(operation["detailed-status"], "", "Wrong operation detailed status info")
- self.assertIsNone(operation["operationParams"], "Wrong operation parameters")
+ self.assertEqual(
+ operation["lcmOperationType"], "edit", "Wrong operation type"
+ )
+ self.assertEqual(
+ operation["operationState"], "PROCESSING", "Wrong operation state"
+ )
+ self.assertGreater(
+ operation["startTime"],
+ content["_admin"]["modified"],
+ "Wrong operation start time",
+ )
+ self.assertGreater(
+ operation["statusEnteredTime"],
+ content["_admin"]["modified"],
+ "Wrong operation status enter time",
+ )
+ self.assertEqual(
+ operation["detailed-status"], "", "Wrong operation detailed status info"
+ )
+ self.assertIsNone(
+ operation["operationParams"], "Wrong operation parameters"
+ )
with self.subTest(i=2):
self.db.get_one.side_effect = [cvws]
with self.assertRaises(EngineException, msg="Accepted wrong property") as e:
- self.topic.edit(self.fake_session, str(uuid4()), {"name": "new-name", "extra_prop": "anything"})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn("format error '{}'".format("additional properties are not allowed ('{}' was unexpected)").
- format("extra_prop"),
- norm(str(e.exception)), "Wrong exception text")
+ self.topic.edit(
+ self.fake_session,
+ str(uuid4()),
+ {"name": "new-name", "extra_prop": "anything"},
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "format error '{}'".format(
+ "additional properties are not allowed ('{}' was unexpected)"
+ ).format("extra_prop"),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_conflict_on_edit(self):
with self.subTest(i=1):
cid = str(uuid4())
new_name = "new-cim-name"
- self.db.get_one.side_effect = [{"_id": cid, "name": self.test_name},
- {"_id": str(uuid4()), "name": new_name}]
- with self.assertRaises(EngineException, msg="Accepted existing CIM name") as e:
+ self.db.get_one.side_effect = [
+ {"_id": cid, "name": self.test_name},
+ {"_id": str(uuid4()), "name": new_name},
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing CIM name"
+ ) as e:
self.topic.edit(self.fake_session, cid, {"name": new_name})
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("name '{}' already exists for {}".format(new_name, self.topic.topic),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "name '{}' already exists for {}".format(new_name, self.topic.topic),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_delete_cvws(self):
cid = str(uuid4())
cvws = {"_id": cid, "name": self.test_name}
self.db.get_list.return_value = []
with self.subTest(i=1):
- cvws["_admin"] = {"projects_read": [test_pid, ro_pid, rw_pid], "projects_write": [test_pid, rw_pid]}
+ cvws["_admin"] = {
+ "projects_read": [test_pid, ro_pid, rw_pid],
+ "projects_write": [test_pid, rw_pid],
+ }
self.db.get_one.return_value = cvws
oid = self.topic.delete(self.fake_session, cid)
self.assertIsNone(oid, "Wrong operation identifier")
- self.assertEqual(self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic")
- self.assertEqual(self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
- self.assertEqual(self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic")
- self.assertEqual(self.db.set_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
- self.assertEqual(self.db.set_one.call_args[1]["update_dict"], None,
- "Wrong read-only projects update")
- self.assertEqual(self.db.set_one.call_args[1]["pull_list"],
- {"_admin.projects_read": (test_pid,), "_admin.projects_write": (test_pid,)},
- "Wrong read/write projects update")
+ self.assertEqual(
+ self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic"
+ )
+ self.assertEqual(
+ self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic"
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[1]["update_dict"],
+ None,
+ "Wrong read-only projects update",
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[1]["pull_list"],
+ {
+ "_admin.projects_read": (test_pid,),
+ "_admin.projects_write": (test_pid,),
+ },
+ "Wrong read/write projects update",
+ )
self.topic._send_msg.assert_not_called()
with self.subTest(i=2):
now = time()
- cvws["_admin"] = {"projects_read": [test_pid], "projects_write": [test_pid], "operations": []}
+ cvws["_admin"] = {
+ "projects_read": [test_pid],
+ "projects_write": [test_pid],
+ "operations": [],
+ }
self.db.get_one.return_value = cvws
oid = self.topic.delete(self.fake_session, cid)
- self.assertEqual(oid, cid+":0", "Wrong operation identifier")
- self.assertEqual(self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic")
- self.assertEqual(self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
- self.assertEqual(self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic")
- self.assertEqual(self.db.set_one.call_args[0][1]["_id"], cid, "Wrong user identifier")
- self.assertEqual(self.db.set_one.call_args[1]["update_dict"], {"_admin.to_delete": True},
- "Wrong _admin.to_delete update")
+ self.assertEqual(oid, cid + ":0", "Wrong operation identifier")
+ self.assertEqual(
+ self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic"
+ )
+ self.assertEqual(
+ self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic"
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[0][1]["_id"], cid, "Wrong user identifier"
+ )
+ self.assertEqual(
+ self.db.set_one.call_args[1]["update_dict"],
+ {"_admin.to_delete": True},
+ "Wrong _admin.to_delete update",
+ )
operation = self.db.set_one.call_args[1]["push"]["_admin.operations"]
- self.assertEqual(operation["lcmOperationType"], "delete", "Wrong operation type")
- self.assertEqual(operation["operationState"], "PROCESSING", "Wrong operation state")
- self.assertEqual(operation["detailed-status"], "", "Wrong operation detailed status")
- self.assertIsNone(operation["operationParams"], "Wrong operation parameters")
- self.assertGreater(operation["startTime"], now, "Wrong operation start time")
- self.assertGreater(operation["statusEnteredTime"], now, "Wrong operation status enter time")
- self.topic._send_msg.assert_called_once_with("delete", {"_id": cid, "op_id": cid + ":0"}, not_send_msg=None)
+ self.assertEqual(
+ operation["lcmOperationType"], "delete", "Wrong operation type"
+ )
+ self.assertEqual(
+ operation["operationState"], "PROCESSING", "Wrong operation state"
+ )
+ self.assertEqual(
+ operation["detailed-status"], "", "Wrong operation detailed status"
+ )
+ self.assertIsNone(
+ operation["operationParams"], "Wrong operation parameters"
+ )
+ self.assertGreater(
+ operation["startTime"], now, "Wrong operation start time"
+ )
+ self.assertGreater(
+ operation["statusEnteredTime"], now, "Wrong operation status enter time"
+ )
+ self.topic._send_msg.assert_called_once_with(
+ "delete", {"_id": cid, "op_id": cid + ":0"}, not_send_msg=None
+ )
with self.subTest(i=3):
- cvws["_admin"] = {"projects_read": [], "projects_write": [], "operations": []}
+ cvws["_admin"] = {
+ "projects_read": [],
+ "projects_write": [],
+ "operations": [],
+ }
self.db.get_one.return_value = cvws
self.topic._send_msg.reset_mock()
self.db.get_one.reset_mock()
self.db.del_one.reset_mock()
- self.fake_session["force"] = True # to force deletion
- self.fake_session["admin"] = True # to force deletion
- self.fake_session["project_id"] = [] # to force deletion
+ self.fake_session["force"] = True # to force deletion
+ self.fake_session["admin"] = True # to force deletion
+ self.fake_session["project_id"] = [] # to force deletion
oid = self.topic.delete(self.fake_session, cid)
self.assertIsNone(oid, "Wrong operation identifier")
- self.assertEqual(self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic")
- self.assertEqual(self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
- self.assertEqual(self.db.del_one.call_args[0][0], self.topic.topic, "Wrong topic")
- self.assertEqual(self.db.del_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
- self.topic._send_msg.assert_called_once_with("deleted", {"_id": cid, "op_id": None}, not_send_msg=None)
+ self.assertEqual(
+ self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic"
+ )
+ self.assertEqual(
+ self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+ )
+ self.assertEqual(
+ self.db.del_one.call_args[0][0], self.topic.topic, "Wrong topic"
+ )
+ self.assertEqual(
+ self.db.del_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+ )
+ self.topic._send_msg.assert_called_once_with(
+ "deleted", {"_id": cid, "op_id": None}, not_send_msg=None
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
import unittest
from unittest import TestCase
+
# from unittest.mock import Mock
# from osm_common import dbbase, fsbase, msgbase
from osm_nbi.base_topic import BaseTopic, EngineException
class Test_BaseTopic(TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_name = "test-base-topic"
test_set = (
# (descriptor content, kwargs, expected descriptor (None=fails), message)
- ({"a": {"none": None}}, {"a.b.num": "v"}, {"a": {"none": None, "b": {"num": "v"}}}, "create dict"),
- ({"a": {"none": None}}, {"a.none.num": "v"}, {"a": {"none": {"num": "v"}}}, "create dict over none"),
- ({"a": {"b": {"num": 4}}}, {"a.b.num": "v"}, {"a": {"b": {"num": "v"}}}, "replace_number"),
- ({"a": {"b": {"num": 4}}}, {"a.b.num.c.d": "v"}, {"a": {"b": {"num": {"c": {"d": "v"}}}}},
- "create dict over number"),
- ({"a": {"b": {"num": 4}}}, {"a.b": "v"}, {"a": {"b": "v"}}, "replace dict with a string"),
- ({"a": {"b": {"num": 4}}}, {"a.b": None}, {"a": {}}, "replace dict with None"),
- ({"a": [{"b": {"num": 4}}]}, {"a.b.num": "v"}, None, "create dict over list should fail"),
- ({"a": [{"b": {"num": 4}}]}, {"a.0.b.num": "v"}, {"a": [{"b": {"num": "v"}}]}, "set list"),
- ({"a": [{"b": {"num": 4}}]}, {"a.3.b.num": "v"},
- {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, "expand list"),
+ (
+ {"a": {"none": None}},
+ {"a.b.num": "v"},
+ {"a": {"none": None, "b": {"num": "v"}}},
+ "create dict",
+ ),
+ (
+ {"a": {"none": None}},
+ {"a.none.num": "v"},
+ {"a": {"none": {"num": "v"}}},
+ "create dict over none",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b.num": "v"},
+ {"a": {"b": {"num": "v"}}},
+ "replace_number",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b.num.c.d": "v"},
+ {"a": {"b": {"num": {"c": {"d": "v"}}}}},
+ "create dict over number",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b": "v"},
+ {"a": {"b": "v"}},
+ "replace dict with a string",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b": None},
+ {"a": {}},
+ "replace dict with None",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.b.num": "v"},
+ None,
+ "create dict over list should fail",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.0.b.num": "v"},
+ {"a": [{"b": {"num": "v"}}]},
+ "set list",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.3.b.num": "v"},
+ {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]},
+ "expand list",
+ ),
({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"),
- ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"),
- ({"a": [[4]]}, {"a.2.2": "v"}, {"a": [[4], None, {"2": "v"}]}, "expand list and add number key"),
+ (
+ {"a": [[4]]},
+ {"a.0.2": "v"},
+ {"a": [[4, None, "v"]]},
+ "expand nested list",
+ ),
+ (
+ {"a": [[4]]},
+ {"a.2.2": "v"},
+ {"a": [[4], None, {"2": "v"}]},
+ "expand list and add number key",
+ ),
({"a": None}, {"b.c": "v"}, {"a": None, "b": {"c": "v"}}, "expand at root"),
)
for desc, kwargs, expected, message in test_set:
if expected is None:
- self.assertRaises(EngineException, BaseTopic._update_input_with_kwargs, desc, kwargs)
+ self.assertRaises(
+ EngineException, BaseTopic._update_input_with_kwargs, desc, kwargs
+ )
else:
BaseTopic._update_input_with_kwargs(desc, kwargs)
self.assertEqual(desc, expected, message)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
db_vnfd_content = yaml.load(db_vnfds_text, Loader=yaml.Loader)[0]
db_nsd_content = yaml.load(db_nsds_text, Loader=yaml.Loader)[0]
test_pid = db_vnfd_content["_admin"]["projects_read"][0]
-fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
- "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+fake_session = {
+ "username": test_name,
+ "project_id": (test_pid,),
+ "method": None,
+ "admin": True,
+ "force": False,
+ "public": False,
+ "allow_show_user_project_role": True,
+}
def norm(str):
"""Normalize string for checking"""
- return ' '.join(str.strip().split()).lower()
+ return " ".join(str.strip().split()).lower()
def compare_desc(tc, d1, d2, k):
class Test_VnfdTopic(TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_name = "test-vnfd-topic"
did = db_vnfd_content["_id"]
self.fs.get_params.return_value = {}
self.fs.file_exists.return_value = False
- self.fs.file_open.side_effect = lambda path, mode: open("/tmp/" + str(uuid4()), "a+b")
+ self.fs.file_open.side_effect = lambda path, mode: open(
+ "/tmp/" + str(uuid4()), "a+b"
+ )
test_vnfd = deepcopy(db_vnfd_content)
del test_vnfd["_id"]
del test_vnfd["_admin"]
- with self.subTest(i=1, t='Normal Creation'):
+ with self.subTest(i=1, t="Normal Creation"):
self.db.create.return_value = did
rollback = []
did2, oid = self.topic.new(rollback, fake_session, {})
self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(did2, did, "Wrong DB VNFD id")
self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
- self.assertEqual(db_args[1]["_admin"]["modified"], db_args[1]["_admin"]["created"],
- "Wrong modification time")
- self.assertEqual(db_args[1]["_admin"]["projects_read"], [test_pid], "Wrong read-only project list")
- self.assertEqual(db_args[1]["_admin"]["projects_write"], [test_pid], "Wrong read-write project list")
+ self.assertEqual(
+ db_args[1]["_admin"]["modified"],
+ db_args[1]["_admin"]["created"],
+ "Wrong modification time",
+ )
+ self.assertEqual(
+ db_args[1]["_admin"]["projects_read"],
+ [test_pid],
+ "Wrong read-only project list",
+ )
+ self.assertEqual(
+ db_args[1]["_admin"]["projects_write"],
+ [test_pid],
+ "Wrong read-write project list",
+ )
tmp1 = test_vnfd["vdu"][0]["cloud-init-file"]
- tmp2 = test_vnfd["df"][
- 0
- ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
- 0
- ]["execution-environment-list"][
- 0
- ]["juju"]
+ tmp2 = test_vnfd["df"][0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"][0]["execution-environment-list"][0]["juju"]
del test_vnfd["vdu"][0]["cloud-init-file"]
- del test_vnfd["df"][
- 0
- ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
- 0
- ]["execution-environment-list"][
- 0
- ]["juju"]
+ del test_vnfd["df"][0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"][0]["execution-environment-list"][0]["juju"]
try:
- self.db.get_one.side_effect = [{"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}, None]
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
+ self.db.get_one.side_effect = [
+ {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+ None,
+ ]
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
msg_args = self.msg.write.call_args[0]
test_vnfd["_id"] = did
- self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+ self.assertEqual(
+ msg_args[0], self.topic.topic_msg, "Wrong message topic"
+ )
self.assertEqual(msg_args[1], "edited", "Wrong message action")
self.assertEqual(msg_args[2], test_vnfd, "Wrong message content")
db_args = self.db.get_one.mock_calls[0][1]
admin = db_args[2]["_admin"]
db_admin = deepcopy(db_vnfd_content["_admin"])
self.assertEqual(admin["type"], "vnfd", "Wrong descriptor type")
- self.assertEqual(admin["created"], db_admin["created"], "Wrong creation time")
- self.assertGreater(admin["modified"], db_admin["created"], "Wrong modification time")
- self.assertEqual(admin["projects_read"], db_admin["projects_read"], "Wrong read-only project list")
- self.assertEqual(admin["projects_write"], db_admin["projects_write"], "Wrong read-write project list")
- self.assertEqual(admin["onboardingState"], "ONBOARDED", "Wrong onboarding state")
- self.assertEqual(admin["operationalState"], "ENABLED", "Wrong operational state")
+ self.assertEqual(
+ admin["created"], db_admin["created"], "Wrong creation time"
+ )
+ self.assertGreater(
+ admin["modified"], db_admin["created"], "Wrong modification time"
+ )
+ self.assertEqual(
+ admin["projects_read"],
+ db_admin["projects_read"],
+ "Wrong read-only project list",
+ )
+ self.assertEqual(
+ admin["projects_write"],
+ db_admin["projects_write"],
+ "Wrong read-write project list",
+ )
+ self.assertEqual(
+ admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
+ )
+ self.assertEqual(
+ admin["operationalState"], "ENABLED", "Wrong operational state"
+ )
self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
storage = admin["storage"]
self.assertEqual(storage["folder"], did, "Wrong storage folder")
- self.assertEqual(storage["descriptor"], "package", "Wrong storage descriptor")
+ self.assertEqual(
+ storage["descriptor"], "package", "Wrong storage descriptor"
+ )
compare_desc(self, test_vnfd, db_args[2], "VNFD")
finally:
test_vnfd["vdu"][0]["cloud-init-file"] = tmp1
- test_vnfd["df"][
- 0
- ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
- 0
- ]["execution-environment-list"][
- 0
- ]["juju"] = tmp2
- self.db.get_one.side_effect = lambda table, filter, fail_on_empty=None, fail_on_more=None: \
- {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}
- with self.subTest(i=2, t='Check Pyangbind Validation: additional properties'):
+ test_vnfd["df"][0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"][0]["execution-environment-list"][0]["juju"] = tmp2
+ self.db.get_one.side_effect = (
+ lambda table, filter, fail_on_empty=None, fail_on_more=None: {
+ "_id": did,
+ "_admin": deepcopy(db_vnfd_content["_admin"]),
+ }
+ )
+ with self.subTest(i=2, t="Check Pyangbind Validation: additional properties"):
test_vnfd["extra-property"] = 0
try:
- with self.assertRaises(EngineException, msg="Accepted VNFD with an additional property") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error in pyangbind validation: {} ({})"
- .format("json object contained a key that did not exist", "extra-property")),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted VNFD with an additional property"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error in pyangbind validation: {} ({})".format(
+ "json object contained a key that did not exist",
+ "extra-property",
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
del test_vnfd["extra-property"]
- with self.subTest(i=3, t='Check Pyangbind Validation: property types'):
+ with self.subTest(i=3, t="Check Pyangbind Validation: property types"):
tmp = test_vnfd["product-name"]
test_vnfd["product-name"] = {"key": 0}
try:
- with self.assertRaises(EngineException, msg="Accepted VNFD with a wrongly typed property") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error in pyangbind validation: {} ({})"
- .format("json object contained a key that did not exist", "key")),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted VNFD with a wrongly typed property"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error in pyangbind validation: {} ({})".format(
+ "json object contained a key that did not exist", "key"
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
test_vnfd["product-name"] = tmp
- with self.subTest(i=4, t='Check Input Validation: cloud-init'):
- with self.assertRaises(EngineException, msg="Accepted non-existent cloud_init file") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
- self.assertIn(norm("{} defined in vnf[id={}]:vdu[id={}] but not present in package"
- .format("cloud-init", test_vnfd["id"], test_vnfd["vdu"][0]["id"])),
- norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=5, t='Check Input Validation: day1-2 configuration[juju]'):
+ with self.subTest(i=4, t="Check Input Validation: cloud-init"):
+ with self.assertRaises(
+ EngineException, msg="Accepted non-existent cloud_init file"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(
+ "{} defined in vnf[id={}]:vdu[id={}] but not present in package".format(
+ "cloud-init", test_vnfd["id"], test_vnfd["vdu"][0]["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=5, t="Check Input Validation: day1-2 configuration[juju]"):
del test_vnfd["vdu"][0]["cloud-init-file"]
- with self.assertRaises(EngineException, msg="Accepted non-existent charm in VNF configuration") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
+ with self.assertRaises(
+ EngineException, msg="Accepted non-existent charm in VNF configuration"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
print(str(e.exception))
- self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
- self.assertIn(norm("{} defined in vnf[id={}] but not present in package".format("charm", test_vnfd["id"])),
- norm(str(e.exception)), "Wrong exception text")
- del test_vnfd["df"][
- 0
- ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
- 0
- ]["execution-environment-list"][
- 0
- ]["juju"]
- with self.subTest(i=6, t='Check Input Validation: mgmt-cp'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(
+ "{} defined in vnf[id={}] but not present in package".format(
+ "charm", test_vnfd["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ del test_vnfd["df"][0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"][0]["execution-environment-list"][0]["juju"]
+ with self.subTest(i=6, t="Check Input Validation: mgmt-cp"):
tmp = test_vnfd["mgmt-cp"]
del test_vnfd["mgmt-cp"]
try:
- with self.assertRaises(EngineException, msg="Accepted VNFD without management interface") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("'{}' is a mandatory field and it is not defined".format("mgmt-cp")),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted VNFD without management interface"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "'{}' is a mandatory field and it is not defined".format(
+ "mgmt-cp"
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
test_vnfd["mgmt-cp"] = tmp
- with self.subTest(i=7, t='Check Input Validation: mgmt-cp connection point'):
+ with self.subTest(i=7, t="Check Input Validation: mgmt-cp connection point"):
tmp = test_vnfd["mgmt-cp"]
test_vnfd["mgmt-cp"] = "wrong-cp"
try:
- with self.assertRaises(EngineException, msg="Accepted wrong mgmt-cp connection point") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("mgmt-cp='{}' must match an existing ext-cpd".format(test_vnfd["mgmt-cp"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong mgmt-cp connection point"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "mgmt-cp='{}' must match an existing ext-cpd".format(
+ test_vnfd["mgmt-cp"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
test_vnfd["mgmt-cp"] = tmp
- with self.subTest(i=8, t='Check Input Validation: vdu int-cpd'):
+ with self.subTest(i=8, t="Check Input Validation: vdu int-cpd"):
ext_cpd = test_vnfd["ext-cpd"][1]
tmp = ext_cpd["int-cpd"]["cpd"]
ext_cpd["int-cpd"]["cpd"] = "wrong-cpd"
try:
- with self.assertRaises(EngineException, msg="Accepted wrong ext-cpd internal connection point") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(ext_cpd["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted wrong ext-cpd internal connection point",
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+ ext_cpd["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
ext_cpd["int-cpd"]["cpd"] = tmp
- with self.subTest(i=9, t='Check Input Validation: Duplicated VLD'):
- test_vnfd['int-virtual-link-desc'].insert(0, {'id': 'internal'})
+ with self.subTest(i=9, t="Check Input Validation: Duplicated VLD"):
+ test_vnfd["int-virtual-link-desc"].insert(0, {"id": "internal"})
try:
- with self.assertRaises(EngineException, msg="Accepted duplicated VLD name") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
+ with self.assertRaises(
+ EngineException, msg="Accepted duplicated VLD name"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
self.assertIn(
- norm("identifier id '{}' is not unique".format(test_vnfd['int-virtual-link-desc'][0]["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ norm(
+ "identifier id '{}' is not unique".format(
+ test_vnfd["int-virtual-link-desc"][0]["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- del test_vnfd['int-virtual-link-desc'][0]
- with self.subTest(i=10, t='Check Input Validation: vdu int-virtual-link-desc'):
- vdu = test_vnfd['vdu'][0]
- int_cpd = vdu['int-cpd'][1]
- tmp = int_cpd['int-virtual-link-desc']
- int_cpd['int-virtual-link-desc'] = 'non-existing-int-virtual-link-desc'
+ del test_vnfd["int-virtual-link-desc"][0]
+ with self.subTest(i=10, t="Check Input Validation: vdu int-virtual-link-desc"):
+ vdu = test_vnfd["vdu"][0]
+ int_cpd = vdu["int-cpd"][1]
+ tmp = int_cpd["int-virtual-link-desc"]
+ int_cpd["int-virtual-link-desc"] = "non-existing-int-virtual-link-desc"
try:
- with self.assertRaises(EngineException, msg="Accepted int-virtual-link-desc") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
- "int-virtual-link-desc".format(vdu["id"], int_cpd["id"],
- int_cpd['int-virtual-link-desc'])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted int-virtual-link-desc"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+ "int-virtual-link-desc".format(
+ vdu["id"], int_cpd["id"], int_cpd["int-virtual-link-desc"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- int_cpd['int-virtual-link-desc'] = tmp
- with self.subTest(i=11, t='Check Input Validation: virtual-link-profile)'):
- fake_ivld_profile = {'id': 'fake-profile-ref', 'flavour': 'fake-flavour'}
- df = test_vnfd['df'][0]
- df['virtual-link-profile'] = [fake_ivld_profile]
+ int_cpd["int-virtual-link-desc"] = tmp
+ with self.subTest(i=11, t="Check Input Validation: virtual-link-profile)"):
+ fake_ivld_profile = {"id": "fake-profile-ref", "flavour": "fake-flavour"}
+ df = test_vnfd["df"][0]
+ df["virtual-link-profile"] = [fake_ivld_profile]
try:
- with self.assertRaises(EngineException, msg="Accepted non-existent Profile Ref") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("df[id='{}']:virtual-link-profile='{}' must match an existing "
- "int-virtual-link-desc".format(df["id"], fake_ivld_profile["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted non-existent Profile Ref"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "df[id='{}']:virtual-link-profile='{}' must match an existing "
+ "int-virtual-link-desc".format(
+ df["id"], fake_ivld_profile["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- del df['virtual-link-profile']
- with self.subTest(i=12, t='Check Input Validation: scaling-criteria monitoring-param-ref'):
- vdu = test_vnfd['vdu'][1]
- affected_df = test_vnfd['df'][0]
- sa = affected_df['scaling-aspect'][0]
- sp = sa['scaling-policy'][0]
- sc = sp['scaling-criteria'][0]
- tmp = vdu.pop('monitoring-parameter')
+ del df["virtual-link-profile"]
+ with self.subTest(
+ i=12, t="Check Input Validation: scaling-criteria monitoring-param-ref"
+ ):
+ vdu = test_vnfd["vdu"][1]
+ affected_df = test_vnfd["df"][0]
+ sa = affected_df["scaling-aspect"][0]
+ sp = sa["scaling-policy"][0]
+ sc = sp["scaling-criteria"][0]
+ tmp = vdu.pop("monitoring-parameter")
try:
- with self.assertRaises(EngineException, msg="Accepted non-existent Scaling Group Policy Criteria") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
- "[name='{}']:scaling-criteria[name='{}']: "
- "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
- .format(affected_df["id"], sa["id"], sp["name"],
- sc["name"], sc["vnf-monitoring-param-ref"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted non-existent Scaling Group Policy Criteria",
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+ "[name='{}']:scaling-criteria[name='{}']: "
+ "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+ affected_df["id"],
+ sa["id"],
+ sp["name"],
+ sc["name"],
+ sc["vnf-monitoring-param-ref"],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- vdu['monitoring-parameter'] = tmp
- with self.subTest(i=13, t='Check Input Validation: scaling-aspect vnf-configuration'):
- df = test_vnfd['df'][0]
- tmp = test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"].pop()
+ vdu["monitoring-parameter"] = tmp
+ with self.subTest(
+ i=13, t="Check Input Validation: scaling-aspect vnf-configuration"
+ ):
+ df = test_vnfd["df"][0]
+ tmp = test_vnfd["df"][0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"].pop()
try:
- with self.assertRaises(EngineException, msg="Accepted non-existent Scaling Group VDU ID Reference") \
- as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("'day1-2 configuration' not defined in the descriptor but it is referenced "
- "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
- .format(df["id"], df['scaling-aspect'][0]["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted non-existent Scaling Group VDU ID Reference",
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "'day1-2 configuration' not defined in the descriptor but it is referenced "
+ "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+ df["id"], df["scaling-aspect"][0]["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"].append(tmp)
- with self.subTest(i=14, t='Check Input Validation: scaling-config-action'):
- df = test_vnfd['df'][0]
- tmp = test_vnfd["df"][0].get(
- "lcm-operations-configuration"
- ).get(
- "operate-vnf-op-config"
- )["day1-2"][0]['config-primitive']
- test_vnfd["df"][0].get(
- "lcm-operations-configuration"
- ).get(
+ test_vnfd["df"][0]["lcm-operations-configuration"][
+ "operate-vnf-op-config"
+ ]["day1-2"].append(tmp)
+ with self.subTest(i=14, t="Check Input Validation: scaling-config-action"):
+ df = test_vnfd["df"][0]
+ tmp = (
+ test_vnfd["df"][0]
+ .get("lcm-operations-configuration")
+ .get("operate-vnf-op-config")["day1-2"][0]["config-primitive"]
+ )
+ test_vnfd["df"][0].get("lcm-operations-configuration").get(
"operate-vnf-op-config"
- )["day1-2"][0]['config-primitive'] = [{'name': 'wrong-primitive'}]
+ )["day1-2"][0]["config-primitive"] = [{"name": "wrong-primitive"}]
try:
- with self.assertRaises(EngineException,
- msg="Accepted non-existent Scaling Group VDU ID Reference") as e:
- self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
- "config-primitive-name-ref='{}' does not match any "
- "day1-2 configuration:config-primitive:name"
- .format(df["id"], df['scaling-aspect'][0]["id"],
- sa['scaling-config-action'][0]['vnf-config-primitive-name-ref'])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException,
+ msg="Accepted non-existent Scaling Group VDU ID Reference",
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+ "config-primitive-name-ref='{}' does not match any "
+ "day1-2 configuration:config-primitive:name".format(
+ df["id"],
+ df["scaling-aspect"][0]["id"],
+ sa["scaling-config-action"][0][
+ "vnf-config-primitive-name-ref"
+ ],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- test_vnfd["df"][0].get(
- "lcm-operations-configuration"
- ).get(
+ test_vnfd["df"][0].get("lcm-operations-configuration").get(
"operate-vnf-op-config"
- )["day1-2"][0]['config-primitive'] = tmp
- with self.subTest(i=15, t='Check Input Validation: everything right'):
+ )["day1-2"][0]["config-primitive"] = tmp
+ with self.subTest(i=15, t="Check Input Validation: everything right"):
test_vnfd["id"] = "fake-vnfd-id"
- test_vnfd["df"][0].get(
- "lcm-operations-configuration"
- ).get(
+ test_vnfd["df"][0].get("lcm-operations-configuration").get(
"operate-vnf-op-config"
)["day1-2"][0]["id"] = "fake-vnfd-id"
- self.db.get_one.side_effect = [{"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}, None]
- rc = self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
+ self.db.get_one.side_effect = [
+ {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+ None,
+ ]
+ rc = self.topic.upload_content(
+ fake_session, did, test_vnfd, {}, {"Content-Type": []}
+ )
self.assertTrue(rc, "Input Validation: Unexpected failure")
return
did = vnfd_content["_id"]
self.fs.file_exists.return_value = True
self.fs.dir_ls.return_value = True
- with self.subTest(i=1, t='Normal Edition'):
+ with self.subTest(i=1, t="Normal Edition"):
now = time()
self.db.get_one.side_effect = [deepcopy(vnfd_content), None]
- data = {
- "product-name": "new-vnfd-name"
- }
+ data = {"product-name": "new-vnfd-name"}
self.topic.edit(fake_session, did, data)
db_args = self.db.replace.call_args[0]
msg_args = self.msg.write.call_args[0]
self.assertEqual(msg_args[2], data, "Wrong message content")
self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_args[1], did, "Wrong DB ID")
- self.assertEqual(db_args[2]["_admin"]["created"], vnfd_content["_admin"]["created"],
- "Wrong creation time")
- self.assertGreater(db_args[2]["_admin"]["modified"], now,
- "Wrong modification time")
- self.assertEqual(db_args[2]["_admin"]["projects_read"], vnfd_content["_admin"]["projects_read"],
- "Wrong read-only project list")
- self.assertEqual(db_args[2]["_admin"]["projects_write"], vnfd_content["_admin"]["projects_write"],
- "Wrong read-write project list")
- self.assertEqual(db_args[2]["product-name"], data["product-name"], "Wrong VNFD Name")
- with self.subTest(i=2, t='Conflict on Edit'):
+ self.assertEqual(
+ db_args[2]["_admin"]["created"],
+ vnfd_content["_admin"]["created"],
+ "Wrong creation time",
+ )
+ self.assertGreater(
+ db_args[2]["_admin"]["modified"], now, "Wrong modification time"
+ )
+ self.assertEqual(
+ db_args[2]["_admin"]["projects_read"],
+ vnfd_content["_admin"]["projects_read"],
+ "Wrong read-only project list",
+ )
+ self.assertEqual(
+ db_args[2]["_admin"]["projects_write"],
+ vnfd_content["_admin"]["projects_write"],
+ "Wrong read-write project list",
+ )
+ self.assertEqual(
+ db_args[2]["product-name"], data["product-name"], "Wrong VNFD Name"
+ )
+ with self.subTest(i=2, t="Conflict on Edit"):
data = {"id": "hackfest3charmed-vnf", "product-name": "new-vnfd-name"}
- self.db.get_one.side_effect = [deepcopy(vnfd_content), {"_id": str(uuid4()), "id": data["id"]}]
- with self.assertRaises(EngineException, msg="Accepted existing VNFD ID") as e:
+ self.db.get_one.side_effect = [
+ deepcopy(vnfd_content),
+ {"_id": str(uuid4()), "id": data["id"]},
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing VNFD ID"
+ ) as e:
self.topic.edit(fake_session, did, data)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn(norm("{} with id '{}' already exists for this project".format("vnfd", data["id"])),
- norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=3, t='Check Envelope'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(
+ "{} with id '{}' already exists for this project".format(
+ "vnfd", data["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=3, t="Check Envelope"):
data = {"vnfd": [{"id": "new-vnfd-id-1", "product-name": "new-vnfd-name"}]}
- with self.assertRaises(EngineException, msg="Accepted VNFD with wrong envelope") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted VNFD with wrong envelope"
+ ) as e:
self.topic.edit(fake_session, did, data, content=vnfd_content)
- self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
- self.assertIn("'vnfd' must be dict", norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "'vnfd' must be dict", norm(str(e.exception)), "Wrong exception text"
+ )
return
def test_delete_vnfd(self):
did = db_vnfd_content["_id"]
self.db.get_one.return_value = db_vnfd_content
p_id = db_vnfd_content["_admin"]["projects_read"][0]
- with self.subTest(i=1, t='Normal Deletion'):
+ with self.subTest(i=1, t="Normal Deletion"):
self.db.get_list.return_value = []
self.db.del_one.return_value = {"deleted": 1}
self.topic.delete(fake_session, did)
self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_args[1]["_id"], did, "Wrong DB ID")
- self.assertEqual(db_args[1]["_admin.projects_write.cont"], [p_id, 'ANY'], "Wrong DB filter")
+ self.assertEqual(
+ db_args[1]["_admin.projects_write.cont"],
+ [p_id, "ANY"],
+ "Wrong DB filter",
+ )
db_g1_args = self.db.get_one.call_args[0]
self.assertEqual(db_g1_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_g1_args[1]["_id"], did, "Wrong DB VNFD ID")
self.assertEqual(db_gl_calls[0][0][0], "vnfrs", "Wrong DB topic")
# self.assertEqual(db_gl_calls[0][0][1]["vnfd-id"], did, "Wrong DB VNFD ID") # Filter changed after call
self.assertEqual(db_gl_calls[1][0][0], "nsds", "Wrong DB topic")
- self.assertEqual(db_gl_calls[1][0][1]["vnfd-id"], db_vnfd_content["id"],
- "Wrong DB NSD vnfd-id")
+ self.assertEqual(
+ db_gl_calls[1][0][1]["vnfd-id"],
+ db_vnfd_content["id"],
+ "Wrong DB NSD vnfd-id",
+ )
self.db.set_one.assert_not_called()
fs_del_calls = self.fs.file_delete.call_args_list
self.assertEqual(fs_del_calls[0][0][0], did, "Wrong FS file id")
- self.assertEqual(fs_del_calls[1][0][0], did + '_', "Wrong FS folder id")
- with self.subTest(i=2, t='Conflict on Delete - VNFD in use by VNFR'):
+ self.assertEqual(fs_del_calls[1][0][0], did + "_", "Wrong FS folder id")
+ with self.subTest(i=2, t="Conflict on Delete - VNFD in use by VNFR"):
self.db.get_list.return_value = [{"_id": str(uuid4()), "name": "fake-vnfr"}]
- with self.assertRaises(EngineException, msg="Accepted VNFD in use by VNFR") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted VNFD in use by VNFR"
+ ) as e:
self.topic.delete(fake_session, did)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("there is at least one vnf instance using this descriptor", norm(str(e.exception)),
- "Wrong exception text")
- with self.subTest(i=3, t='Conflict on Delete - VNFD in use by NSD'):
- self.db.get_list.side_effect = [[], [{"_id": str(uuid4()), "name": "fake-nsd"}]]
- with self.assertRaises(EngineException, msg="Accepted VNFD in use by NSD") as e:
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "there is at least one vnf instance using this descriptor",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=3, t="Conflict on Delete - VNFD in use by NSD"):
+ self.db.get_list.side_effect = [
+ [],
+ [{"_id": str(uuid4()), "name": "fake-nsd"}],
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted VNFD in use by NSD"
+ ) as e:
self.topic.delete(fake_session, did)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("there is at least one ns package referencing this descriptor", norm(str(e.exception)),
- "Wrong exception text")
- with self.subTest(i=4, t='Non-existent VNFD'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "there is at least one ns package referencing this descriptor",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=4, t="Non-existent VNFD"):
excp_msg = "Not found any {} with filter='{}'".format("VNFD", {"_id": did})
self.db.get_one.side_effect = DbException(excp_msg, HTTPStatus.NOT_FOUND)
- with self.assertRaises(DbException, msg="Accepted non-existent VNFD ID") as e:
+ with self.assertRaises(
+ DbException, msg="Accepted non-existent VNFD ID"
+ ) as e:
self.topic.delete(fake_session, did)
- self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
- self.assertIn(norm(excp_msg), norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=5, t='No delete because referenced by other project'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(excp_msg), norm(str(e.exception)), "Wrong exception text"
+ )
+ with self.subTest(i=5, t="No delete because referenced by other project"):
db_vnfd_content["_admin"]["projects_read"].append("other_project")
self.db.get_one = Mock(return_value=db_vnfd_content)
self.db.get_list = Mock(return_value=[])
db_s1_args = self.db.set_one.call_args
self.assertEqual(db_s1_args[0][0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_s1_args[0][1]["_id"], did, "Wrong DB ID")
- self.assertIn(p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter")
- self.assertIsNone(db_s1_args[1]["update_dict"], "Wrong DB update dictionary")
- self.assertEqual(db_s1_args[1]["pull_list"],
- {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
- "Wrong DB pull_list dictionary")
+ self.assertIn(
+ p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter"
+ )
+ self.assertIsNone(
+ db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
+ )
+ self.assertEqual(
+ db_s1_args[1]["pull_list"],
+ {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
+ "Wrong DB pull_list dictionary",
+ )
self.fs.file_delete.assert_not_called()
return
indata = deepcopy(db_vnfd_content)
self.topic.validate_mgmt_interface_connection_point(indata)
- def test_validate_mgmt_interface_connection_point_when_missing_connection_point(self):
+ def test_validate_mgmt_interface_connection_point_when_missing_connection_point(
+ self,
+ ):
indata = deepcopy(db_vnfd_content)
- indata['ext-cpd'] = []
+ indata["ext-cpd"] = []
with self.assertRaises(EngineException) as e:
self.topic.validate_mgmt_interface_connection_point(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("mgmt-cp='{}' must match an existing ext-cpd"
- .format(indata["mgmt-cp"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"])
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_mgmt_interface_connection_point_when_missing_mgmt_cp(self):
indata = deepcopy(db_vnfd_content)
- indata.pop('mgmt-cp')
+ indata.pop("mgmt-cp")
with self.assertRaises(EngineException) as e:
self.topic.validate_mgmt_interface_connection_point(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("'mgmt-cp' is a mandatory field and it is not defined"),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm("'mgmt-cp' is a mandatory field and it is not defined"),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_vdu_internal_connection_points_on_valid_descriptor(self):
indata = db_vnfd_content
- vdu = indata['vdu'][0]
+ vdu = indata["vdu"][0]
self.topic.validate_vdu_internal_connection_points(vdu)
def test_validate_external_connection_points_on_valid_descriptor(self):
indata = db_vnfd_content
self.topic.validate_external_connection_points(indata)
- def test_validate_external_connection_points_when_missing_internal_connection_point(self):
+ def test_validate_external_connection_points_when_missing_internal_connection_point(
+ self,
+ ):
indata = deepcopy(db_vnfd_content)
- vdu = indata['vdu'][0]
- vdu.pop('int-cpd')
+ vdu = indata["vdu"][0]
+ vdu.pop("int-cpd")
affected_ext_cpd = indata["ext-cpd"][0]
with self.assertRaises(EngineException) as e:
self.topic.validate_external_connection_points(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd"
- .format(affected_ext_cpd["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+ affected_ext_cpd["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
- def test_validate_vdu_internal_connection_points_on_duplicated_internal_connection_point(self):
+ def test_validate_vdu_internal_connection_points_on_duplicated_internal_connection_point(
+ self,
+ ):
indata = deepcopy(db_vnfd_content)
- vdu = indata['vdu'][0]
- duplicated_cpd = {'id': 'vnf-mgmt', 'order': 3,
- 'virtual-network-interface-requirement': [{'name': 'duplicated'}]}
- vdu['int-cpd'].insert(0, duplicated_cpd)
+ vdu = indata["vdu"][0]
+ duplicated_cpd = {
+ "id": "vnf-mgmt",
+ "order": 3,
+ "virtual-network-interface-requirement": [{"name": "duplicated"}],
+ }
+ vdu["int-cpd"].insert(0, duplicated_cpd)
with self.assertRaises(EngineException) as e:
self.topic.validate_vdu_internal_connection_points(vdu)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd"
- .format(vdu["id"], duplicated_cpd["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
+ vdu["id"], duplicated_cpd["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
- def test_validate_external_connection_points_on_duplicated_external_connection_point(self):
+ def test_validate_external_connection_points_on_duplicated_external_connection_point(
+ self,
+ ):
indata = deepcopy(db_vnfd_content)
- duplicated_cpd = {'id': 'vnf-mgmt-ext', 'int-cpd': {'vdu-id': 'dataVM', 'cpd': 'vnf-data'}}
- indata['ext-cpd'].insert(0, duplicated_cpd)
+ duplicated_cpd = {
+ "id": "vnf-mgmt-ext",
+ "int-cpd": {"vdu-id": "dataVM", "cpd": "vnf-data"},
+ }
+ indata["ext-cpd"].insert(0, duplicated_cpd)
with self.assertRaises(EngineException) as e:
self.topic.validate_external_connection_points(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("ext-cpd[id='{}'] is already used by other ext-cpd"
- .format(duplicated_cpd["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "ext-cpd[id='{}'] is already used by other ext-cpd".format(
+ duplicated_cpd["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_internal_virtual_links_on_valid_descriptor(self):
indata = db_vnfd_content
def test_validate_internal_virtual_links_on_duplicated_ivld(self):
indata = deepcopy(db_vnfd_content)
- duplicated_vld = {'id': 'internal'}
- indata['int-virtual-link-desc'].insert(0, duplicated_vld)
+ duplicated_vld = {"id": "internal"}
+ indata["int-virtual-link-desc"].insert(0, duplicated_vld)
with self.assertRaises(EngineException) as e:
self.topic.validate_internal_virtual_links(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Duplicated VLD id in int-virtual-link-desc[id={}]"
- .format(duplicated_vld["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Duplicated VLD id in int-virtual-link-desc[id={}]".format(
+ duplicated_vld["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
- def test_validate_internal_virtual_links_when_missing_ivld_on_connection_point(self):
+ def test_validate_internal_virtual_links_when_missing_ivld_on_connection_point(
+ self,
+ ):
indata = deepcopy(db_vnfd_content)
- vdu = indata['vdu'][0]
- affected_int_cpd = vdu['int-cpd'][0]
- affected_int_cpd['int-virtual-link-desc'] = 'non-existing-int-virtual-link-desc'
+ vdu = indata["vdu"][0]
+ affected_int_cpd = vdu["int-cpd"][0]
+ affected_int_cpd["int-virtual-link-desc"] = "non-existing-int-virtual-link-desc"
with self.assertRaises(EngineException) as e:
self.topic.validate_internal_virtual_links(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
- "int-virtual-link-desc".format(vdu["id"], affected_int_cpd["id"],
- affected_int_cpd['int-virtual-link-desc'])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+ "int-virtual-link-desc".format(
+ vdu["id"],
+ affected_int_cpd["id"],
+ affected_int_cpd["int-virtual-link-desc"],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_internal_virtual_links_when_missing_ivld_on_profile(self):
indata = deepcopy(db_vnfd_content)
- affected_ivld_profile = {'id': 'non-existing-int-virtual-link-desc'}
- df = indata['df'][0]
- df['virtual-link-profile'] = [affected_ivld_profile]
+ affected_ivld_profile = {"id": "non-existing-int-virtual-link-desc"}
+ df = indata["df"][0]
+ df["virtual-link-profile"] = [affected_ivld_profile]
with self.assertRaises(EngineException) as e:
self.topic.validate_internal_virtual_links(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("df[id='{}']:virtual-link-profile='{}' must match an existing "
- "int-virtual-link-desc".format(df["id"], affected_ivld_profile["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "df[id='{}']:virtual-link-profile='{}' must match an existing "
+ "int-virtual-link-desc".format(df["id"], affected_ivld_profile["id"])
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_monitoring_params_on_valid_descriptor(self):
indata = db_vnfd_content
def test_validate_monitoring_params_on_duplicated_ivld_monitoring_param(self):
indata = deepcopy(db_vnfd_content)
- duplicated_mp = {'id': 'cpu', 'name': 'cpu', 'performance_metric': 'cpu'}
- affected_ivld = indata['int-virtual-link-desc'][0]
- affected_ivld['monitoring-parameters'] = [duplicated_mp, duplicated_mp]
+ duplicated_mp = {"id": "cpu", "name": "cpu", "performance_metric": "cpu"}
+ affected_ivld = indata["int-virtual-link-desc"][0]
+ affected_ivld["monitoring-parameters"] = [duplicated_mp, duplicated_mp]
with self.assertRaises(EngineException) as e:
self.topic.validate_monitoring_params(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Duplicated monitoring-parameter id in "
- "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']"
- .format(affected_ivld["id"], duplicated_mp["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Duplicated monitoring-parameter id in "
+ "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
+ affected_ivld["id"], duplicated_mp["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_monitoring_params_on_duplicated_vdu_monitoring_param(self):
indata = deepcopy(db_vnfd_content)
- duplicated_mp = {'id': 'dataVM_cpu_util', 'name': 'dataVM_cpu_util', 'performance_metric': 'cpu'}
- affected_vdu = indata['vdu'][1]
- affected_vdu['monitoring-parameter'].insert(0, duplicated_mp)
+ duplicated_mp = {
+ "id": "dataVM_cpu_util",
+ "name": "dataVM_cpu_util",
+ "performance_metric": "cpu",
+ }
+ affected_vdu = indata["vdu"][1]
+ affected_vdu["monitoring-parameter"].insert(0, duplicated_mp)
with self.assertRaises(EngineException) as e:
self.topic.validate_monitoring_params(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Duplicated monitoring-parameter id in "
- "vdu[id='{}']:monitoring-parameter[id='{}']"
- .format(affected_vdu["id"], duplicated_mp["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Duplicated monitoring-parameter id in "
+ "vdu[id='{}']:monitoring-parameter[id='{}']".format(
+ affected_vdu["id"], duplicated_mp["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_monitoring_params_on_duplicated_df_monitoring_param(self):
indata = deepcopy(db_vnfd_content)
- duplicated_mp = {'id': 'memory', 'name': 'memory', 'performance_metric': 'memory'}
- affected_df = indata['df'][0]
- affected_df['monitoring-parameter'] = [duplicated_mp, duplicated_mp]
+ duplicated_mp = {
+ "id": "memory",
+ "name": "memory",
+ "performance_metric": "memory",
+ }
+ affected_df = indata["df"][0]
+ affected_df["monitoring-parameter"] = [duplicated_mp, duplicated_mp]
with self.assertRaises(EngineException) as e:
self.topic.validate_monitoring_params(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Duplicated monitoring-parameter id in "
- "df[id='{}']:monitoring-parameter[id='{}']"
- .format(affected_df["id"], duplicated_mp["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Duplicated monitoring-parameter id in "
+ "df[id='{}']:monitoring-parameter[id='{}']".format(
+ affected_df["id"], duplicated_mp["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_scaling_group_descriptor_on_valid_descriptor(self):
indata = db_vnfd_content
def test_validate_scaling_group_descriptor_when_missing_monitoring_param(self):
indata = deepcopy(db_vnfd_content)
- vdu = indata['vdu'][1]
- affected_df = indata['df'][0]
- affected_sa = affected_df['scaling-aspect'][0]
- affected_sp = affected_sa['scaling-policy'][0]
- affected_sc = affected_sp['scaling-criteria'][0]
- vdu.pop('monitoring-parameter')
+ vdu = indata["vdu"][1]
+ affected_df = indata["df"][0]
+ affected_sa = affected_df["scaling-aspect"][0]
+ affected_sp = affected_sa["scaling-policy"][0]
+ affected_sc = affected_sp["scaling-criteria"][0]
+ vdu.pop("monitoring-parameter")
with self.assertRaises(EngineException) as e:
self.topic.validate_scaling_group_descriptor(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
- "[name='{}']:scaling-criteria[name='{}']: "
- "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
- .format(affected_df["id"], affected_sa["id"], affected_sp["name"], affected_sc["name"],
- affected_sc["vnf-monitoring-param-ref"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+ "[name='{}']:scaling-criteria[name='{}']: "
+ "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+ affected_df["id"],
+ affected_sa["id"],
+ affected_sp["name"],
+ affected_sc["name"],
+ affected_sc["vnf-monitoring-param-ref"],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_scaling_group_descriptor_when_missing_vnf_configuration(self):
indata = deepcopy(db_vnfd_content)
- df = indata['df'][0]
- affected_sa = df['scaling-aspect'][0]
- indata["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"].pop()
+ df = indata["df"][0]
+ affected_sa = df["scaling-aspect"][0]
+ indata["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+ "day1-2"
+ ].pop()
with self.assertRaises(EngineException) as e:
self.topic.validate_scaling_group_descriptor(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("'day1-2 configuration' not defined in the descriptor but it is referenced "
- "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
- .format(df["id"], affected_sa["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "'day1-2 configuration' not defined in the descriptor but it is referenced "
+ "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+ df["id"], affected_sa["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
- def test_validate_scaling_group_descriptor_when_missing_scaling_config_action_primitive(self):
+ def test_validate_scaling_group_descriptor_when_missing_scaling_config_action_primitive(
+ self,
+ ):
indata = deepcopy(db_vnfd_content)
- df = indata['df'][0]
- affected_sa = df['scaling-aspect'][0]
- affected_sca_primitive = affected_sa['scaling-config-action'][0]['vnf-config-primitive-name-ref']
- df["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][0]['config-primitive'] = []
+ df = indata["df"][0]
+ affected_sa = df["scaling-aspect"][0]
+ affected_sca_primitive = affected_sa["scaling-config-action"][0][
+ "vnf-config-primitive-name-ref"
+ ]
+ df["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][0][
+ "config-primitive"
+ ] = []
with self.assertRaises(EngineException) as e:
self.topic.validate_scaling_group_descriptor(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
- "config-primitive-name-ref='{}' does not match any "
- "day1-2 configuration:config-primitive:name"
- .format(df["id"], affected_sa["id"], affected_sca_primitive)),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+ "config-primitive-name-ref='{}' does not match any "
+ "day1-2 configuration:config-primitive:name".format(
+ df["id"], affected_sa["id"], affected_sca_primitive
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
class Test_NsdTopic(TestCase):
-
@classmethod
def setUpClass(cls):
cls.test_name = "test-nsd-topic"
did = db_nsd_content["_id"]
self.fs.get_params.return_value = {}
self.fs.file_exists.return_value = False
- self.fs.file_open.side_effect = lambda path, mode: open("/tmp/" + str(uuid4()), "a+b")
+ self.fs.file_open.side_effect = lambda path, mode: open(
+ "/tmp/" + str(uuid4()), "a+b"
+ )
test_nsd = deepcopy(db_nsd_content)
del test_nsd["_id"]
del test_nsd["_admin"]
- with self.subTest(i=1, t='Normal Creation'):
+ with self.subTest(i=1, t="Normal Creation"):
self.db.create.return_value = did
rollback = []
did2, oid = self.topic.new(rollback, fake_session, {})
self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(did2, did, "Wrong DB NSD id")
self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
- self.assertEqual(db_args[1]["_admin"]["modified"], db_args[1]["_admin"]["created"],
- "Wrong modification time")
- self.assertEqual(db_args[1]["_admin"]["projects_read"], [test_pid], "Wrong read-only project list")
- self.assertEqual(db_args[1]["_admin"]["projects_write"], [test_pid], "Wrong read-write project list")
+ self.assertEqual(
+ db_args[1]["_admin"]["modified"],
+ db_args[1]["_admin"]["created"],
+ "Wrong modification time",
+ )
+ self.assertEqual(
+ db_args[1]["_admin"]["projects_read"],
+ [test_pid],
+ "Wrong read-only project list",
+ )
+ self.assertEqual(
+ db_args[1]["_admin"]["projects_write"],
+ [test_pid],
+ "Wrong read-write project list",
+ )
try:
- self.db.get_one.side_effect = [{"_id": did, "_admin": db_nsd_content["_admin"]}, None]
+ self.db.get_one.side_effect = [
+ {"_id": did, "_admin": db_nsd_content["_admin"]},
+ None,
+ ]
self.db.get_list.return_value = [db_vnfd_content]
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
msg_args = self.msg.write.call_args[0]
test_nsd["_id"] = did
- self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+ self.assertEqual(
+ msg_args[0], self.topic.topic_msg, "Wrong message topic"
+ )
self.assertEqual(msg_args[1], "edited", "Wrong message action")
self.assertEqual(msg_args[2], test_nsd, "Wrong message content")
db_args = self.db.get_one.mock_calls[0][1]
self.assertEqual(db_args[1], did, "Wrong DB NSD id")
admin = db_args[2]["_admin"]
db_admin = db_nsd_content["_admin"]
- self.assertEqual(admin["created"], db_admin["created"], "Wrong creation time")
- self.assertGreater(admin["modified"], db_admin["created"], "Wrong modification time")
- self.assertEqual(admin["projects_read"], db_admin["projects_read"], "Wrong read-only project list")
- self.assertEqual(admin["projects_write"], db_admin["projects_write"], "Wrong read-write project list")
- self.assertEqual(admin["onboardingState"], "ONBOARDED", "Wrong onboarding state")
- self.assertEqual(admin["operationalState"], "ENABLED", "Wrong operational state")
+ self.assertEqual(
+ admin["created"], db_admin["created"], "Wrong creation time"
+ )
+ self.assertGreater(
+ admin["modified"], db_admin["created"], "Wrong modification time"
+ )
+ self.assertEqual(
+ admin["projects_read"],
+ db_admin["projects_read"],
+ "Wrong read-only project list",
+ )
+ self.assertEqual(
+ admin["projects_write"],
+ db_admin["projects_write"],
+ "Wrong read-write project list",
+ )
+ self.assertEqual(
+ admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
+ )
+ self.assertEqual(
+ admin["operationalState"], "ENABLED", "Wrong operational state"
+ )
self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
storage = admin["storage"]
self.assertEqual(storage["folder"], did, "Wrong storage folder")
- self.assertEqual(storage["descriptor"], "package", "Wrong storage descriptor")
+ self.assertEqual(
+ storage["descriptor"], "package", "Wrong storage descriptor"
+ )
compare_desc(self, test_nsd, db_args[2], "NSD")
finally:
pass
- self.db.get_one.side_effect = lambda table, filter, fail_on_empty=None, fail_on_more=None: \
- {"_id": did, "_admin": db_nsd_content["_admin"]}
- with self.subTest(i=2, t='Check Pyangbind Validation: required properties'):
+ self.db.get_one.side_effect = (
+ lambda table, filter, fail_on_empty=None, fail_on_more=None: {
+ "_id": did,
+ "_admin": db_nsd_content["_admin"],
+ }
+ )
+ with self.subTest(i=2, t="Check Pyangbind Validation: required properties"):
tmp = test_nsd["id"]
del test_nsd["id"]
try:
- with self.assertRaises(EngineException, msg="Accepted NSD with a missing required property") as e:
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error in pyangbind validation: '{}'".format("id")),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted NSD with a missing required property"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm("Error in pyangbind validation: '{}'".format("id")),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
test_nsd["id"] = tmp
- with self.subTest(i=3, t='Check Pyangbind Validation: additional properties'):
+ with self.subTest(i=3, t="Check Pyangbind Validation: additional properties"):
test_nsd["extra-property"] = 0
try:
- with self.assertRaises(EngineException, msg="Accepted NSD with an additional property") as e:
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error in pyangbind validation: {} ({})"
- .format("json object contained a key that did not exist", "extra-property")),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted NSD with an additional property"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error in pyangbind validation: {} ({})".format(
+ "json object contained a key that did not exist",
+ "extra-property",
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
del test_nsd["extra-property"]
- with self.subTest(i=4, t='Check Pyangbind Validation: property types'):
+ with self.subTest(i=4, t="Check Pyangbind Validation: property types"):
tmp = test_nsd["designer"]
test_nsd["designer"] = {"key": 0}
try:
- with self.assertRaises(EngineException, msg="Accepted NSD with a wrongly typed property") as e:
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error in pyangbind validation: {} ({})"
- .format("json object contained a key that did not exist", "key")),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted NSD with a wrongly typed property"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error in pyangbind validation: {} ({})".format(
+ "json object contained a key that did not exist", "key"
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
test_nsd["designer"] = tmp
- with self.subTest(i=5, t='Check Input Validation: mgmt-network+virtual-link-protocol-data'):
- df = test_nsd['df'][0]
- mgmt_profile = {'id': 'id', 'virtual-link-desc-id': 'mgmt',
- 'virtual-link-protocol-data': {'associated-layer-protocol': 'ipv4'}}
- df['virtual-link-profile'] = [mgmt_profile]
+ with self.subTest(
+ i=5, t="Check Input Validation: mgmt-network+virtual-link-protocol-data"
+ ):
+ df = test_nsd["df"][0]
+ mgmt_profile = {
+ "id": "id",
+ "virtual-link-desc-id": "mgmt",
+ "virtual-link-protocol-data": {"associated-layer-protocol": "ipv4"},
+ }
+ df["virtual-link-profile"] = [mgmt_profile]
try:
- with self.assertRaises(EngineException, msg="Accepted VLD with mgmt-network+ip-profile") as e:
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
- " You cannot set a virtual-link-protocol-data when mgmt-network is True"
- .format(df["id"], mgmt_profile["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted VLD with mgmt-network+ip-profile"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
+ " You cannot set a virtual-link-protocol-data when mgmt-network is True".format(
+ df["id"], mgmt_profile["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
- del df['virtual-link-profile']
- with self.subTest(i=6, t='Check Descriptor Dependencies: vnfd-id[]'):
- self.db.get_one.side_effect = [{"_id": did, "_admin": db_nsd_content["_admin"]}, None]
+ del df["virtual-link-profile"]
+ with self.subTest(i=6, t="Check Descriptor Dependencies: vnfd-id[]"):
+ self.db.get_one.side_effect = [
+ {"_id": did, "_admin": db_nsd_content["_admin"]},
+ None,
+ ]
self.db.get_list.return_value = []
try:
- with self.assertRaises(EngineException, msg="Accepted wrong VNFD ID reference") as e:
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn(norm("'vnfd-id'='{}' references a non existing vnfd".format(test_nsd['vnfd-id'][0])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong VNFD ID reference"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(
+ "'vnfd-id'='{}' references a non existing vnfd".format(
+ test_nsd["vnfd-id"][0]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
pass
- with self.subTest(i=7, t='Check Descriptor Dependencies: '
- 'vld[vnfd-connection-point-ref][vnfd-connection-point-ref]'):
+ with self.subTest(
+ i=7,
+ t="Check Descriptor Dependencies: "
+ "vld[vnfd-connection-point-ref][vnfd-connection-point-ref]",
+ ):
vnfd_descriptor = deepcopy(db_vnfd_content)
- df = test_nsd['df'][0]
- affected_vnf_profile = df['vnf-profile'][0]
- affected_virtual_link = affected_vnf_profile['virtual-link-connectivity'][1]
- affected_cpd = vnfd_descriptor['ext-cpd'].pop()
- self.db.get_one.side_effect = [{"_id": did, "_admin": db_nsd_content["_admin"]}, None]
+ df = test_nsd["df"][0]
+ affected_vnf_profile = df["vnf-profile"][0]
+ affected_virtual_link = affected_vnf_profile["virtual-link-connectivity"][1]
+ affected_cpd = vnfd_descriptor["ext-cpd"].pop()
+ self.db.get_one.side_effect = [
+ {"_id": did, "_admin": db_nsd_content["_admin"]},
+ None,
+ ]
self.db.get_list.return_value = [vnfd_descriptor]
try:
- with self.assertRaises(EngineException, msg="Accepted wrong VLD CP reference") as e:
- self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
- "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
- "non existing ext-cpd:id inside vnfd '{}'"
- .format(df["id"], affected_vnf_profile["id"],
- affected_virtual_link["virtual-link-profile-id"], affected_cpd["id"],
- vnfd_descriptor["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ with self.assertRaises(
+ EngineException, msg="Accepted wrong VLD CP reference"
+ ) as e:
+ self.topic.upload_content(
+ fake_session, did, test_nsd, {}, {"Content-Type": []}
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+ "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+ "non existing ext-cpd:id inside vnfd '{}'".format(
+ df["id"],
+ affected_vnf_profile["id"],
+ affected_virtual_link["virtual-link-profile-id"],
+ affected_cpd["id"],
+ vnfd_descriptor["id"],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
finally:
pass
return
did = nsd_content["_id"]
self.fs.file_exists.return_value = True
self.fs.dir_ls.return_value = True
- with self.subTest(i=1, t='Normal Edition'):
+ with self.subTest(i=1, t="Normal Edition"):
now = time()
self.db.get_one.side_effect = [deepcopy(nsd_content), None]
self.db.get_list.return_value = [db_vnfd_content]
self.assertEqual(msg_args[2], data, "Wrong message content")
self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_args[1], did, "Wrong DB ID")
- self.assertEqual(db_args[2]["_admin"]["created"], nsd_content["_admin"]["created"],
- "Wrong creation time")
- self.assertGreater(db_args[2]["_admin"]["modified"], now, "Wrong modification time")
- self.assertEqual(db_args[2]["_admin"]["projects_read"], nsd_content["_admin"]["projects_read"],
- "Wrong read-only project list")
- self.assertEqual(db_args[2]["_admin"]["projects_write"], nsd_content["_admin"]["projects_write"],
- "Wrong read-write project list")
+ self.assertEqual(
+ db_args[2]["_admin"]["created"],
+ nsd_content["_admin"]["created"],
+ "Wrong creation time",
+ )
+ self.assertGreater(
+ db_args[2]["_admin"]["modified"], now, "Wrong modification time"
+ )
+ self.assertEqual(
+ db_args[2]["_admin"]["projects_read"],
+ nsd_content["_admin"]["projects_read"],
+ "Wrong read-only project list",
+ )
+ self.assertEqual(
+ db_args[2]["_admin"]["projects_write"],
+ nsd_content["_admin"]["projects_write"],
+ "Wrong read-write project list",
+ )
self.assertEqual(db_args[2]["id"], data["id"], "Wrong NSD ID")
self.assertEqual(db_args[2]["name"], data["name"], "Wrong NSD Name")
- with self.subTest(i=2, t='Conflict on Edit'):
+ with self.subTest(i=2, t="Conflict on Edit"):
data = {"id": "fake-nsd-id", "name": "new-nsd-name"}
- self.db.get_one.side_effect = [nsd_content, {"_id": str(uuid4()), "id": data["id"]}]
- with self.assertRaises(EngineException, msg="Accepted existing NSD ID") as e:
+ self.db.get_one.side_effect = [
+ nsd_content,
+ {"_id": str(uuid4()), "id": data["id"]},
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted existing NSD ID"
+ ) as e:
self.topic.edit(fake_session, did, data)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn(norm("{} with id '{}' already exists for this project".format("nsd", data["id"])),
- norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=3, t='Check Envelope'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(
+ "{} with id '{}' already exists for this project".format(
+ "nsd", data["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=3, t="Check Envelope"):
data = {"nsd": {"nsd": {"id": "new-nsd-id", "name": "new-nsd-name"}}}
self.db.get_one.side_effect = [nsd_content, None]
- with self.assertRaises(EngineException, msg="Accepted NSD with wrong envelope") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted NSD with wrong envelope"
+ ) as e:
self.topic.edit(fake_session, did, data, content=nsd_content)
- self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
- self.assertIn("'nsd' must be a list of only one element", norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "'nsd' must be a list of only one element",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
return
def test_delete_nsd(self):
did = db_nsd_content["_id"]
self.db.get_one.return_value = db_nsd_content
p_id = db_nsd_content["_admin"]["projects_read"][0]
- with self.subTest(i=1, t='Normal Deletion'):
+ with self.subTest(i=1, t="Normal Deletion"):
self.db.get_list.return_value = []
self.db.del_one.return_value = {"deleted": 1}
self.topic.delete(fake_session, did)
self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_args[1]["_id"], did, "Wrong DB ID")
- self.assertEqual(db_args[1]["_admin.projects_write.cont"], [p_id, 'ANY'], "Wrong DB filter")
+ self.assertEqual(
+ db_args[1]["_admin.projects_write.cont"],
+ [p_id, "ANY"],
+ "Wrong DB filter",
+ )
db_g1_args = self.db.get_one.call_args[0]
self.assertEqual(db_g1_args[0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_g1_args[1]["_id"], did, "Wrong DB NSD ID")
self.assertEqual(db_gl_calls[0][0][0], "nsrs", "Wrong DB topic")
# self.assertEqual(db_gl_calls[0][0][1]["nsd-id"], did, "Wrong DB NSD ID") # Filter changed after call
self.assertEqual(db_gl_calls[1][0][0], "nsts", "Wrong DB topic")
- self.assertEqual(db_gl_calls[1][0][1]["netslice-subnet.ANYINDEX.nsd-ref"], db_nsd_content["id"],
- "Wrong DB NSD netslice-subnet nsd-ref")
+ self.assertEqual(
+ db_gl_calls[1][0][1]["netslice-subnet.ANYINDEX.nsd-ref"],
+ db_nsd_content["id"],
+ "Wrong DB NSD netslice-subnet nsd-ref",
+ )
self.db.set_one.assert_not_called()
fs_del_calls = self.fs.file_delete.call_args_list
self.assertEqual(fs_del_calls[0][0][0], did, "Wrong FS file id")
- self.assertEqual(fs_del_calls[1][0][0], did + '_', "Wrong FS folder id")
- with self.subTest(i=2, t='Conflict on Delete - NSD in use by nsr'):
+ self.assertEqual(fs_del_calls[1][0][0], did + "_", "Wrong FS folder id")
+ with self.subTest(i=2, t="Conflict on Delete - NSD in use by nsr"):
self.db.get_list.return_value = [{"_id": str(uuid4()), "name": "fake-nsr"}]
- with self.assertRaises(EngineException, msg="Accepted NSD in use by NSR") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted NSD in use by NSR"
+ ) as e:
self.topic.delete(fake_session, did)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("there is at least one ns instance using this descriptor", norm(str(e.exception)),
- "Wrong exception text")
- with self.subTest(i=3, t='Conflict on Delete - NSD in use by NST'):
- self.db.get_list.side_effect = [[], [{"_id": str(uuid4()), "name": "fake-nst"}]]
- with self.assertRaises(EngineException, msg="Accepted NSD in use by NST") as e:
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "there is at least one ns instance using this descriptor",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=3, t="Conflict on Delete - NSD in use by NST"):
+ self.db.get_list.side_effect = [
+ [],
+ [{"_id": str(uuid4()), "name": "fake-nst"}],
+ ]
+ with self.assertRaises(
+ EngineException, msg="Accepted NSD in use by NST"
+ ) as e:
self.topic.delete(fake_session, did)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn("there is at least one netslice template referencing this descriptor", norm(str(e.exception)),
- "Wrong exception text")
- with self.subTest(i=4, t='Non-existent NSD'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ "there is at least one netslice template referencing this descriptor",
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
+ with self.subTest(i=4, t="Non-existent NSD"):
excp_msg = "Not found any {} with filter='{}'".format("NSD", {"_id": did})
self.db.get_one.side_effect = DbException(excp_msg, HTTPStatus.NOT_FOUND)
- with self.assertRaises(DbException, msg="Accepted non-existent NSD ID") as e:
+ with self.assertRaises(
+ DbException, msg="Accepted non-existent NSD ID"
+ ) as e:
self.topic.delete(fake_session, did)
- self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
- self.assertIn(norm(excp_msg), norm(str(e.exception)), "Wrong exception text")
- with self.subTest(i=5, t='No delete because referenced by other project'):
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(excp_msg), norm(str(e.exception)), "Wrong exception text"
+ )
+ with self.subTest(i=5, t="No delete because referenced by other project"):
db_nsd_content["_admin"]["projects_read"].append("other_project")
self.db.get_one = Mock(return_value=db_nsd_content)
self.db.get_list = Mock(return_value=[])
db_s1_args = self.db.set_one.call_args
self.assertEqual(db_s1_args[0][0], self.topic.topic, "Wrong DB topic")
self.assertEqual(db_s1_args[0][1]["_id"], did, "Wrong DB ID")
- self.assertIn(p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter")
- self.assertIsNone(db_s1_args[1]["update_dict"], "Wrong DB update dictionary")
- self.assertEqual(db_s1_args[1]["pull_list"],
- {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
- "Wrong DB pull_list dictionary")
+ self.assertIn(
+ p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter"
+ )
+ self.assertIsNone(
+ db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
+ )
+ self.assertEqual(
+ db_s1_args[1]["pull_list"],
+ {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
+ "Wrong DB pull_list dictionary",
+ )
self.fs.file_delete.assert_not_called()
return
- def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_on_valid_descriptor(self):
+ def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_on_valid_descriptor(
+ self,
+ ):
indata = deepcopy(db_nsd_content)
- vld = indata['virtual-link-desc'][0]
- self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
+ vld = indata["virtual-link-desc"][0]
+ self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(
+ vld, indata
+ )
- def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_when_both_defined(self):
+ def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_when_both_defined(
+ self,
+ ):
indata = deepcopy(db_nsd_content)
- vld = indata['virtual-link-desc'][0]
- df = indata['df'][0]
- affected_vlp = {'id': 'id', 'virtual-link-desc-id': 'mgmt',
- 'virtual-link-protocol-data': {'associated-layer-protocol': 'ipv4'}}
- df['virtual-link-profile'] = [affected_vlp]
+ vld = indata["virtual-link-desc"][0]
+ df = indata["df"][0]
+ affected_vlp = {
+ "id": "id",
+ "virtual-link-desc-id": "mgmt",
+ "virtual-link-protocol-data": {"associated-layer-protocol": "ipv4"},
+ }
+ df["virtual-link-profile"] = [affected_vlp]
with self.assertRaises(EngineException) as e:
- self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
- " You cannot set a virtual-link-protocol-data when mgmt-network is True"
- .format(df["id"], affected_vlp["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(
+ vld, indata
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
+ " You cannot set a virtual-link-protocol-data when mgmt-network is True".format(
+ df["id"], affected_vlp["id"]
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_validate_vnf_profiles_vnfd_id_on_valid_descriptor(self):
indata = deepcopy(db_nsd_content)
def test_validate_vnf_profiles_vnfd_id_when_missing_vnfd(self):
indata = deepcopy(db_nsd_content)
- df = indata['df'][0]
- affected_vnf_profile = df['vnf-profile'][0]
- indata['vnfd-id'] = ['non-existing-vnfd']
+ df = indata["df"][0]
+ affected_vnf_profile = df["vnf-profile"][0]
+ indata["vnfd-id"] = ["non-existing-vnfd"]
with self.assertRaises(EngineException) as e:
self.topic.validate_vnf_profiles_vnfd_id(indata)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
- "does not match any vnfd-id"
- .format(df["id"], affected_vnf_profile["id"], affected_vnf_profile['vnfd-id'])),
- norm(str(e.exception)), "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
+ "does not match any vnfd-id".format(
+ df["id"],
+ affected_vnf_profile["id"],
+ affected_vnf_profile["vnfd-id"],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
- def test_validate_df_vnf_profiles_constituent_connection_points_on_valid_descriptor(self):
+ def test_validate_df_vnf_profiles_constituent_connection_points_on_valid_descriptor(
+ self,
+ ):
nsd_descriptor = deepcopy(db_nsd_content)
vnfd_descriptor = deepcopy(db_vnfd_content)
- df = nsd_descriptor['df'][0]
- vnfds_index = {vnfd_descriptor['id']: vnfd_descriptor}
- self.topic.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
+ df = nsd_descriptor["df"][0]
+ vnfds_index = {vnfd_descriptor["id"]: vnfd_descriptor}
+ self.topic.validate_df_vnf_profiles_constituent_connection_points(
+ df, vnfds_index
+ )
- def test_validate_df_vnf_profiles_constituent_connection_points_when_missing_connection_point(self):
+ def test_validate_df_vnf_profiles_constituent_connection_points_when_missing_connection_point(
+ self,
+ ):
nsd_descriptor = deepcopy(db_nsd_content)
vnfd_descriptor = deepcopy(db_vnfd_content)
- df = nsd_descriptor['df'][0]
- affected_vnf_profile = df['vnf-profile'][0]
- affected_virtual_link = affected_vnf_profile['virtual-link-connectivity'][1]
- vnfds_index = {vnfd_descriptor['id']: vnfd_descriptor}
- affected_cpd = vnfd_descriptor['ext-cpd'].pop()
+ df = nsd_descriptor["df"][0]
+ affected_vnf_profile = df["vnf-profile"][0]
+ affected_virtual_link = affected_vnf_profile["virtual-link-connectivity"][1]
+ vnfds_index = {vnfd_descriptor["id"]: vnfd_descriptor}
+ affected_cpd = vnfd_descriptor["ext-cpd"].pop()
with self.assertRaises(EngineException) as e:
- self.topic.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
- self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
- self.assertIn(norm("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
- "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
- "non existing ext-cpd:id inside vnfd '{}'"
- .format(df["id"], affected_vnf_profile["id"],
- affected_virtual_link["virtual-link-profile-id"], affected_cpd["id"],
- vnfd_descriptor["id"])),
- norm(str(e.exception)), "Wrong exception text")
+ self.topic.validate_df_vnf_profiles_constituent_connection_points(
+ df, vnfds_index
+ )
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.UNPROCESSABLE_ENTITY,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ norm(
+ "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+ "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+ "non existing ext-cpd:id inside vnfd '{}'".format(
+ df["id"],
+ affected_vnf_profile["id"],
+ affected_virtual_link["virtual-link-profile-id"],
+ affected_cpd["id"],
+ vnfd_descriptor["id"],
+ )
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
def test_check_conflict_on_edit_when_missing_constituent_vnfd_id(self):
nsd_descriptor = deepcopy(db_nsd_content)
- invalid_vnfd_id = 'invalid-vnfd-id'
- nsd_descriptor['id'] = 'invalid-vnfd-id-ns'
- nsd_descriptor['vnfd-id'][0] = invalid_vnfd_id
- nsd_descriptor['df'][0]['vnf-profile'][0]['vnfd-id'] = invalid_vnfd_id
- nsd_descriptor['df'][0]['vnf-profile'][1]['vnfd-id'] = invalid_vnfd_id
+ invalid_vnfd_id = "invalid-vnfd-id"
+ nsd_descriptor["id"] = "invalid-vnfd-id-ns"
+ nsd_descriptor["vnfd-id"][0] = invalid_vnfd_id
+ nsd_descriptor["df"][0]["vnf-profile"][0]["vnfd-id"] = invalid_vnfd_id
+ nsd_descriptor["df"][0]["vnf-profile"][1]["vnfd-id"] = invalid_vnfd_id
with self.assertRaises(EngineException) as e:
self.db.get_list.return_value = []
- nsd_descriptor = self.topic.check_conflict_on_edit(fake_session, nsd_descriptor, [], 'id')
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
- self.assertIn(norm("Descriptor error at 'vnfd-id'='{}' references a non "
- "existing vnfd".format(invalid_vnfd_id)),
- norm(str(e.exception)), "Wrong exception text")
+ nsd_descriptor = self.topic.check_conflict_on_edit(
+ fake_session, nsd_descriptor, [], "id"
+ )
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
+ self.assertIn(
+ norm(
+ "Descriptor error at 'vnfd-id'='{}' references a non "
+ "existing vnfd".format(invalid_vnfd_id)
+ ),
+ norm(str(e.exception)),
+ "Wrong exception text",
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
##
import unittest
-from unittest.mock import Mock, mock_open # patch, MagicMock
+from unittest.mock import Mock, mock_open # patch, MagicMock
from osm_common.dbbase import DbException
from osm_nbi.engine import EngineException
from osm_common.dbmemory import DbMemory
from osm_common.msgbase import MsgBase
from http import HTTPStatus
from osm_nbi.instance_topics import NsLcmOpTopic, NsrTopic
-from osm_nbi.tests.test_db_descriptors import db_vim_accounts_text, db_nsds_text, db_vnfds_text, db_nsrs_text,\
- db_vnfrs_text
+from osm_nbi.tests.test_db_descriptors import (
+ db_vim_accounts_text,
+ db_nsds_text,
+ db_vnfds_text,
+ db_nsrs_text,
+ db_vnfrs_text,
+)
from copy import deepcopy
import yaml
class TestNsLcmOpTopic(unittest.TestCase):
-
def setUp(self):
self.db = DbMemory()
self.fs = Mock(FsBase())
self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
self.nslcmop_topic.check_quota = Mock(return_value=None) # skip quota
- self.db.create_list("vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader))
+ self.db.create_list(
+ "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
+ )
self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
self.vim_id = self.vim["_id"]
def test_create_instantiate(self):
- session = {"force": False, "admin": False, "public": False, "project_id": [self.nsr_project], "method": "write"}
+ session = {
+ "force": False,
+ "admin": False,
+ "public": False,
+ "project_id": [self.nsr_project],
+ "method": "write",
+ }
indata = {
"nsdId": self.nsd_id,
"nsInstanceId": self.nsr_id,
"nsName": "name",
"vimAccountId": self.vim_id,
- "additionalParamsForVnf": [{"member-vnf-index": "1", "additionalParams": {"touch_filename": "file"}},
- {"member-vnf-index": "2", "additionalParams": {"touch_filename": "file"}}],
- "vnf": [{"member-vnf-index": "1",
- "vdu": [{"id": "dataVM", "interface": [{"name": "dataVM-eth0",
- "ip-address": "10.11.12.13",
- "floating-ip-required": True}]
- }],
- "internal-vld": [{"name": "internal", "vim-network-id": "vim-net-id"}]
- }],
+ "additionalParamsForVnf": [
+ {
+ "member-vnf-index": "1",
+ "additionalParams": {"touch_filename": "file"},
+ },
+ {
+ "member-vnf-index": "2",
+ "additionalParams": {"touch_filename": "file"},
+ },
+ ],
+ "vnf": [
+ {
+ "member-vnf-index": "1",
+ "vdu": [
+ {
+ "id": "dataVM",
+ "interface": [
+ {
+ "name": "dataVM-eth0",
+ "ip-address": "10.11.12.13",
+ "floating-ip-required": True,
+ }
+ ],
+ }
+ ],
+ "internal-vld": [
+ {"name": "internal", "vim-network-id": "vim-net-id"}
+ ],
+ }
+ ],
"lcmOperationType": "instantiate",
-
}
rollback = []
headers = {}
- nslcmop_id, _ = self.nslcmop_topic.new(rollback, session, indata=deepcopy(indata), kwargs=None, headers=headers)
+ nslcmop_id, _ = self.nslcmop_topic.new(
+ rollback, session, indata=deepcopy(indata), kwargs=None, headers=headers
+ )
# check nslcmop is created at database
- self.assertEqual(self.db.create.call_count, 1, "database create not called, or called more than once")
+ self.assertEqual(
+ self.db.create.call_count,
+ 1,
+ "database create not called, or called more than once",
+ )
_call = self.db.create.call_args_list[0]
- self.assertEqual(_call[0][0], "nslcmops", "must be create a nslcmops entry at database")
+ self.assertEqual(
+            _call[0][0], "nslcmops", "must create a nslcmops entry at database"
+ )
created_nslcmop = _call[0][1]
- self.assertEqual(nslcmop_id, created_nslcmop["_id"], "mismatch between return id and database '_id'")
- self.assertEqual(self.nsr_id, created_nslcmop["nsInstanceId"], "bad reference id from nslcmop to nsr")
- self.assertTrue(created_nslcmop["_admin"].get("projects_read"),
- "Database record must contain '_amdin.projects_read'")
- self.assertIn("created", created_nslcmop["_admin"], "Database record must contain '_admin.created'")
- self.assertTrue(created_nslcmop["lcmOperationType"] == "instantiate",
- "Database record must contain 'lcmOperationType=instantiate'")
+ self.assertEqual(
+ nslcmop_id,
+ created_nslcmop["_id"],
+ "mismatch between return id and database '_id'",
+ )
+ self.assertEqual(
+ self.nsr_id,
+ created_nslcmop["nsInstanceId"],
+ "bad reference id from nslcmop to nsr",
+ )
+ self.assertTrue(
+ created_nslcmop["_admin"].get("projects_read"),
+            "Database record must contain '_admin.projects_read'",
+ )
+ self.assertIn(
+ "created",
+ created_nslcmop["_admin"],
+ "Database record must contain '_admin.created'",
+ )
+ self.assertTrue(
+ created_nslcmop["lcmOperationType"] == "instantiate",
+ "Database record must contain 'lcmOperationType=instantiate'",
+ )
- self.assertEqual(len(rollback), len(self.db.set_one.call_args_list) + 1,
- "rollback mismatch with created/set items at database")
+ self.assertEqual(
+ len(rollback),
+ len(self.db.set_one.call_args_list) + 1,
+ "rollback mismatch with created/set items at database",
+ )
# test parameters with error
bad_id = "88d90b0c-faff-4b9f-bccd-aaaaaaaaaaaa"
test_set = (
- ("nsr not found", {"nsInstanceId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)),
+ (
+ "nsr not found",
+ {"nsInstanceId": bad_id},
+ DbException,
+ HTTPStatus.NOT_FOUND,
+ ("not found", bad_id),
+ ),
# TODO add "nsd"
# ({"vimAccountId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)), # TODO add "vim"
- ("bad member-vnf-index", {"vnf.0.member-vnf-index": "k"}, EngineException, HTTPStatus.BAD_REQUEST,
- ("k",)),
+ (
+ "bad member-vnf-index",
+ {"vnf.0.member-vnf-index": "k"},
+ EngineException,
+ HTTPStatus.BAD_REQUEST,
+ ("k",),
+ ),
)
for message, kwargs_, expect_exc, expect_code, expect_text_list in test_set:
with self.assertRaises(expect_exc, msg=message) as e:
- self.nslcmop_topic.new(rollback, session, indata=deepcopy(indata), kwargs=kwargs_, headers=headers)
+ self.nslcmop_topic.new(
+ rollback,
+ session,
+ indata=deepcopy(indata),
+ kwargs=kwargs_,
+ headers=headers,
+ )
if expect_code:
self.assertTrue(e.exception.http_code == expect_code)
if expect_text_list:
for expect_text in expect_text_list:
- self.assertIn(expect_text, str(e.exception).lower(),
- "Expected '{}' at exception text".format(expect_text))
+ self.assertIn(
+ expect_text,
+ str(e.exception).lower(),
+ "Expected '{}' at exception text".format(expect_text),
+ )
def test_check_ns_operation_action(self):
nsrs = self.db.get_list("nsrs")[0]
"member_vnf_index": "1",
"vdu_id": None,
"primitive": "touch",
- "primitive_params": {"filename": "file"}
+ "primitive_params": {"filename": "file"},
}
self.nslcmop_topic._check_ns_operation(session, nsrs, "action", indata)
continue
indata_copy[k] = "non_existing"
with self.assertRaises(EngineException) as exc_manager:
- self.nslcmop_topic._check_ns_operation(session, nsrs, "action", indata_copy)
+ self.nslcmop_topic._check_ns_operation(
+ session, nsrs, "action", indata_copy
+ )
exc = exc_manager.exception
- self.assertEqual(exc.http_code, HTTPStatus.BAD_REQUEST, "Engine exception bad http_code with {}".
- format(indata_copy))
+ self.assertEqual(
+ exc.http_code,
+ HTTPStatus.BAD_REQUEST,
+ "Engine exception bad http_code with {}".format(indata_copy),
+ )
class TestNsrTopic(unittest.TestCase):
-
def setUp(self):
self.db = DbMemory()
self.fs = Mock(FsBase())
self.nsr_topic = NsrTopic(self.db, self.fs, self.msg, None)
self.nsr_topic.check_quota = Mock(return_value=None) # skip quota
- self.db.create_list("vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader))
+ self.db.create_list(
+ "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
+ )
self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
self.db.create = Mock(return_value="created_id")
self.vim_id = self.vim["_id"]
def test_create(self):
- session = {"force": False, "admin": False, "public": False, "project_id": [self.nsd_project], "method": "write"}
+ session = {
+ "force": False,
+ "admin": False,
+ "public": False,
+ "project_id": [self.nsd_project],
+ "method": "write",
+ }
indata = {
"nsdId": self.nsd_id,
"nsName": "name",
"vimAccountId": self.vim_id,
"additionalParamsForVnf": [
- {"member-vnf-index": "hackfest_vnf1", "additionalParams": {"touch_filename": "file"}},
- {"member-vnf-index": "hackfest_vnf2", "additionalParams": {"touch_filename": "file"}}
- ]
+ {
+ "member-vnf-index": "hackfest_vnf1",
+ "additionalParams": {"touch_filename": "file"},
+ },
+ {
+ "member-vnf-index": "hackfest_vnf2",
+ "additionalParams": {"touch_filename": "file"},
+ },
+ ],
}
rollback = []
headers = {}
- self.nsr_topic.new(rollback, session, indata=indata, kwargs=None, headers=headers)
+ self.nsr_topic.new(
+ rollback, session, indata=indata, kwargs=None, headers=headers
+ )
# check vnfrs and nsrs created in whatever order
created_vnfrs = []
created_item = _call[0][1]
if _call[0][0] == "vnfrs":
created_vnfrs.append(created_item)
- self.assertIn("member-vnf-index-ref", created_item,
- "Created item must contain member-vnf-index-ref section")
+ self.assertIn(
+ "member-vnf-index-ref",
+ created_item,
+ "Created item must contain member-vnf-index-ref section",
+ )
if nsr_id:
- self.assertEqual(nsr_id, created_item["nsr-id-ref"], "bad reference id from vnfr to nsr")
+ self.assertEqual(
+ nsr_id,
+ created_item["nsr-id-ref"],
+ "bad reference id from vnfr to nsr",
+ )
else:
nsr_id = created_item["nsr-id-ref"]
elif _call[0][0] == "nsrs":
created_nsrs.append(created_item)
if nsr_id:
- self.assertEqual(nsr_id, created_item["_id"], "bad reference id from vnfr to nsr")
+ self.assertEqual(
+ nsr_id, created_item["_id"], "bad reference id from vnfr to nsr"
+ )
else:
nsr_id = created_item["_id"]
else:
- assert True, "created an unknown record {} at database".format(_call[0][0])
-
- self.assertTrue(created_item["_admin"].get("projects_read"),
- "Database record must contain '_amdin.projects_read'")
- self.assertIn("created", created_item["_admin"], "Database record must contain '_admin.created'")
- self.assertTrue(created_item["_admin"]["nsState"] == "NOT_INSTANTIATED",
- "Database record must contain '_admin.nstate=NOT INSTANTIATE'")
-
- self.assertEqual(len(created_vnfrs), 2, "created a mismatch number of vnfr at database")
- self.assertEqual(len(created_nsrs), 1, "Only one nsrs must be created at database")
- self.assertEqual(len(rollback), len(created_vnfrs) + 1, "rollback mismatch with created items at database")
+ assert True, "created an unknown record {} at database".format(
+ _call[0][0]
+ )
+
+ self.assertTrue(
+ created_item["_admin"].get("projects_read"),
+            "Database record must contain '_admin.projects_read'",
+ )
+ self.assertIn(
+ "created",
+ created_item["_admin"],
+ "Database record must contain '_admin.created'",
+ )
+ self.assertTrue(
+ created_item["_admin"]["nsState"] == "NOT_INSTANTIATED",
+ "Database record must contain '_admin.nstate=NOT INSTANTIATE'",
+ )
+
+ self.assertEqual(
+ len(created_vnfrs), 2, "created a mismatch number of vnfr at database"
+ )
+ self.assertEqual(
+ len(created_nsrs), 1, "Only one nsrs must be created at database"
+ )
+ self.assertEqual(
+ len(rollback),
+ len(created_vnfrs) + 1,
+ "rollback mismatch with created items at database",
+ )
# test parameters with error
bad_id = "88d90b0c-faff-4b9f-bccd-aaaaaaaaaaaa"
test_set = (
# TODO add "nsd"
- ("nsd not found", {"nsdId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)),
+ (
+ "nsd not found",
+ {"nsdId": bad_id},
+ DbException,
+ HTTPStatus.NOT_FOUND,
+ ("not found", bad_id),
+ ),
# ({"vimAccountId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)), # TODO add "vim"
- ("additional params not supply", {"additionalParamsForVnf.0.member-vnf-index": "k"}, EngineException,
- HTTPStatus.BAD_REQUEST, None),
+ (
+ "additional params not supply",
+ {"additionalParamsForVnf.0.member-vnf-index": "k"},
+ EngineException,
+ HTTPStatus.BAD_REQUEST,
+ None,
+ ),
)
for message, kwargs_, expect_exc, expect_code, expect_text_list in test_set:
with self.assertRaises(expect_exc, msg=message) as e:
- self.nsr_topic.new(rollback, session, indata=deepcopy(indata), kwargs=kwargs_, headers=headers)
+ self.nsr_topic.new(
+ rollback,
+ session,
+ indata=deepcopy(indata),
+ kwargs=kwargs_,
+ headers=headers,
+ )
if expect_code:
self.assertTrue(e.exception.http_code == expect_code)
if expect_text_list:
for expect_text in expect_text_list:
- self.assertIn(expect_text, str(e.exception).lower(),
- "Expected '{}' at exception text".format(expect_text))
+ self.assertIn(
+ expect_text,
+ str(e.exception).lower(),
+ "Expected '{}' at exception text".format(expect_text),
+ )
def test_delete_ns(self):
self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
p_id = self.nsd_project
p_other = "other_p"
- session = {"force": False, "admin": False, "public": None, "project_id": [p_id], "method": "delete"}
- session2 = {"force": False, "admin": False, "public": None, "project_id": [p_other], "method": "delete"}
- session_force = {"force": True, "admin": True, "public": None, "project_id": [], "method": "delete"}
- with self.subTest(i=1, t='Normal Deletion'):
+ session = {
+ "force": False,
+ "admin": False,
+ "public": None,
+ "project_id": [p_id],
+ "method": "delete",
+ }
+ session2 = {
+ "force": False,
+ "admin": False,
+ "public": None,
+ "project_id": [p_other],
+ "method": "delete",
+ }
+ session_force = {
+ "force": True,
+ "admin": True,
+ "public": None,
+ "project_id": [],
+ "method": "delete",
+ }
+ with self.subTest(i=1, t="Normal Deletion"):
self.db.del_one = Mock()
self.db.set_one = Mock()
self.nsr_topic.delete(session, self.nsr_id)
db_args_ro_nsrs = self.db.del_one.call_args_list[1][0]
db_args = self.db.del_one.call_args_list[0][0]
msg_args = self.msg.write.call_args[0]
- self.assertEqual(msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic")
+ self.assertEqual(
+ msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic"
+ )
self.assertEqual(msg_args[1], "deleted", "Wrong message action")
self.assertEqual(msg_args[2], {"_id": self.nsr_id}, "Wrong message content")
self.assertEqual(db_args_ro_nsrs[0], "ro_nsrs", "Wrong DB topic")
self.assertEqual(db_args[0], self.nsr_topic.topic, "Wrong DB topic")
self.assertEqual(db_args[1]["_id"], self.nsr_id, "Wrong DB ID")
- self.assertEqual(db_args[1]["_admin.projects_read.cont"], [p_id], "Wrong DB filter")
+ self.assertEqual(
+ db_args[1]["_admin.projects_read.cont"], [p_id], "Wrong DB filter"
+ )
self.db.set_one.assert_not_called()
fs_del_calls = self.fs.file_delete.call_args_list
self.assertEqual(fs_del_calls[0][0][0], self.nsr_id, "Wrong FS file id")
- with self.subTest(i=2, t='No delete because referenced by other project'):
- self.db_set_one("nsrs", {"_id": self.nsr_id}, update_dict=None, push={"_admin.projects_read": p_other,
- "_admin.projects_write": p_other})
+ with self.subTest(i=2, t="No delete because referenced by other project"):
+ self.db_set_one(
+ "nsrs",
+ {"_id": self.nsr_id},
+ update_dict=None,
+ push={
+ "_admin.projects_read": p_other,
+ "_admin.projects_write": p_other,
+ },
+ )
self.db.del_one.reset_mock()
self.db.set_one.reset_mock()
self.msg.write.reset_mock()
db_s1_args = self.db.set_one.call_args
self.assertEqual(db_s1_args[0][0], self.nsr_topic.topic, "Wrong DB topic")
self.assertEqual(db_s1_args[0][1]["_id"], self.nsr_id, "Wrong DB ID")
- self.assertIsNone(db_s1_args[1]["update_dict"], "Wrong DB update dictionary")
- self.assertEqual(db_s1_args[1]["pull_list"],
- {"_admin.projects_read": [p_other], "_admin.projects_write": [p_other]},
- "Wrong DB pull_list dictionary")
+ self.assertIsNone(
+ db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
+ )
+ self.assertEqual(
+ db_s1_args[1]["pull_list"],
+ {"_admin.projects_read": [p_other], "_admin.projects_write": [p_other]},
+ "Wrong DB pull_list dictionary",
+ )
self.fs.file_delete.assert_not_called()
- with self.subTest(i=4, t='Delete with force and admin'):
+ with self.subTest(i=4, t="Delete with force and admin"):
self.db.del_one.reset_mock()
self.db.set_one.reset_mock()
self.msg.write.reset_mock()
db_args_ro_nsrs = self.db.del_one.call_args_list[1][0]
db_args = self.db.del_one.call_args_list[0][0]
msg_args = self.msg.write.call_args[0]
- self.assertEqual(msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic")
+ self.assertEqual(
+ msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic"
+ )
self.assertEqual(msg_args[1], "deleted", "Wrong message action")
self.assertEqual(msg_args[2], {"_id": self.nsr_id}, "Wrong message content")
self.assertEqual(db_args_ro_nsrs[0], "ro_nsrs", "Wrong DB topic")
self.db.set_one.assert_not_called()
fs_del_calls = self.fs.file_delete.call_args_list
self.assertEqual(fs_del_calls[0][0][0], self.nsr_id, "Wrong FS file id")
- with self.subTest(i=3, t='Conflict on Delete - NS in INSTANTIATED state'):
- self.db_set_one("nsrs", {"_id": self.nsr_id}, {"_admin.nsState": "INSTANTIATED"},
- pull={"_admin.projects_read": p_other, "_admin.projects_write": p_other})
+ with self.subTest(i=3, t="Conflict on Delete - NS in INSTANTIATED state"):
+ self.db_set_one(
+ "nsrs",
+ {"_id": self.nsr_id},
+ {"_admin.nsState": "INSTANTIATED"},
+ pull={
+ "_admin.projects_read": p_other,
+ "_admin.projects_write": p_other,
+ },
+ )
self.db.del_one.reset_mock()
self.db.set_one.reset_mock()
self.msg.write.reset_mock()
self.fs.file_delete.reset_mock()
- with self.assertRaises(EngineException, msg="Accepted NSR with nsState INSTANTIATED") as e:
+ with self.assertRaises(
+ EngineException, msg="Accepted NSR with nsState INSTANTIATED"
+ ) as e:
self.nsr_topic.delete(session, self.nsr_id)
- self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+ )
self.assertIn("INSTANTIATED", str(e.exception), "Wrong exception text")
# TODOD with self.subTest(i=3, t='Conflict on Delete - NS in use by NSI'):
- with self.subTest(i=4, t='Non-existent NS'):
+ with self.subTest(i=4, t="Non-existent NS"):
self.db.del_one.reset_mock()
self.db.set_one.reset_mock()
self.msg.write.reset_mock()
self.fs.file_delete.reset_mock()
excp_msg = "Not found"
- with self.assertRaises(DbException, msg="Accepted non-existent NSD ID") as e:
+ with self.assertRaises(
+ DbException, msg="Accepted non-existent NSD ID"
+ ) as e:
self.nsr_topic.delete(session2, "other_id")
- self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
+ self.assertEqual(
+ e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+ )
self.assertIn(excp_msg, str(e.exception), "Wrong exception text")
self.assertIn("other_id", str(e.exception), "Wrong exception text")
return
from osm_nbi.engine import EngineException
from osm_common.dbmemory import DbMemory
from osm_nbi.pmjobs_topics import PmJobsTopic
-from osm_nbi.tests.test_db_descriptors import db_nsds_text, db_vnfds_text, db_nsrs_text, db_vnfrs_text
-from osm_nbi.tests.pmjob_mocks.response import show_res, prom_res, cpu_utilization, users, load, empty
+from osm_nbi.tests.test_db_descriptors import (
+ db_nsds_text,
+ db_vnfds_text,
+ db_nsrs_text,
+ db_vnfrs_text,
+)
+from osm_nbi.tests.pmjob_mocks.response import (
+ show_res,
+ prom_res,
+ cpu_utilization,
+ users,
+ load,
+ empty,
+)
class PmJobsTopicTest(asynctest.TestCase):
-
def setUp(self):
self.db = DbMemory()
self.pmjobs_topic = PmJobsTopic(self.db, host="prometheus", port=9091)
self.nsr_id = self.nsr["_id"]
project_id = self.nsr["_admin"]["projects_write"]
"""metric_check_list contains the vnf metric name used in descriptor i.e users,load"""
- self.metric_check_list = ['cpu_utilization', 'average_memory_utilization', 'disk_read_ops',
- 'disk_write_ops', 'disk_read_bytes', 'disk_write_bytes',
- 'packets_dropped', 'packets_sent', 'packets_received', 'users', 'load']
- self.session = {"username": "admin", "project_id": project_id, "method": None,
- "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+ self.metric_check_list = [
+ "cpu_utilization",
+ "average_memory_utilization",
+ "disk_read_ops",
+ "disk_write_ops",
+ "disk_read_bytes",
+ "disk_write_bytes",
+ "packets_dropped",
+ "packets_sent",
+ "packets_received",
+ "users",
+ "load",
+ ]
+ self.session = {
+ "username": "admin",
+ "project_id": project_id,
+ "method": None,
+ "admin": True,
+ "force": False,
+ "public": False,
+ "allow_show_user_project_role": True,
+ }
def set_get_mock_res(self, mock_res, ns_id, metric_list):
site = "http://prometheus:9091/api/v1/query?query=osm_metric_name{ns_id='nsr'}"
- site = re.sub(r'nsr', ns_id, site)
+ site = re.sub(r"nsr", ns_id, site)
for metric in metric_list:
- endpoint = re.sub(r'metric_name', metric, site)
- if metric == 'cpu_utilization':
+ endpoint = re.sub(r"metric_name", metric, site)
+ if metric == "cpu_utilization":
response = yaml.load(cpu_utilization, Loader=yaml.Loader)
- elif metric == 'users':
+ elif metric == "users":
response = yaml.load(users, Loader=yaml.Loader)
- elif metric == 'load':
+ elif metric == "load":
response = yaml.load(load, Loader=yaml.Loader)
else:
response = yaml.load(empty, Loader=yaml.Loader)
prom_response = yaml.load(prom_res, Loader=yaml.Loader)
with aioresponses() as mock_res:
self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
- result = await self.pmjobs_topic._prom_metric_request(self.nsr_id, self.metric_check_list)
+ result = await self.pmjobs_topic._prom_metric_request(
+ self.nsr_id, self.metric_check_list
+ )
self.assertCountEqual(result, prom_response, "Metric Data is valid")
with self.subTest("Test case2 failed in test_prom"):
- with self.assertRaises(EngineException, msg="Prometheus not reachable") as e:
- await self.pmjobs_topic._prom_metric_request(self.nsr_id, self.metric_check_list)
+ with self.assertRaises(
+ EngineException, msg="Prometheus not reachable"
+ ) as e:
+ await self.pmjobs_topic._prom_metric_request(
+ self.nsr_id, self.metric_check_list
+ )
self.assertIn("Connection to ", str(e.exception), "Wrong exception text")
def test_show(self):
with aioresponses() as mock_res:
self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
result = self.pmjobs_topic.show(self.session, self.nsr_id)
- self.assertEqual(len(result['entries']), 1, "Number of metrics returned")
+ self.assertEqual(len(result["entries"]), 1, "Number of metrics returned")
self.assertCountEqual(result, show_response, "Response is valid")
with self.subTest("Test case2 failed in test_show"):
wrong_ns_id = "88d90b0c-faff-4bbc-cccc-aaaaaaaaaaaa"
self.set_get_mock_res(mock_res, wrong_ns_id, self.metric_check_list)
with self.assertRaises(EngineException, msg="ns not found") as e:
self.pmjobs_topic.show(self.session, wrong_ns_id)
- self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
- self.assertIn("NS not found with id {}".format(wrong_ns_id), str(e.exception),
- "Wrong exception text")
+ self.assertEqual(
+ e.exception.http_code,
+ HTTPStatus.NOT_FOUND,
+ "Wrong HTTP status code",
+ )
+ self.assertIn(
+ "NS not found with id {}".format(wrong_ns_id),
+ str(e.exception),
+ "Wrong exception text",
+ )
if __name__ == "__main__":
try:
# load parameters and configuration
- opts, args = getopt.getopt(sys.argv[1:], "hvu:s:f:t:",
- ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="])
+ opts, args = getopt.getopt(
+ sys.argv[1:],
+ "hvu:s:f:t:",
+ ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="],
+ )
url = None
chunk_size = 500
pkg_file = None
for o, a in opts:
if o == "--version":
- print("upload version " + __version__ + ' ' + version_date)
+ print("upload version " + __version__ + " " + version_date)
sys.exit()
elif o in ("-v", "--verbose"):
verbose += 1
index = 0
transaction_id = None
file_md5 = md5()
- with open(pkg_file, 'rb') as f:
+ with open(pkg_file, "rb") as f:
headers = {
"Content-type": "application/gzip",
"Content-Filename": basename(pkg_file),
# "chunk_size": chunk_size}
if transaction_id:
headers["Transaction-Id"] = transaction_id
- if index+len(chunk_data) == total_size:
+ if index + len(chunk_data) == total_size:
headers["Content-File-MD5"] = file_md5.hexdigest()
# payload["id"] = transaction_id
- headers["Content-range"] = "bytes {}-{}/{}".format(index, index+len(chunk_data)-1, total_size)
+ headers["Content-range"] = "bytes {}-{}/{}".format(
+ index, index + len(chunk_data) - 1, total_size
+ )
# refers to rfc2616: https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
if verbose:
print("TX chunk Headers: {}".format(headers))
from jsonschema import validate as js_v, exceptions as js_e
from http import HTTPStatus
from copy import deepcopy
-from uuid import UUID # To test for valid UUID
+from uuid import UUID # To test for valid UUID
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
__version__ = "0.1"
# Basis schemas
patern_name = "^[ -~]+$"
-shortname_schema = {"type": "string", "minLength": 1, "maxLength": 60, "pattern": "^[^,;()\\.\\$'\"]+$"}
+shortname_schema = {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 60,
+ "pattern": "^[^,;()\\.\\$'\"]+$",
+}
passwd_schema = {"type": "string", "minLength": 1, "maxLength": 60}
-name_schema = {"type": "string", "minLength": 1, "maxLength": 255, "pattern": "^[^,;()'\"]+$"}
+name_schema = {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 255,
+ "pattern": "^[^,;()'\"]+$",
+}
string_schema = {"type": "string", "minLength": 1, "maxLength": 255}
-xml_text_schema = {"type": "string", "minLength": 1, "maxLength": 1000, "pattern": "^[^']+$"}
-description_schema = {"type": ["string", "null"], "maxLength": 255, "pattern": "^[^'\"]+$"}
-long_description_schema = {"type": ["string", "null"], "maxLength": 3000, "pattern": "^[^'\"]+$"}
+xml_text_schema = {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 1000,
+ "pattern": "^[^']+$",
+}
+description_schema = {
+ "type": ["string", "null"],
+ "maxLength": 255,
+ "pattern": "^[^'\"]+$",
+}
+long_description_schema = {
+ "type": ["string", "null"],
+ "maxLength": 3000,
+ "pattern": "^[^'\"]+$",
+}
id_schema_fake = {"type": "string", "minLength": 2, "maxLength": 36}
bool_schema = {"type": "boolean"}
null_schema = {"type": "null"}
# "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
-id_schema = {"type": "string", "pattern": "^[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}$"}
-time_schema = {"type": "string", "pattern": "^[0-9]{4}-[0-1][0-9]-[0-3][0-9]T[0-2][0-9]([0-5]:){2}"}
-pci_schema = {"type": "string", "pattern": "^[0-9a-fA-F]{4}(:[0-9a-fA-F]{2}){2}\\.[0-9a-fA-F]$"}
+id_schema = {
+ "type": "string",
+ "pattern": "^[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}$",
+}
+time_schema = {
+ "type": "string",
+ "pattern": "^[0-9]{4}-[0-1][0-9]-[0-3][0-9]T[0-2][0-9]([0-5]:){2}",
+}
+pci_schema = {
+ "type": "string",
+ "pattern": "^[0-9a-fA-F]{4}(:[0-9a-fA-F]{2}){2}\\.[0-9a-fA-F]$",
+}
# allows [] for wildcards. For that reason huge length limit is set
pci_extended_schema = {"type": "string", "pattern": "^[0-9a-fA-F.:-\\[\\]]{12,40}$"}
http_schema = {"type": "string", "pattern": "^(https?|http)://[^'\"=]+$"}
path_schema = {"type": "string", "pattern": "^(\\.){0,2}(/[^/\"':{}\\(\\)]+)+$"}
vlan_schema = {"type": "integer", "minimum": 1, "maximum": 4095}
vlan1000_schema = {"type": "integer", "minimum": 1000, "maximum": 4095}
-mac_schema = {"type": "string",
- "pattern": "^[0-9a-fA-F][02468aceACE](:[0-9a-fA-F]{2}){5}$"} # must be unicast: LSB bit of MSB byte ==0
+mac_schema = {
+ "type": "string",
+ "pattern": "^[0-9a-fA-F][02468aceACE](:[0-9a-fA-F]{2}){5}$",
+} # must be unicast: LSB bit of MSB byte ==0
dpid_Schema = {"type": "string", "pattern": "^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){7}$"}
# mac_schema={"type":"string", "pattern":"^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$"}
-ip_schema = {"type": "string",
- "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}
-ip_prefix_schema = {"type": "string",
- "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}"
- "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)/(30|[12]?[0-9])$"}
+ip_schema = {
+ "type": "string",
+ "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
+}
+ip_prefix_schema = {
+ "type": "string",
+ "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}"
+ "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)/(30|[12]?[0-9])$",
+}
port_schema = {"type": "integer", "minimum": 1, "maximum": 65534}
object_schema = {"type": "object"}
schema_version_2 = {"type": "integer", "minimum": 2, "maximum": 2}
# schema_version_string={"type":"string","enum": ["0.1", "2", "0.2", "3", "0.3"]}
-log_level_schema = {"type": "string", "enum": ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]}
+log_level_schema = {
+ "type": "string",
+ "enum": ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
+}
checksum_schema = {"type": "string", "pattern": "^[0-9a-fA-F]{32}$"}
size_schema = {"type": "integer", "minimum": 1, "maximum": 100}
array_edition_schema = {
"type": "object",
- "patternProperties": {
- "^\\$": {}
- },
+ "patternProperties": {"^\\$": {}},
"additionalProperties": False,
"minProperties": 1,
}
"vim-volume-id": name_schema,
},
"required": ["name", "vim-volume-id"],
- "additionalProperties": False
- }
+ "additionalProperties": False,
+ },
},
"interface": {
"type": "array",
"floating-ip-required": bool_schema,
},
"required": ["name"],
- "additionalProperties": False
- }
- }
+ "additionalProperties": False,
+ },
+ },
},
"required": ["id"],
- "additionalProperties": False
+ "additionalProperties": False,
}
ip_profile_dns_schema = {
"address": ip_schema,
},
"required": ["address"],
- "additionalProperties": False
- }
+ "additionalProperties": False,
+ },
}
ip_profile_dhcp_schema = {
"properties": {
"enabled": {"type": "boolean"},
"count": integer1_schema,
- "start-address": ip_schema
+ "start-address": ip_schema,
},
"additionalProperties": False,
}
"gateway-address": ip_schema,
"dns-server": ip_profile_dns_schema,
"dhcp-params": ip_profile_dhcp_schema,
- }
+ },
}
ip_profile_update_schema = {
"subnet-address": {"oneOf": [null_schema, ip_prefix_schema]},
"gateway-address": {"oneOf": [null_schema, ip_schema]},
"dns-server": {"oneOf": [null_schema, ip_profile_dns_schema]},
-
"dhcp-params": {"oneOf": [null_schema, ip_profile_dhcp_schema]},
},
- "additionalProperties": False
+ "additionalProperties": False,
}
provider_network_schema = {
"mac_address": mac_schema,
"vlan": vlan_schema,
},
- "additionalProperties": True
- }
+ "additionalProperties": True,
+ },
},
"network-type": shortname_schema,
},
- "additionalProperties": True
+ "additionalProperties": True,
}
ns_instantiate_internal_vld = {
},
"required": ["id-ref"],
"minProperties": 2,
- "additionalProperties": False
+ "additionalProperties": False,
},
- }
+ },
},
"required": ["name"],
"minProperties": 2,
- "additionalProperties": False
+ "additionalProperties": False,
}
additional_params_for_vnf = {
"properties": {
"vdu_id": name_schema,
"additionalParams": object_schema,
- "config-units": integer1_schema, # number of configuration units of this vdu, by default 1
+ "config-units": integer1_schema, # number of configuration units of this vdu, by default 1
},
"required": ["vdu_id"],
"minProperties": 2,
"additionalParams": object_schema,
"kdu_model": name_schema,
"k8s-namespace": name_schema,
- "config-units": integer1_schema, # number of configuration units of this knf, by default 1
+ "config-units": integer1_schema, # number of configuration units of this knf, by default 1
},
"required": ["kdu_name"],
"minProperties": 2,
},
"required": ["member-vnf-index"],
"minProperties": 2,
- "additionalProperties": False
- }
+ "additionalProperties": False,
+ },
}
ns_instantiate = {
"placement-constraints": object_schema,
"additionalParamsForNs": object_schema,
"additionalParamsForVnf": additional_params_for_vnf,
- "config-units": integer1_schema, # number of configuration units of this ns, by default 1
+ "config-units": integer1_schema, # number of configuration units of this ns, by default 1
"k8s-namespace": name_schema,
"ssh_keys": {"type": "array", "items": {"type": "string"}},
"timeout_ns_deploy": integer1_schema,
"internal-vld": {
"type": "array",
"minItems": 1,
- "items": ns_instantiate_internal_vld
- }
+ "items": ns_instantiate_internal_vld,
+ },
},
"required": ["member-vnf-index"],
"minProperties": 2,
- "additionalProperties": False
- }
+ "additionalProperties": False,
+ },
},
"vld": {
"type": "array",
"ip-address": ip_schema,
# "mac-address": mac_schema,
},
- "required": ["member-vnf-index-ref", "vnfd-connection-point-ref"],
+ "required": [
+ "member-vnf-index-ref",
+ "vnfd-connection-point-ref",
+ ],
"minProperties": 3,
- "additionalProperties": False
+ "additionalProperties": False,
},
- }
+ },
},
"required": ["name"],
- "additionalProperties": False
- }
+ "additionalProperties": False,
+ },
},
},
"required": ["nsName", "nsdId", "vimAccountId"],
- "additionalProperties": False
+ "additionalProperties": False,
}
ns_terminate = {
"skip_terminate_primitives": bool_schema,
"netsliceInstanceId": id_schema,
},
- "additionalProperties": False
+ "additionalProperties": False,
}
-ns_action = { # TODO for the moment it is only contemplated the vnfd primitive execution
+ns_action = { # TODO for the moment it is only contemplated the vnfd primitive execution
"title": "ns action input schema",
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"timeout_ns_action": integer1_schema,
"primitive_params": {"type": "object"},
},
- "required": ["primitive", "primitive_params"], # TODO add member_vnf_index
- "additionalProperties": False
+ "required": ["primitive", "primitive_params"], # TODO add member_vnf_index
+ "additionalProperties": False,
}
-ns_scale = { # TODO for the moment it is only VDU-scaling
+ns_scale = { # TODO for the moment it is only VDU-scaling
"title": "ns scale input schema",
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"type": "object",
"properties": {
"vnfInstanceId": name_schema,
- "scaleVnfType": {"enum": ["SCALE_OUT", 'SCALE_IN']},
+ "scaleVnfType": {"enum": ["SCALE_OUT", "SCALE_IN"]},
"scaleByStepData": {
"type": "object",
"properties": {
"scaling-policy": name_schema,
},
"required": ["scaling-group-descriptor", "member-vnf-index"],
- "additionalProperties": False
+ "additionalProperties": False,
},
},
"required": ["scaleVnfType", "scaleByStepData"], # vnfInstanceId
- "additionalProperties": False
+ "additionalProperties": False,
},
"scaleTime": time_schema,
},
"required": ["scaleType", "scaleVnfData"],
- "additionalProperties": False
+ "additionalProperties": False,
}
"vim_user": shortname_schema,
"vim_password": passwd_schema,
"vca": id_schema,
- "config": {"type": "object"}
+ "config": {"type": "object"},
},
- "additionalProperties": False
+ "additionalProperties": False,
}
vim_account_new_schema = {
"vim_user": shortname_schema,
"vim_password": passwd_schema,
"vca": id_schema,
- "config": {"type": "object"}
+ "config": {"type": "object"},
},
- "required": ["name", "vim_url", "vim_type", "vim_user", "vim_password", "vim_tenant_name"],
- "additionalProperties": False
+ "required": [
+ "name",
+ "vim_url",
+ "vim_type",
+ "vim_user",
+ "vim_password",
+ "vim_tenant_name",
+ ],
+ "additionalProperties": False,
}
wim_type = shortname_schema # {"enum": ["ietfl2vpn", "onos", "odl", "dynpac", "fake"]}
"wim_url": description_schema,
"user": shortname_schema,
"password": passwd_schema,
- "config": {"type": "object"}
+ "config": {"type": "object"},
},
- "additionalProperties": False
+ "additionalProperties": False,
}
wim_account_new_schema = {
"password": passwd_schema,
"config": {
"type": "object",
- "patternProperties": {
- ".": {"not": {"type": "null"}}
- }
- }
+ "patternProperties": {".": {"not": {"type": "null"}}},
+ },
},
"required": ["name", "wim_url", "wim_type"],
- "additionalProperties": False
+ "additionalProperties": False,
}
sdn_properties = {
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": sdn_properties,
- "required": ["name", 'type'],
- "additionalProperties": False
+ "required": ["name", "type"],
+ "additionalProperties": False,
}
sdn_edit_schema = {
"title": "sdn controller update information schema",
"type": "object",
"properties": sdn_properties,
# "required": ["name", "port", 'ip', 'dpid', 'type'],
- "additionalProperties": False
+ "additionalProperties": False,
}
sdn_port_mapping_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"pci": pci_extended_schema,
"switch_port": shortname_schema,
- "switch_mac": mac_schema
+ "switch_mac": mac_schema,
},
- "required": ["pci"]
- }
- }
+ "required": ["pci"],
+ },
+ },
},
- "required": ["compute_node", "ports"]
- }
+ "required": ["compute_node", "ports"],
+ },
}
sdn_external_port_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"properties": {
"port": {"type": "string", "minLength": 1, "maxLength": 60},
"vlan": vlan_schema,
- "mac": mac_schema
+ "mac": mac_schema,
},
- "required": ["port"]
+ "required": ["port"],
}
# K8s Clusters
"type": "object",
"patternProperties": {".": {"oneOf": [name_schema, null_schema]}},
"minProperties": 1,
- "additionalProperties": False
+ "additionalProperties": False,
}
k8scluster_new_schema = {
"title": "k8scluster creation input schema",
"cni": nameshort_list_schema,
},
"required": ["name", "credentials", "vim_account", "k8s_version", "nets"],
- "additionalProperties": False
+ "additionalProperties": False,
}
k8scluster_edit_schema = {
"title": "vim_account edit input schema",
"namespace": name_schema,
"cni": nameshort_list_schema,
},
- "additionalProperties": False
+ "additionalProperties": False,
}
# VCA
"type": "object",
"properties": k8srepo_properties,
"required": ["name", "type", "url"],
- "additionalProperties": False
+ "additionalProperties": False,
}
k8srepo_edit_schema = {
"title": "vim_account edit input schema",
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": k8srepo_properties,
- "additionalProperties": False
+ "additionalProperties": False,
}
# OSM Repos
"type": "object",
"properties": osmrepo_properties,
"required": ["name", "type", "url"],
- "additionalProperties": False
+ "additionalProperties": False,
}
osmrepo_edit_schema = {
"title": "osm repo edit input schema",
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": osmrepo_properties,
- "additionalProperties": False
+ "additionalProperties": False,
}
# PDUs
"properties": {
"name": shortname_schema,
"mgmt": bool_schema,
- "type": {"enum": ["overlay", 'underlay']},
+ "type": {"enum": ["overlay", "underlay"]},
"ip-address": ip_schema,
# TODO, add user, password, ssh-key
"mac-address": mac_schema,
# "switch-vlan": vlan_schema,
},
"required": ["name", "mgmt", "ip-address"],
- "additionalProperties": False
+ "additionalProperties": False,
}
pdu_new_schema = {
"title": "pdu creation input schema",
"shared": bool_schema,
"vims": nameshort_list_schema,
"vim_accounts": nameshort_list_schema,
- "interfaces": {
- "type": "array",
- "items": pdu_interface,
- "minItems": 1
- }
+ "interfaces": {"type": "array", "items": pdu_interface, "minItems": 1},
},
"required": ["name", "type", "interfaces"],
- "additionalProperties": False
+ "additionalProperties": False,
}
pdu_edit_schema = {
"title": "pdu edit input schema",
"shared": bool_schema,
"vims": {"oneOf": [array_edition_schema, nameshort_list_schema]},
"vim_accounts": {"oneOf": [array_edition_schema, nameshort_list_schema]},
- "interfaces": {"oneOf": [
- array_edition_schema,
- {
- "type": "array",
- "items": pdu_interface,
- "minItems": 1
- }
- ]}
+ "interfaces": {
+ "oneOf": [
+ array_edition_schema,
+ {"type": "array", "items": pdu_interface, "minItems": 1},
+ ]
+ },
},
"additionalProperties": False,
- "minProperties": 1
+ "minProperties": 1,
}
# VNF PKG OPERATIONS
"primitive": name_schema,
"primitive_params": {"type": "object"},
},
- "required": ["lcmOperationType", "vnfPkgId", "kdu_name", "primitive", "primitive_params"],
- "additionalProperties": False
+ "required": [
+ "lcmOperationType",
+ "vnfPkgId",
+ "kdu_name",
+ "primitive",
+ "primitive_params",
+ ],
+ "additionalProperties": False,
}
# USERS
"type": "array",
"items": {
"type": "object",
- "properties": {
- "project": shortname_schema,
- "role": shortname_schema
- },
+ "properties": {"project": shortname_schema, "role": shortname_schema},
"required": ["project", "role"],
- "additionalProperties": False
+ "additionalProperties": False,
},
- "minItems": 1
+ "minItems": 1,
}
project_role_mappings_optional = {
"title": "list of projects/roles or projects only",
"type": "array",
"items": {
"type": "object",
- "properties": {
- "project": shortname_schema,
- "role": shortname_schema
- },
+ "properties": {"project": shortname_schema, "role": shortname_schema},
"required": ["project"],
- "additionalProperties": False
+ "additionalProperties": False,
},
- "minItems": 1
+ "minItems": 1,
}
user_new_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"project_role_mappings": project_role_mappings,
},
"required": ["username", "password"],
- "additionalProperties": False
+ "additionalProperties": False,
}
user_edit_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": {
"password": passwd_schema,
- "username": shortname_schema, # To allow User Name modification
- "projects": {
- "oneOf": [
- nameshort_list_schema,
- array_edition_schema
- ]
- },
+ "username": shortname_schema, # To allow User Name modification
+ "projects": {"oneOf": [nameshort_list_schema, array_edition_schema]},
"project_role_mappings": project_role_mappings,
"add_project_role_mappings": project_role_mappings,
"remove_project_role_mappings": project_role_mappings_optional,
},
"minProperties": 1,
- "additionalProperties": False
+ "additionalProperties": False,
}
# PROJECTS
-topics_with_quota = ["vnfds", "nsds", "slice_templates", "pduds", "ns_instances", "slice_instances", "vim_accounts",
- "wim_accounts", "sdn_controllers", "k8sclusters", "vca", "k8srepos", "osmrepos", "ns_subscriptions"]
+topics_with_quota = [
+ "vnfds",
+ "nsds",
+ "slice_templates",
+ "pduds",
+ "ns_instances",
+ "slice_instances",
+ "vim_accounts",
+ "wim_accounts",
+ "sdn_controllers",
+ "k8sclusters",
+ "vca",
+ "k8srepos",
+ "osmrepos",
+ "ns_subscriptions",
+]
project_new_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "New project schema for administrators",
"quotas": {
"type": "object",
"properties": {topic: integer0_schema for topic in topics_with_quota},
- "additionalProperties": False
+ "additionalProperties": False,
},
},
"required": ["name"],
- "additionalProperties": False
+ "additionalProperties": False,
}
project_edit_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": {
"admin": bool_schema,
- "name": shortname_schema, # To allow Project Name modification
+ "name": shortname_schema, # To allow Project Name modification
"quotas": {
"type": "object",
- "properties": {topic: {"oneOf": [integer0_schema, null_schema]} for topic in topics_with_quota},
- "additionalProperties": False
+ "properties": {
+ topic: {"oneOf": [integer0_schema, null_schema]}
+ for topic in topics_with_quota
+ },
+ "additionalProperties": False,
},
},
"additionalProperties": False,
- "minProperties": 1
+ "minProperties": 1,
}
# ROLES
".": bool_schema,
},
# "minProperties": 1,
- }
+ },
},
"required": ["name"],
- "additionalProperties": False
+ "additionalProperties": False,
}
roles_edit_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"name": shortname_schema,
"permissions": {
"type": "object",
- "patternProperties": {
- ".": {
- "oneOf": [bool_schema, null_schema]
- }
- },
+ "patternProperties": {".": {"oneOf": [bool_schema, null_schema]}},
# "minProperties": 1,
- }
+ },
},
"additionalProperties": False,
- "minProperties": 1
+ "minProperties": 1,
}
# GLOBAL SCHEMAS
"ip-profile": object_schema,
},
"required": ["name"],
- "additionalProperties": False
+ "additionalProperties": False,
}
nsi_instantiate = {
"netslice-subnet": {
"type": "array",
"minItems": 1,
- "items": nsi_subnet_instantiate
- },
- "netslice-vld": {
- "type": "array",
- "minItems": 1,
- "items": nsi_vld_instantiate
+ "items": nsi_subnet_instantiate,
},
+ "netslice-vld": {"type": "array", "minItems": 1, "items": nsi_vld_instantiate},
},
"required": ["nsiName", "nstId", "vimAccountId"],
- "additionalProperties": False
-}
-
-nsi_action = {
-
+ "additionalProperties": False,
}
-nsi_terminate = {
+nsi_action = {}
-}
+nsi_terminate = {}
nsinstancesubscriptionfilter_schema = {
"title": "instance identifier schema",
"notificationTypes": {
"type": "array",
"items": {
- "enum": ['NsLcmOperationOccurrenceNotification', 'NsChangeNotification',
- 'NsIdentifierCreationNotification', 'NsIdentifierDeletionNotification']
- }
+ "enum": [
+ "NsLcmOperationOccurrenceNotification",
+ "NsChangeNotification",
+ "NsIdentifierCreationNotification",
+ "NsIdentifierDeletionNotification",
+ ]
+ },
},
"operationTypes": {
"type": "array",
- "items": {
- "enum": ['INSTANTIATE', 'SCALE', 'TERMINATE', 'UPDATE', 'HEAL']
- }
+ "items": {"enum": ["INSTANTIATE", "SCALE", "TERMINATE", "UPDATE", "HEAL"]},
},
"operationStates": {
"type": "array",
"items": {
- "enum": ['PROCESSING', 'COMPLETED', 'PARTIALLY_COMPLETED', 'FAILED',
- 'FAILED_TEMP', 'ROLLING_BACK', 'ROLLED_BACK']
- }
- },
- "nsComponentTypes": {
- "type": "array",
- "items": {
- "enum": ['VNF', 'NS', 'PNF']
- }
+ "enum": [
+ "PROCESSING",
+ "COMPLETED",
+ "PARTIALLY_COMPLETED",
+ "FAILED",
+ "FAILED_TEMP",
+ "ROLLING_BACK",
+ "ROLLED_BACK",
+ ]
+ },
},
+ "nsComponentTypes": {"type": "array", "items": {"enum": ["VNF", "NS", "PNF"]}},
"lcmOpNameImpactingNsComponent": {
"type": "array",
"items": {
- "enum": ['VNF_INSTANTIATE', 'VNF_SCALE', 'VNF_SCALE_TO_LEVEL', 'VNF_CHANGE_FLAVOUR',
- 'VNF_TERMINATE', 'VNF_HEAL', 'VNF_OPERATE', 'VNF_CHANGE_EXT_CONN', 'VNF_MODIFY_INFO',
- 'NS_INSTANTIATE', 'NS_SCALE', 'NS_UPDATE', 'NS_TERMINATE', 'NS_HEAL']
- }
+ "enum": [
+ "VNF_INSTANTIATE",
+ "VNF_SCALE",
+ "VNF_SCALE_TO_LEVEL",
+ "VNF_CHANGE_FLAVOUR",
+ "VNF_TERMINATE",
+ "VNF_HEAL",
+ "VNF_OPERATE",
+ "VNF_CHANGE_EXT_CONN",
+ "VNF_MODIFY_INFO",
+ "NS_INSTANTIATE",
+ "NS_SCALE",
+ "NS_UPDATE",
+ "NS_TERMINATE",
+ "NS_HEAL",
+ ]
+ },
},
"lcmOpOccStatusImpactingNsComponent": {
"type": "array",
"items": {
- "enum": ['START', 'COMPLETED', 'PARTIALLY_COMPLETED', 'FAILED', 'ROLLED_BACK']
- }
+ "enum": [
+ "START",
+ "COMPLETED",
+ "PARTIALLY_COMPLETED",
+ "FAILED",
+ "ROLLED_BACK",
+ ]
+ },
},
},
"allOf": [
{"required": ["operationTypes"]},
{"required": ["operationStates"]},
]
- }
+ },
},
{
"if": {
"properties": {
- "notificationTypes": {
- "contains": {"const": "NsChangeNotification"}
- }
+ "notificationTypes": {"contains": {"const": "NsChangeNotification"}}
},
},
"then": {
{"required": ["lcmOpNameImpactingNsComponent"]},
{"required": ["lcmOpOccStatusImpactingNsComponent"]},
]
- }
- }
- ]
+ },
+ },
+ ],
}
authentication_schema = {
"properties": {
"filter": nslcmsub_schema,
"CallbackUri": description_schema,
- "authentication": authentication_schema
+ "authentication": authentication_schema,
},
"required": ["CallbackUri"],
}
error_pos = ""
raise ValidationError("Format error {} '{}' ".format(error_pos, e.message))
except js_e.SchemaError:
- raise ValidationError("Bad json schema {}".format(schema_to_use), http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise ValidationError(
+ "Bad json schema {}".format(schema_to_use),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
def is_valid_uuid(x):
here = os.path.abspath(os.path.dirname(__file__))
# with open(os.path.join(here, 'osm_nbi/html_public/version')) as version_file:
# VERSION = version_file.readline().strip()
-with open(os.path.join(here, 'README.rst')) as readme_file:
+with open(os.path.join(here, "README.rst")) as readme_file:
README = readme_file.read()
setup(
name=_name,
- description='OSM North Bound Interface',
+ description="OSM North Bound Interface",
long_description=README,
- version_command=('git describe --match v* --tags --long --dirty', 'pep440-git-full'),
+ version_command=(
+ "git describe --match v* --tags --long --dirty",
+ "pep440-git-full",
+ ),
# version=VERSION,
# python_requires='>3.5.0',
- author='ETSI OSM',
- author_email='osmsupport@etsi.org',
- maintainer='ETSI OSM',
- maintainer_email='osmsupport@etsi.org',
- url='https://osm.etsi.org/gitweb/?p=osm/NBI.git;a=summary',
- license='Apache 2.0',
-
+ author="ETSI OSM",
+ author_email="osmsupport@etsi.org",
+ maintainer="ETSI OSM",
+ maintainer_email="osmsupport@etsi.org",
+ url="https://osm.etsi.org/gitweb/?p=osm/NBI.git;a=summary",
+ license="Apache 2.0",
packages=find_packages(exclude=["temp", "local"]),
include_package_data=True,
- setup_requires=['setuptools-version-command'],
+ setup_requires=["setuptools-version-command"],
)
skip_install = true
commands =
- black --check --diff osm_nbi/
+ - black --check --diff setup.py
#######################################################################################
test_mznmodels.py
max-line-length = 120
show-source = True
-builtins = _
\ No newline at end of file
+builtins = _
+