From: garciadeblas Date: Fri, 28 Sep 2018 10:13:44 +0000 (+0200) Subject: Merge branch 'master' into netslice X-Git-Tag: v5.0.0~15^2~5 X-Git-Url: https://osm.etsi.org/gitweb/?p=osm%2FNBI.git;a=commitdiff_plain;h=b69fb3ca37645088483959976edbfc7fda66df8d;hp=0da52259d5938703bcb71bb1f42cc1da345ff887 Merge branch 'master' into netslice --- diff --git a/.gitignore-common b/.gitignore-common index f0f10bd..6eb4b24 100644 --- a/.gitignore-common +++ b/.gitignore-common @@ -32,4 +32,4 @@ osm_nbi/test/temp build dist *.egg-info - +.eggs diff --git a/Dockerfile.local b/Dockerfile.local index 154e4ae..1ec838a 100644 --- a/Dockerfile.local +++ b/Dockerfile.local @@ -9,16 +9,25 @@ WORKDIR /app/osm_nbi ADD . /app RUN apt-get update && apt-get install -y git python3 python3-jsonschema \ - python3-cherrypy3 python3-pymongo python3-yaml python3-pip \ + python3-pymongo python3-yaml python3-pip \ && pip3 install pip==9.0.3 \ - && pip3 install aiokafka \ + && pip3 install aiokafka cherrypy \ && mkdir -p /app/storage/kafka && mkdir -p /app/log + +# OSM_COMMON RUN git clone https://osm.etsi.org/gerrit/osm/common.git \ && cd common && python3 setup.py develop && cd .. # && pip3 install -U -r requirements.txt \ # && cd .. - +# OSM_IM +RUN pip3 install pyang && mkdir -p /app && cd /app \ + && git clone https://github.com/robshakir/pyangbind \ + && pip3 install -e pyangbind \ + && git clone https://osm.etsi.org/gerrit/osm/IM \ + && cd /app/IM/models/yang \ + && pyang --plugindir /app/pyangbind/pyangbind/plugin -f pybind -o /app/osm_nbi/vnfd_catalog.py vnfd.yang \ + && pyang --plugindir /app/pyangbind/pyangbind/plugin -f pybind -o /app/osm_nbi/nsd_catalog.py nsd.yang EXPOSE 9999 diff --git a/osm_nbi/engine.py b/osm_nbi/engine.py index 50d1bd1..29bd4f1 100644 --- a/osm_nbi/engine.py +++ b/osm_nbi/engine.py @@ -12,12 +12,12 @@ import logging from random import choice as random_choice from uuid import uuid4 from hashlib import sha256, md5 -from osm_common.dbbase import DbException +from osm_common.dbbase import DbException, deep_update from osm_common.fsbase import FsException from osm_common.msgbase import MsgException from http import HTTPStatus from time import time -from copy import deepcopy +from copy import deepcopy, copy from validation import validate_input, ValidationError __author__ = "Alfonso Tierno " @@ -30,27 +30,15 @@ class EngineException(Exception): Exception.__init__(self, message) -def _deep_update(dict_to_change, dict_reference): +def get_iterable(input): """ - Modifies one dictionary with the information of the other following https://tools.ietf.org/html/rfc7396 - :param dict_to_change: Ends modified - :param dict_reference: reference - :return: none + Returns an iterable, in case input is None it just returns an empty tuple + :param input: + :return: iterable """ - for k in dict_reference: - if dict_reference[k] is None: # None->Anything - if k in dict_to_change: - del dict_to_change[k] - elif not isinstance(dict_reference[k], dict): # NotDict->Anything - dict_to_change[k] = dict_reference[k] - elif k not in dict_to_change: # Dict->Empty - dict_to_change[k] = deepcopy(dict_reference[k]) - _deep_update(dict_to_change[k], dict_reference[k]) - elif isinstance(dict_to_change[k], dict): # Dict->Dict - _deep_update(dict_to_change[k], dict_reference[k]) - else: # Dict->NotDict - dict_to_change[k] = deepcopy(dict_reference[k]) - _deep_update(dict_to_change[k], dict_reference[k]) + if input is None: + return () + return input class Engine(object): @@ -243,20 +231,35 @@ class Engine(object): clean_indata = 
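Note on the engine.py hunk above: the local _deep_update() helper is removed in favour of osm_common.dbbase.deep_update, and a small get_iterable() helper is added. A minimal, illustrative sketch of the RFC 7396 merge-patch semantics that the removed helper implemented (not the osm_common code) is:

    # Sketch of RFC 7396 "JSON merge patch" semantics, as implemented by the
    # removed _deep_update() and expected from osm_common.dbbase.deep_update.
    from copy import deepcopy

    def merge_patch(target, patch):
        """Modify 'target' in place following https://tools.ietf.org/html/rfc7396."""
        for key, value in patch.items():
            if value is None:                      # None deletes the key
                target.pop(key, None)
            elif isinstance(value, dict):          # dict -> recurse, creating if missing
                if not isinstance(target.get(key), dict):
                    target[key] = {}
                merge_patch(target[key], value)
            else:                                  # scalar/list -> overwrite
                target[key] = deepcopy(value)

    content = {"name": "ns1", "_admin": {"projects_read": ["p1"], "onboardingState": "CREATED"}}
    merge_patch(content, {"_admin": {"onboardingState": None}, "description": "updated"})
    # content == {"name": "ns1", "_admin": {"projects_read": ["p1"]}, "description": "updated"}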
clean_indata['userDefinedData'] return clean_indata - def _check_dependencies_on_descriptor(self, session, item, descriptor_id): + def _check_project_dependencies(self, project_id): + """ + Check if a project can be deleted + :param session: + :param _id: + :return: + """ + # TODO Is it needed to check descriptors _admin.project_read/project_write?? + _filter = {"projects": project_id} + if self.db.get_list("users", _filter): + raise EngineException("There are users that uses this project", http_code=HTTPStatus.CONFLICT) + + def _check_dependencies_on_descriptor(self, session, item, descriptor_id, _id): """ Check that the descriptor to be deleded is not a dependency of others :param session: client session information :param item: can be vnfds, nsds - :param descriptor_id: id of descriptor to be deleted + :param descriptor_id: id (provided by client) of descriptor to be deleted + :param _id: internal id of descriptor to be deleted :return: None or raises exception """ if item == "vnfds": _filter = {"constituent-vnfd.ANYINDEX.vnfd-id-ref": descriptor_id} if self.get_item_list(session, "nsds", _filter): raise EngineException("There are nsd that depends on this VNFD", http_code=HTTPStatus.CONFLICT) + if self.get_item_list(session, "vnfrs", {"vnfd-id": _id}): + raise EngineException("There are vnfr that depends on this VNFD", http_code=HTTPStatus.CONFLICT) elif item == "nsds": - _filter = {"nsdId": descriptor_id} + _filter = {"nsdId": _id} if self.get_item_list(session, "nsrs", _filter): raise EngineException("There are nsr that depends on this NSD", http_code=HTTPStatus.CONFLICT) @@ -284,22 +287,35 @@ class Engine(object): raise EngineException("Descriptor error at nsdId='{}' references a non exist nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT) + def _check_edition(self, session, item, indata, id, force=False): + if item == "users": + if indata.get("projects"): + if not session["admin"]: + raise EngineException("Needed admin privileges to edit user projects", HTTPStatus.UNAUTHORIZED) + if indata.get("password"): + # regenerate salt and encrypt password + salt = uuid4().hex + indata["_admin"] = {"salt": salt} + indata["password"] = sha256(indata["password"].encode('utf-8') + salt.encode('utf-8')).hexdigest() + def _validate_new_data(self, session, item, indata, id=None, force=False): if item == "users": - if not indata.get("username"): - raise EngineException("missing 'username'", HTTPStatus.UNPROCESSABLE_ENTITY) - if not indata.get("password"): - raise EngineException("missing 'password'", HTTPStatus.UNPROCESSABLE_ENTITY) - if not indata.get("projects"): - raise EngineException("missing 'projects'", HTTPStatus.UNPROCESSABLE_ENTITY) # check username not exists - if self.db.get_one(item, {"username": indata.get("username")}, fail_on_empty=False, fail_on_more=False): + if not id and self.db.get_one(item, {"username": indata.get("username")}, fail_on_empty=False, + fail_on_more=False): raise EngineException("username '{}' exists".format(indata["username"]), HTTPStatus.CONFLICT) + # check projects + if not force: + for p in indata["projects"]: + if p == "admin": + continue + if not self.db.get_one("projects", {"_id": p}, fail_on_empty=False, fail_on_more=False): + raise EngineException("project '{}' does not exists".format(p), HTTPStatus.CONFLICT) elif item == "projects": if not indata.get("name"): raise EngineException("missing 'name'") # check name not exists - if self.db.get_one(item, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False): + if not id and 
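The _check_edition() hunk above regenerates a salt on password change and stores only sha256(password + salt). A small sketch of that scheme; the verify helper is illustrative and not part of this commit:

    # Salt-and-hash scheme used when a user password is edited.
    from hashlib import sha256
    from uuid import uuid4

    def hash_password(password):
        salt = uuid4().hex
        digest = sha256(password.encode("utf-8") + salt.encode("utf-8")).hexdigest()
        return salt, digest

    def verify_password(password, salt, stored_digest):
        # hypothetical login-side check: recompute with the stored salt
        return sha256(password.encode("utf-8") + salt.encode("utf-8")).hexdigest() == stored_digest

    salt, digest = hash_password("pw1_new")
    assert verify_password("pw1_new", salt, digest)
    assert not verify_password("wrong", salt, digest)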
self.db.get_one(item, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False): raise EngineException("name '{}' exists".format(indata["name"]), HTTPStatus.CONFLICT) elif item in ("vnfds", "nsds"): filter = {"id": indata["id"]} @@ -342,16 +358,108 @@ class Engine(object): :param indata: descriptor with the parameters of the operation :return: None """ + vnfds = {} + vim_accounts = [] + nsd = nsr["nsd"] + + def check_valid_vnf_member_index(member_vnf_index): + for vnf in nsd["constituent-vnfd"]: + if member_vnf_index == vnf["member-vnf-index"]: + vnfd_id = vnf["vnfd-id-ref"] + if vnfd_id not in vnfds: + vnfds[vnfd_id] = self.db.get_one("vnfds", {"id": vnfd_id}) + return vnfds[vnfd_id] + else: + raise EngineException("Invalid parameter member_vnf_index='{}' is not one of the " + "nsd:constituent-vnfd".format(member_vnf_index)) + + def check_valid_vim_account(vim_account): + if vim_account in vim_accounts: + return + try: + self.db.get_one("vim_accounts", {"_id": vim_account}) + except Exception: + raise EngineException("Invalid vimAccountId='{}' not present".format(vim_account)) + vim_accounts.append(vim_account) + if operation == "action": + # check vnf_member_index if indata.get("vnf_member_index"): indata["member_vnf_index"] = indata.pop("vnf_member_index") # for backward compatibility - for vnf in nsr["nsd"]["constituent-vnfd"]: - if indata["member_vnf_index"] == vnf["member-vnf-index"]: - # TODO get vnfd, check primitives + if not indata.get("member_vnf_index"): + raise EngineException("Missing 'member_vnf_index' parameter") + vnfd = check_valid_vnf_member_index(indata["member_vnf_index"]) + # check primitive + for config_primitive in get_iterable(vnfd.get("vnf-configuration", {}).get("config-primitive")): + if indata["primitive"] == config_primitive["name"]: + # check needed primitive_params are provided + if indata.get("primitive_params"): + in_primitive_params_copy = copy(indata["primitive_params"]) + else: + in_primitive_params_copy = {} + for paramd in get_iterable(config_primitive.get("parameter")): + if paramd["name"] in in_primitive_params_copy: + del in_primitive_params_copy[paramd["name"]] + elif not paramd.get("default-value"): + raise EngineException("Needed parameter {} not provided for primitive '{}'".format( + paramd["name"], indata["primitive"])) + # check no extra primitive params are provided + if in_primitive_params_copy: + raise EngineException("parameter/s '{}' not present at vnfd for primitive '{}'".format( + list(in_primitive_params_copy.keys()), indata["primitive"])) + break + else: + raise EngineException("Invalid primitive '{}' is not present at vnfd".format(indata["primitive"])) + if operation == "scale": + vnfd = check_valid_vnf_member_index(indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"]) + for scaling_group in get_iterable(vnfd.get("scaling-group-descriptor")): + if indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"] == scaling_group["name"]: break else: - raise EngineException("Invalid parameter member_vnf_index='{}' is not one of the nsd " - "constituent-vnfd".format(indata["member_vnf_index"])) + raise EngineException("Invalid scaleVnfData:scaleByStepData:scaling-group-descriptor '{}' is not " + "present at vnfd:scaling-group-descriptor".format( + indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"])) + if operation == "instantiate": + # check vim_account + check_valid_vim_account(indata["vimAccountId"]) + for in_vnf in get_iterable(indata.get("vnf")): + vnfd = 
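The config-primitive check added to check_ns_operation() above requires every descriptor parameter without a default-value to be supplied and rejects unknown parameters. A standalone sketch of that logic, with a toy primitive descriptor:

    # Standalone sketch of the primitive-parameter validation shown above.
    from copy import copy

    def check_primitive_params(config_primitive, primitive_params):
        remaining = copy(primitive_params or {})
        for paramd in config_primitive.get("parameter", ()):
            if paramd["name"] in remaining:
                del remaining[paramd["name"]]
            elif not paramd.get("default-value"):
                raise ValueError("Needed parameter {} not provided".format(paramd["name"]))
        if remaining:   # extra parameters not present in the descriptor
            raise ValueError("parameter/s '{}' not present at vnfd".format(list(remaining)))

    touch = {"name": "touch", "parameter": [{"name": "filename", "default-value": "/tmp/touched"}]}
    check_primitive_params(touch, {})                       # ok, default-value is used
    check_primitive_params(touch, {"filename": "/tmp/x"})   # ok, explicit value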
check_valid_vnf_member_index(in_vnf["member-vnf-index"]) + if in_vnf.get("vimAccountId"): + check_valid_vim_account(in_vnf["vimAccountId"]) + for in_vdu in get_iterable(in_vnf.get("vdu")): + for vdud in get_iterable(vnfd.get("vdu")): + if vdud["id"] == in_vdu["id"]: + for volume in get_iterable(in_vdu.get("volume")): + for volumed in get_iterable(vdud.get("volumes")): + if volumed["name"] == volume["name"]: + break + else: + raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:" + "volume:name='{}' is not present at vnfd:vdu:volumes list". + format(in_vnf["member-vnf-index"], in_vdu["id"], + volume["name"])) + break + else: + raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu:id='{}' is not " + "present at vnfd".format(in_vnf["member-vnf-index"], in_vdu["id"])) + + for in_internal_vld in get_iterable(in_vnf.get("internal-vld")): + for internal_vldd in get_iterable(vnfd.get("internal-vld")): + if in_internal_vld["name"] == internal_vldd["name"] or \ + in_internal_vld["name"] == internal_vldd["id"]: + break + else: + raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:internal-vld:name='{}'" + " is not present at vnfd '{}'".format(in_vnf["member-vnf-index"], + in_internal_vld["name"], + vnfd["id"])) + for in_vld in get_iterable(indata.get("vld")): + for vldd in get_iterable(nsd.get("vld")): + if in_vld["name"] == vldd["name"] or in_vld["name"] == vldd["id"]: + break + else: + raise EngineException("Invalid parameter vld:name='{}' is not present at nsd:vld".format( + in_vld["name"])) def _format_new_data(self, session, item, indata): now = time() @@ -565,10 +673,11 @@ class Engine(object): "orchestration-progress": {}, # {"networks": {"active": 0, "total": 0}, "vms": {"active": 0, "total": 0}}, - "crete-time": now, + "create-time": now, "nsd-name-ref": nsd["name"], "operational-events": [], # "id", "timestamp", "description", "event", "nsd-ref": nsd["id"], + "nsdId": nsd["_id"], "instantiate_params": ns_request, "ns-instance-config-ref": nsr_id, "id": nsr_id, @@ -624,9 +733,11 @@ class Engine(object): vdur = { "id": vdur_id, "vdu-id-ref": vdu["id"], + # TODO "name": "" Name of the VDU in the VIM "ip-address": None, # mgmt-interface filled by LCM # "vim-id", "flavor-id", "image-id", "management-ip" # filled by LCM "internal-connection-point": [], + "interfaces": [], } # TODO volumes: name, volume-id for icp in vdu.get("internal-connection-point", ()): @@ -638,6 +749,13 @@ class Engine(object): # vim-id # TODO it would be nice having a vim port id } vdur["internal-connection-point"].append(vdu_icp) + for iface in vdu.get("interface", ()): + vdu_iface = { + "name": iface.get("name"), + # "ip-address", "mac-address" # filled by LCM + # vim-id # TODO it would be nice having a vim port id + } + vdur["interfaces"].append(vdu_iface) vnfr_descriptor["vdur"].append(vdur) step = "creating vnfr vnfd-id='{}' constituent-vnfd='{}' at database".format( @@ -705,6 +823,9 @@ class Engine(object): :return: _id: identity of the inserted data. 
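The new_nsr/vnfr code above now pre-creates "interfaces" alongside "internal-connection-point" entries for each VDU so that LCM can later fill ip-address and mac-address. A toy sketch of that record building; any field beyond those visible in the hunk is hypothetical:

    # Toy sketch of deriving a vnfr "vdur" record from a vnfd "vdu" entry.
    from uuid import uuid4

    def build_vdur(vdu):
        vdur = {"id": str(uuid4()), "vdu-id-ref": vdu["id"], "ip-address": None,
                "internal-connection-point": [], "interfaces": []}
        for icp in vdu.get("internal-connection-point", ()):
            vdur["internal-connection-point"].append({"name": icp.get("name")})
        for iface in vdu.get("interface", ()):
            vdur["interfaces"].append({"name": iface.get("name")})
        return vdur

    vdu = {"id": "dataVM", "interface": [{"name": "eth0"}],
           "internal-connection-point": [{"name": "icp1"}]}
    print(build_vdur(vdu))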
""" + if not session["admin"] and item in ("users", "projects"): + raise EngineException("Needed admin privileges to perform this operation", HTTPStatus.UNAUTHORIZED) + try: item_envelop = item if item in ("nsds", "vnfds"): @@ -713,7 +834,7 @@ class Engine(object): # Override descriptor with query string kwargs self._update_descriptor(content, kwargs) - if not indata and item not in ("nsds", "vnfds"): + if not content and item not in ("nsds", "vnfds"): raise EngineException("Empty payload") validate_input(content, item, new=True) @@ -722,7 +843,7 @@ class Engine(object): # in this case the input descriptor is not the data to be stored return self.new_nsr(rollback, session, ns_request=content) - self._validate_new_data(session, item_envelop, content, force) + self._validate_new_data(session, item_envelop, content, force=force) if item in ("nsds", "vnfds"): content = {"_admin": {"userDefinedData": content}} self._format_new_data(session, item, content) @@ -805,15 +926,15 @@ class Engine(object): # raise EngineException("Cannot get ns_instance '{}': {}".format(e), HTTPStatus.NOT_FOUND) def _add_read_filter(self, session, item, filter): - if session["project_id"] == "admin": # allows all + if session["admin"]: # allows all return filter if item == "users": filter["username"] = session["username"] - elif item in ("vnfds", "nsds", "nsrs"): + elif item in ("vnfds", "nsds", "nsrs", "vnfrs"): filter["_admin.projects_read.cont"] = ["ANY", session["project_id"]] def _add_delete_filter(self, session, item, filter): - if session["project_id"] != "admin" and item in ("users", "projects"): + if not session["admin"] and item in ("users", "projects"): raise EngineException("Only admin users can perform this task", http_code=HTTPStatus.FORBIDDEN) if item == "users": if filter.get("_id") == session["username"] or filter.get("username") == session["username"]: @@ -821,7 +942,7 @@ class Engine(object): elif item == "project": if filter.get("_id") == session["project_id"]: raise EngineException("You cannot delete your own project", http_code=HTTPStatus.CONFLICT) - elif item in ("vnfds", "nsds") and session["project_id"] != "admin": + elif item in ("vnfds", "nsds") and not session["admin"]: filter["_admin.projects_write.cont"] = ["ANY", session["project_id"]] def get_file(self, session, item, _id, path=None, accept_header=None): @@ -937,7 +1058,10 @@ class Engine(object): descriptor = self.get_item(session, item, _id) descriptor_id = descriptor.get("id") if descriptor_id: - self._check_dependencies_on_descriptor(session, item, descriptor_id) + self._check_dependencies_on_descriptor(session, item, descriptor_id, _id) + elif item == "projects": + if not force: + self._check_project_dependencies(_id) if item == "nsrs": nsr = self.db.get_one(item, filter) @@ -1051,8 +1175,9 @@ class Engine(object): except ValidationError as e: raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY) - _deep_update(content, indata) - self._validate_new_data(session, item, content, id, force) + self._check_edition(session, item, indata, id, force) + deep_update(content, indata) + self._validate_new_data(session, item, content, id=id, force=force) # self._format_new_data(session, item, content) self.db.replace(item, id, content) if item in ("vim_accounts", "sdns"): @@ -1075,6 +1200,8 @@ class Engine(object): :param force: If True avoid some dependence checks :return: dictionary, raise exception if not found. 
""" + if not session["admin"] and item == "projects": + raise EngineException("Needed admin privileges to perform this operation", HTTPStatus.UNAUTHORIZED) content = self.get_item(session, item, _id) return self._edit_item(session, item, _id, content, indata, kwargs, force) diff --git a/osm_nbi/html_out.py b/osm_nbi/html_out.py index a5b1bca..857d954 100644 --- a/osm_nbi/html_out.py +++ b/osm_nbi/html_out.py @@ -26,7 +26,8 @@ html_start = """ USERs PROJECTs TOKENs - VIMs + VIMs + SDNs logout @@ -91,7 +92,7 @@ html_upload_body = """ html_nslcmop_body = """ nslcm operations -VNFRS +VNFRS

diff --git a/osm_nbi/html_public/version b/osm_nbi/html_public/version index c9b57d9..03db56b 100644 --- a/osm_nbi/html_public/version +++ b/osm_nbi/html_public/version @@ -1,3 +1,3 @@ -0.1.11 -2018-06-27 +0.1.17 +2018-09-12 diff --git a/osm_nbi/nbi.py b/osm_nbi/nbi.py index e34cfad..f8c564d 100644 --- a/osm_nbi/nbi.py +++ b/osm_nbi/nbi.py @@ -68,15 +68,15 @@ URL: /osm GET POST /ns_lcm_op_occs 5 5 / 5 5 5 TO BE COMPLETED 5 5 - /vnfrs O - / O + /vnf_instances (also vnfrs for compatibility) O + / O /subscriptions 5 5 / 5 X /admin/v1 /tokens O O / O O /users O O - / O O + / O O O O /projects O O / O O /vims_accounts (also vims for compatibility) O O @@ -152,7 +152,7 @@ class Server(object): "": {"METHODS": ("GET", "DELETE")} }, "users": {"METHODS": ("GET", "POST"), - "": {"METHODS": ("GET", "POST", "DELETE")} + "": {"METHODS": ("GET", "POST", "DELETE", "PATCH", "PUT")} }, "projects": {"METHODS": ("GET", "POST"), "": {"METHODS": ("GET", "DELETE")} @@ -196,7 +196,7 @@ class Server(object): "": {"METHODS": ("GET", "PUT", "DELETE")} }, "vnf_packages": {"METHODS": ("GET", "POST"), - "": {"METHODS": ("GET", "DELETE"), "TODO": "PATCH", # GET: vnfPkgInfo + "": {"METHODS": ("GET", "DELETE", "PATCH"), # GET: vnfPkgInfo "package_content": {"METHODS": ("GET", "PUT"), # package "upload_from_uri": {"TODO": "POST"} }, @@ -216,7 +216,7 @@ class Server(object): }, "ns_instances": {"METHODS": ("GET", "POST"), "": {"METHODS": ("GET", "DELETE"), - "scale": {"TODO": "POST"}, + "scale": {"METHODS": "POST"}, "terminate": {"METHODS": "POST"}, "instantiate": {"METHODS": "POST"}, "action": {"METHODS": "POST"}, @@ -228,6 +228,9 @@ class Server(object): "vnfrs": {"METHODS": ("GET"), "": {"METHODS": ("GET")} }, + "vnf_instances": {"METHODS": ("GET"), + "": {"METHODS": ("GET")} + }, } }, } @@ -508,7 +511,7 @@ class Server(object): return f elif len(args) == 2 and args[0] == "db-clear": - return self.engine.del_item_list({"project_id": "admin"}, args[1], {}) + return self.engine.del_item_list({"project_id": "admin", "admin": True}, args[1], kwargs) elif args and args[0] == "prune": return self.engine.prune() elif args and args[0] == "login": @@ -657,7 +660,7 @@ class Server(object): engine_item = "nsrs" if item == "ns_lcm_op_occs": engine_item = "nslcmops" - if item == "vnfrs": + if item == "vnfrs" or item == "vnf_instances": engine_item = "vnfrs" if engine_item == "vims": # TODO this is for backward compatibility, it will remove in the future engine_item = "vim_accounts" @@ -727,6 +730,7 @@ class Server(object): cherrypy.response.status = HTTPStatus.ACCEPTED.value elif method in ("PUT", "PATCH"): + outdata = None if not indata and not kwargs: raise NbiException("Nothing to update. Provide payload and/or query string", HTTPStatus.BAD_REQUEST) @@ -735,10 +739,9 @@ class Server(object): cherrypy.request.headers) if not completed: cherrypy.response.headers["Transaction-Id"] = id - cherrypy.response.status = HTTPStatus.NO_CONTENT.value - outdata = None else: - outdata = {"id": self.engine.edit_item(session, engine_item, _id, indata, kwargs, force=force)} + self.engine.edit_item(session, engine_item, _id, indata, kwargs, force=force) + cherrypy.response.status = HTTPStatus.NO_CONTENT.value else: raise NbiException("Method {} not allowed".format(method), HTTPStatus.METHOD_NOT_ALLOWED) return self._format_out(outdata, session, _format) diff --git a/osm_nbi/tests/clear-all.sh b/osm_nbi/tests/clear-all.sh new file mode 100755 index 0000000..5a9a9c6 --- /dev/null +++ b/osm_nbi/tests/clear-all.sh @@ -0,0 +1,86 @@ +#! 
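For the nbi.py method map edited below (PATCH/PUT allowed on individual users, the new vnf_instances alias for vnfrs), here is a simplified sketch of how such a nested map can be walked to validate a request. "<ID>" is a readability stand-in for the per-item key; the real Server class also handles TODO entries and engine-item aliases:

    # Simplified walk of a nested URL/method map like the one in nbi.py.
    method_map = {
        "users": {"METHODS": ("GET", "POST"),
                  "<ID>": {"METHODS": ("GET", "POST", "DELETE", "PATCH", "PUT")}},
        "projects": {"METHODS": ("GET", "POST"),
                     "<ID>": {"METHODS": ("GET", "DELETE")}},
    }

    def method_allowed(path_parts, method):
        node = method_map
        for part in path_parts:
            if part in node:
                node = node[part]
            elif "<ID>" in node:          # any concrete id falls into the <ID> branch
                node = node["<ID>"]
            else:
                return False
        return method in node.get("METHODS", ())

    print(method_allowed(["users", "some-id"], "PATCH"))     # True after this change
    print(method_allowed(["projects", "some-id"], "PATCH"))  # False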
/bin/bash
+# author: Alfonso Tierno
+# Script that uses the test NBI URL to clean database. See usage
+
+
+function usage(){
+    echo -e "usage: $0 [OPTIONS]"
+    echo -e "TEST NBI API is used to clean database content, except user admin. Useful for testing."
+    echo -e "NOTE: database is cleaned but not the content of other modules as RO or VCA that must be cleaned manually."
+    echo -e " OPTIONS"
+    echo -e " -h --help: show this help"
+    echo -e " -f --force: Do not ask for confirmation"
+    echo -e " --clean-RO: clean RO content. RO client (openmano) must be installed and configured"
+    echo -e " --clean-VCA: clean VCA content. juju must be installed and configured"
+    echo -e " ENV variable 'OSMNBI_URL' is used for the URL of the NBI server. If missing, it uses" \
+        "'https://\$OSM_HOSTNAME:9999/osm'. If 'OSM_HOSTNAME' is missing, localhost is used"
+}
+
+
+function ask_user(){
+    # ask to the user and parse a response among 'y', 'yes', 'n' or 'no'. Case insensitive.
+    # Params: $1 text to ask; $2 Action by default, can be 'y' for yes, 'n' for no, other or empty for not allowed.
+    # Return: true(0) if user type 'yes'; false (1) if user type 'no'
+    read -e -p "$1" USER_CONFIRMATION
+    while true ; do
+        [ -z "$USER_CONFIRMATION" ] && [ "$2" == 'y' ] && return 0
+        [ -z "$USER_CONFIRMATION" ] && [ "$2" == 'n' ] && return 1
+        [ "${USER_CONFIRMATION,,}" == "yes" ] || [ "${USER_CONFIRMATION,,}" == "y" ] && return 0
+        [ "${USER_CONFIRMATION,,}" == "no" ] || [ "${USER_CONFIRMATION,,}" == "n" ] && return 1
+        read -e -p "Please type 'yes' or 'no': " USER_CONFIRMATION
+    done
+}
+
+
+while [ -n "$1" ]
+do
+    option="$1"
+    shift
+    ( [ "$option" == -h ] || [ "$option" == --help ] ) && usage && exit
+    ( [ "$option" == -f ] || [ "$option" == --force ] ) && OSMNBI_CLEAN_FORCE=yes && continue
+    [ "$option" == --clean-RO ] && OSMNBI_CLEAN_RO=yes && continue
+    [ "$option" == --clean-VCA ] && OSMNBI_CLEAN_VCA=yes && continue
+    echo "Unknown option '$option'. Type $0 --help" 2>&1 && exit 1
+done
+
+
+[ -n "$OSMNBI_CLEAN_FORCE" ] || ask_user "Clean database content (y/N)?"
n || exit +[ -z "$OSM_HOSTNAME" ] && OSM_HOSTNAME=localhost +[ -z "$OSMNBI_URL" ] && OSMNBI_URL="https://${OSM_HOSTNAME}:9999/osm" + +if [ -n "$OSMNBI_CLEAN_RO" ] +then + export OPENMANO_TENANT=osm + for dc in `openmano datacenter-list | awk '{print $1}'` + do + export OPENMANO_DATACENTER=$dc + for i in instance-scenario scenario vnf + do + for f in `openmano $i-list | awk '{print $1}'` + do + [[ -n "$f" ]] && [[ "$f" != No ]] && openmano ${i}-delete -f ${f} + done + done + done +fi + +for item in vim_accounts vims sdns nsrs vnfrs nslcmops nsds vnfds projects +do + curl --insecure ${OSMNBI_URL}/test/db-clear/${item} + echo " ${item}" +done +# delete all users except admin +curl --insecure ${OSMNBI_URL}/test/db-clear/users?username.ne=admin + +if [ -n "$OSMNBI_CLEAN_RO" ] +then + for dc in `openmano datacenter-list | awk '{print $1}'` ; do openmano datacenter-detach $dc ; done + for dc in `openmano datacenter-list --all | awk '{print $1}'` ; do openmano datacenter-delete -f $dc ; done + for dc in `openmano sdn-controller-list | awk '{print $1}'` ; do openmano sdn-controller-delete -f $dc ; done +fi + +if [ -n "$OSMNBI_CLEAN_VCA" ] +then + juju destroy-model -y default + juju add-model default +fi diff --git a/osm_nbi/tests/test.py b/osm_nbi/tests/test.py index 88d57e9..4e21b3f 100755 --- a/osm_nbi/tests/test.py +++ b/osm_nbi/tests/test.py @@ -8,24 +8,51 @@ import json import logging import yaml # import json -import tarfile -from os import makedirs +# import tarfile +from time import sleep +import os __author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com" __date__ = "$2018-03-01$" -__version__ = "0.1" -version_date = "Mar 2018" +__version__ = "0.2" +version_date = "Jul 2018" def usage(): print("Usage: ", sys.argv[0], "[options]") - print(" --version: prints current version") - print(" -f|--file FILE: file to be sent") + print(" Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'") + print(" If this is the case env variables 'OSMNBITEST_VIM_NAME' must be suplied to create a VIM if not exist " + "where deployment is done") + print("OPTIONS") print(" -h|--help: shows this help") - print(" -u|--url URL: complete server URL") - print(" -s|--chunk-size SIZE: size of chunks, by default 1000") - print(" -t|--token TOKEN: Authorizaton token, previously obtained from server") + print(" --insecure: Allows non trusted https NBI server") + print(" --list: list available tests") + print(" --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with " + "'--test-osm'") + print(" -p|--password PASSWORD: NBI access password. 'admin' by default") + print(" ---project PROJECT: NBI access project. 'admin' by default") + print(" --test TEST[,...]: Execute only a test or a comma separated list of tests") + print(" --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config") + print(" --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use " + "this flag to test the system. LCM and RO components are expected to be up and running") + print(" --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)) + print(" --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(timeout_deploy)) + print(" --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured," + " by default {}s".format(timeout_configure)) + print(" -u|--user USERNAME: NBI access username. 
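A Python sketch equivalent to the curl db-clear loop of clear-all.sh above; verify=False plays the role of curl --insecure, and the OSMNBI_URL/OSM_HOSTNAME convention is the same as in the script:

    # Calls the NBI test endpoint to clear each collection, keeping only the admin user.
    import os
    import requests

    osmnbi_url = os.environ.get(
        "OSMNBI_URL",
        "https://{}:9999/osm".format(os.environ.get("OSM_HOSTNAME", "localhost")))

    for item in ("vim_accounts", "vims", "sdns", "nsrs", "vnfrs", "nslcmops",
                 "nsds", "vnfds", "projects"):
        r = requests.get("{}/test/db-clear/{}".format(osmnbi_url, item), verify=False)
        print(item, r.status_code)

    # delete all users except admin
    requests.get("{}/test/db-clear/users?username.ne=admin".format(osmnbi_url), verify=False)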
'admin' by default") + print(" --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default") print(" -v|--verbose print debug information, can be used several times") + print(" --no-verbose remove verbosity") + print(" --version: prints current version") + print("ENV variables used for real deployment tests with option osm-test.") + print(" export OSMNBITEST_VIM_NAME=vim-name") + print(" export OSMNBITEST_VIM_URL=vim-url") + print(" export OSMNBITEST_VIM_TYPE=vim-type") + print(" export OSMNBITEST_VIM_TENANT=vim-tenant") + print(" export OSMNBITEST_VIM_USER=vim-user") + print(" export OSMNBITEST_VIM_PASSWORD=vim-password") + print(" export OSMNBITEST_VIM_CONFIG=\"vim-config\"") + print(" export OSMNBITEST_NS_NAME=\"vim-config\"") return @@ -48,12 +75,10 @@ r_header_zip = {"Content-type": "application/zip"} headers_zip = { "Accept": "application/zip", } -# test without authorization -test_not_authorized_list = ( - ("NA1", "Invalid token", "GET", "/admin/v1/users", headers_json, None, 401, r_header_json, "json"), - ("NA2", "Invalid URL", "POST", "/admin/v1/nonexist", headers_yaml, None, 405, r_header_yaml, "yaml"), - ("NA3", "Invalid version", "DELETE", "/admin/v2/users", headers_yaml, None, 405, r_header_yaml, "yaml"), -) +headers_zip_yaml = { + "Accept": "application/yaml", "Content-type": "application/zip" +} + # test ones authorized test_authorized_list = ( @@ -64,32 +89,9 @@ test_authorized_list = ( ("AU3", "Invalid nsd id", "DELETE", "/nsd/v1/ns_descriptors_content/non-existing-id", headers_yaml, None, 404, r_header_yaml, "yaml"), ) - -vim = { - "schema_version": "1.0", - "schema_type": "No idea", - "name": "myVim", - "description": "Descriptor name", - "vim_type": "openstack", - "vim_url": "http://localhost:/vim", - "vim_tenant_name": "vimTenant", - "vim_user": "user", - "vim_password": "password", - "config": {"config_param": 1} -} - -vim_bad = vim.copy() -vim_bad.pop("name") - -test_admin_list1 = ( - ("VIM1", "Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, vim, (201, 204), - {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json"), - ("VIM2", "Create VIM bad schema", "POST", "/admin/v1/vim_accounts", headers_json, vim_bad, 422, None, headers_json), - ("VIM2", "Create VIM name repeated", "POST", "/admin/v1/vim_accounts", headers_json, vim, 409, None, headers_json), - ("VIM4", "Show VIMs", "GET", "/admin/v1/vim_accounts", headers_yaml, None, 200, r_header_yaml, "yaml"), - ("VIM5", "Show VIM", "GET", "/admin/v1/vim_accounts/{VIM1}", headers_yaml, None, 200, r_header_yaml, "yaml"), - ("VIM6", "Delete VIM", "DELETE", "/admin/v1/vim_accounts/{VIM1}", headers_yaml, None, 202, None, 0), -) +timeout = 120 # general timeout +timeout_deploy = 60*10 # timeout for NS deploying without charms +timeout_configure = 60*20 # timeout for NS deploying and configuring class TestException(Exception): @@ -97,18 +99,31 @@ class TestException(Exception): class TestRest: - def __init__(self, url_base, header_base={}, verify=False): + def __init__(self, url_base, header_base=None, verify=False, user="admin", password="admin", project="admin"): self.url_base = url_base - self.header_base = header_base + if header_base is None: + self.header_base = {} + else: + self.header_base = header_base.copy() self.s = requests.session() - self.s.headers = header_base + self.s.headers = self.header_base self.verify = verify + self.token = False + self.user = user + self.password = password + self.project = project + self.vim_id = None # contains ID of tests obtained 
from Location response header. "" key contains last obtained id self.test_ids = {} + self.old_test_description = "" def set_header(self, header): self.s.headers.update(header) + def unset_header(self, key): + if key in self.s.headers: + del self.s.headers[key] + def test(self, name, description, method, url, headers, payload, expected_codes, expected_headers, expected_payload): """ @@ -125,6 +140,7 @@ class TestRest: :param expected_payload: expected payload, 0 if empty, 'yaml', 'json', 'text', 'zip' :return: requests response """ + r = None try: if not self.s: self.s = requests.session() @@ -134,16 +150,16 @@ class TestRest: elif not url.startswith("http"): url = self.url_base + url - var_start = url.find("{") + 1 + var_start = url.find("<") + 1 while var_start: - var_end = url.find("}", var_start) + var_end = url.find(">", var_start) if var_end == -1: break var_name = url[var_start:var_end] if var_name in self.test_ids: url = url[:var_start-1] + self.test_ids[var_name] + url[var_end+1:] var_start += len(self.test_ids[var_name]) - var_start = url.find("{", var_start) + 1 + var_start = url.find("<", var_start) + 1 if payload: if isinstance(payload, str): if payload.startswith("@"): @@ -157,8 +173,10 @@ class TestRest: elif isinstance(payload, dict): payload = json.dumps(payload) - test = "Test {} {} {} {}".format(name, description, method, url) - logger.warning(test) + test_description = "Test {} {} {} {}".format(name, description, method, url) + if self.old_test_description != test_description: + self.old_test_description = test_description + logger.warning(test_description) stream = False # if expected_payload == "zip": # stream = True @@ -216,12 +234,792 @@ class TestRest: self.test_ids[""] = str(_id) # last id return r except TestException as e: - logger.error("{} \nRX code{}: {}".format(e, r.status_code, r.text)) + r_status_code = None + r_text = None + if r: + r_status_code = r.status_code + r_text = r.text + logger.error("{} \nRX code{}: {}".format(e, r_status_code, r_text)) exit(1) except IOError as e: logger.error("Cannot open file {}".format(e)) exit(1) + def get_autorization(self): # user=None, password=None, project=None): + if self.token: # and self.user == user and self.password == password and self.project == project: + return + # self.user = user + # self.password = password + # self.project = project + r = self.test("TOKEN", "Obtain token", "POST", "/admin/v1/tokens", headers_json, + {"username": self.user, "password": self.password, "project_id": self.project}, + (200, 201), r_header_json, "json") + response = r.json() + self.token = response["id"] + self.set_header({"Authorization": "Bearer {}".format(self.token)}) + + def remove_authorization(self): + if self.token: + self.test("TOKEN_DEL", "Delete token", "DELETE", "/admin/v1/tokens/{}".format(self.token), headers_json, + None, (200, 201, 204), None, None) + self.token = None + self.unset_header("Authorization") + + def get_create_vim(self, test_osm): + if self.vim_id: + return self.vim_id + self.get_autorization() + if test_osm: + vim_name = os.environ.get("OSMNBITEST_VIM_NAME") + if not vim_name: + raise TestException( + "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment") + else: + vim_name = "fakeVim" + # Get VIM + r = self.test("_VIMGET1", "Get VIM ID", "GET", "/admin/v1/vim_accounts?name={}".format(vim_name), headers_json, + None, 200, r_header_json, "json") + vims = r.json() + if vims: + return vims[0]["_id"] + # Add VIM + if test_osm: + # check needed environ parameters: + if not 
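TestRest.test() above switches the URL placeholders from {NAME} to <NAME> and resolves them from self.test_ids. An equivalent, regex-based sketch; the stored ids shown are hypothetical:

    # Regex-based equivalent of the <NAME> substitution loop in TestRest.test().
    import re

    test_ids = {"DEPLOY1": "0123456789abcdef", "": "last-obtained-id"}

    def expand_url(url, ids):
        # unknown names are left untouched, "<>" resolves to the last obtained id
        return re.sub(r"<([^>]*)>", lambda m: ids.get(m.group(1), m.group(0)), url)

    print(expand_url("/nsd/v1/ns_descriptors/<DEPLOY1>/nsd_content", test_ids))
    # -> /nsd/v1/ns_descriptors/0123456789abcdef/nsd_content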
os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get("OSMNBITEST_VIM_TENANT"): + raise TestException("Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM" + " to deploy on whit the --test-osm option") + vim_data = "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}', vim_tenant_name: '{}', "\ + "vim_user: {}, vim_password: {}".format(vim_name, + os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"), + os.environ.get("OSMNBITEST_VIM_URL"), + os.environ.get("OSMNBITEST_VIM_TENANT"), + os.environ.get("OSMNBITEST_VIM_USER"), + os.environ.get("OSMNBITEST_VIM_PASSWORD")) + if os.environ.get("OSMNBITEST_VIM_CONFIG"): + vim_data += " ,config: {}".format(os.environ.get("OSMNBITEST_VIM_CONFIG")) + vim_data += "}" + else: + vim_data = "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"\ + ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}" + r = self.test("_VIMGET2", "Create VIM", "POST", "/admin/v1/vim_accounts", headers_yaml, vim_data, + (201), {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"}, "yaml") + location = r.headers.get("Location") + return location[location.rfind("/") + 1:] + + +class TestNonAuthorized: + description = "test invalid URLs. methods and no authorization" + + @staticmethod + def run(engine, test_osm, manual_check, test_params=None): + engine.remove_authorization() + test_not_authorized_list = ( + ("NA1", "Invalid token", "GET", "/admin/v1/users", headers_json, None, 401, r_header_json, "json"), + ("NA2", "Invalid URL", "POST", "/admin/v1/nonexist", headers_yaml, None, 405, r_header_yaml, "yaml"), + ("NA3", "Invalid version", "DELETE", "/admin/v2/users", headers_yaml, None, 405, r_header_yaml, "yaml"), + ) + for t in test_not_authorized_list: + engine.test(*t) + + +class TestUsersProjects: + description = "test project and user creation" + + @staticmethod + def run(engine, test_osm, manual_check, test_params=None): + engine.get_autorization() + engine.test("PU1", "Create project non admin", "POST", "/admin/v1/projects", headers_json, {"name": "P1"}, + (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json") + engine.test("PU2", "Create project admin", "POST", "/admin/v1/projects", headers_json, + {"name": "Padmin", "admin": True}, (201, 204), + {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json") + engine.test("PU3", "Create project bad format", "POST", "/admin/v1/projects", headers_json, {"name": 1}, 422, + r_header_json, "json") + engine.test("PU4", "Create user with bad project", "POST", "/admin/v1/users", headers_json, + {"username": "U1", "projects": ["P1", "P2", "Padmin"], "password": "pw1"}, 409, + r_header_json, "json") + engine.test("PU5", "Create user with bad project and force", "POST", "/admin/v1/users?FORCE=True", headers_json, + {"username": "U1", "projects": ["P1", "P2", "Padmin"], "password": "pw1"}, 201, + {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json") + engine.test("PU6", "Create user 2", "POST", "/admin/v1/users", headers_json, + {"username": "U2", "projects": ["P1"], "password": "pw2"}, 201, + {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json") + + engine.test("PU7", "Edit user U1, delete P2 project", "PATCH", "/admin/v1/users/U1", headers_json, + {"projects": {"$'P2'": None}}, 204, None, None) + res = engine.test("PU1", "Check user U1, contains the right projects", "GET", "/admin/v1/users/U1", + 
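get_create_vim() above assembles vim_data as a hand-formatted YAML string from the OSMNBITEST_VIM_* variables. An alternative sketch that builds a dict and serializes it with yaml.safe_dump; same data, less quoting risk, with defaults mirroring the fakeVim values:

    import os
    import yaml

    vim_data = {
        "schema_version": "1.0",
        "name": os.environ.get("OSMNBITEST_VIM_NAME", "fakeVim"),
        "vim_type": os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
        "vim_url": os.environ.get("OSMNBITEST_VIM_URL", "http://10.11.12.13/fake"),
        "vim_tenant_name": os.environ.get("OSMNBITEST_VIM_TENANT", "vimtenant"),
        "vim_user": os.environ.get("OSMNBITEST_VIM_USER", "vimuser"),
        "vim_password": os.environ.get("OSMNBITEST_VIM_PASSWORD", "vimpassword"),
    }
    if os.environ.get("OSMNBITEST_VIM_CONFIG"):
        vim_data["config"] = yaml.safe_load(os.environ["OSMNBITEST_VIM_CONFIG"])

    payload = yaml.safe_dump(vim_data, default_flow_style=True)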
headers_json, None, 200, None, json) + u1 = res.json() + # print(u1) + expected_projects = ["P1", "Padmin"] + if u1["projects"] != expected_projects: + raise TestException("User content projects '{}' different than expected '{}'. Edition has not done" + " properly".format(u1["projects"], expected_projects)) + + engine.test("PU8", "Edit user U1, set Padmin as default project", "PUT", "/admin/v1/users/U1", headers_json, + {"projects": {"$'Padmin'": None, "$+[0]": "Padmin"}}, 204, None, None) + res = engine.test("PU1", "Check user U1, contains the right projects", "GET", "/admin/v1/users/U1", + headers_json, None, 200, None, json) + u1 = res.json() + # print(u1) + expected_projects = ["Padmin", "P1"] + if u1["projects"] != expected_projects: + raise TestException("User content projects '{}' different than expected '{}'. Edition has not done" + " properly".format(u1["projects"], expected_projects)) + + engine.test("PU9", "Edit user U1, change password", "PATCH", "/admin/v1/users/U1", headers_json, + {"password": "pw1_new"}, 204, None, None) + + engine.test("PU10", "Change to project P1 non existing", "POST", "/admin/v1/tokens/", headers_json, + {"project_id": "P1"}, 401, r_header_json, "json") + + res = engine.test("PU1", "Change to user U1 project P1", "POST", "/admin/v1/tokens", headers_json, + {"username": "U1", "password": "pw1_new", "project_id": "P1"}, (200, 201), + r_header_json, "json") + response = res.json() + engine.set_header({"Authorization": "Bearer {}".format(response["id"])}) + + engine.test("PU11", "Edit user projects non admin", "PUT", "/admin/v1/users/U1", headers_json, + {"projects": {"$'P1'": None}}, 401, r_header_json, "json") + engine.test("PU12", "Add new project non admin", "POST", "/admin/v1/projects", headers_json, + {"name": "P2"}, 401, r_header_json, "json") + engine.test("PU13", "Add new user non admin", "POST", "/admin/v1/users", headers_json, + {"username": "U3", "projects": ["P1"], "password": "pw3"}, 401, + r_header_json, "json") + + res = engine.test("PU14", "Change to user U1 project Padmin", "POST", "/admin/v1/tokens", headers_json, + {"project_id": "Padmin"}, (200, 201), r_header_json, "json") + response = res.json() + engine.set_header({"Authorization": "Bearer {}".format(response["id"])}) + + engine.test("PU15", "Add new project admin", "POST", "/admin/v1/projects", headers_json, {"name": "P2"}, + (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json") + engine.test("PU16", "Add new user U3 admin", "POST", "/admin/v1/users", + headers_json, {"username": "U3", "projects": ["P2"], "password": "pw3"}, (201, 204), + {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json") + engine.test("PU17", "Edit user projects admin", "PUT", "/admin/v1/users/U3", headers_json, + {"projects": ["P2"]}, 204, None, None) + + engine.test("PU18", "Delete project P2 conflict", "DELETE", "/admin/v1/projects/P2", headers_json, None, 409, + r_header_json, "json") + engine.test("PU19", "Delete project P2 forcing", "DELETE", "/admin/v1/projects/P2?FORCE=True", headers_json, + None, 204, None, None) + + engine.test("PU20", "Delete user U1. 
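The PU7/PU8 PATCH and PUT bodies above use the NBI dictionary-edit syntax on the user's project list; the test's own expected_projects values summarize the intended effect, recapped here with plain list operations:

    projects = ["P1", "P2", "Padmin"]          # after PU5 (forced creation)

    # PU7: {"projects": {"$'P2'": None}} removes element "P2"
    projects.remove("P2")
    assert projects == ["P1", "Padmin"]

    # PU8: {"projects": {"$'Padmin'": None, "$+[0]": "Padmin"}}
    # removes "Padmin" and re-inserts it at position 0 (the default project)
    projects.remove("Padmin")
    projects.insert(0, "Padmin")
    assert projects == ["Padmin", "P1"]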
Conflict deleting own user", "DELETE", "/admin/v1/users/U1", headers_json, + None, 409, r_header_json, "json") + engine.test("PU21", "Delete user U2", "DELETE", "/admin/v1/users/U2", headers_json, None, 204, None, None) + engine.test("PU22", "Delete user U3", "DELETE", "/admin/v1/users/U3", headers_json, None, 204, None, None) + # change to admin + engine.remove_authorization() # To force get authorization + engine.get_autorization() + engine.test("PU23", "Delete user U1", "DELETE", "/admin/v1/users/U1", headers_json, None, 204, None, None) + engine.test("PU24", "Delete project P1", "DELETE", "/admin/v1/projects/P1", headers_json, None, 204, None, None) + engine.test("PU25", "Delete project Padmin", "DELETE", "/admin/v1/projects/Padmin", headers_json, None, 204, + None, None) + + +class TestFakeVim: + description = "Creates/edit/delete fake VIMs and SDN controllers" + + def __init__(self): + self.vim = { + "schema_version": "1.0", + "schema_type": "No idea", + "name": "myVim", + "description": "Descriptor name", + "vim_type": "openstack", + "vim_url": "http://localhost:/vim", + "vim_tenant_name": "vimTenant", + "vim_user": "user", + "vim_password": "password", + "config": {"config_param": 1} + } + self.sdn = { + "name": "sdn-name", + "description": "sdn-description", + "dpid": "50:50:52:54:00:94:21:21", + "ip": "192.168.15.17", + "port": 8080, + "type": "opendaylight", + "version": "3.5.6", + "user": "user", + "password": "passwd" + } + self.port_mapping = [ + {"compute_node": "compute node 1", + "ports": [{"pci": "0000:81:00.0", "switch_port": "port-2/1", "switch_mac": "52:54:00:94:21:21"}, + {"pci": "0000:81:00.1", "switch_port": "port-2/2", "switch_mac": "52:54:00:94:21:22"} + ]}, + {"compute_node": "compute node 2", + "ports": [{"pci": "0000:81:00.0", "switch_port": "port-2/3", "switch_mac": "52:54:00:94:21:23"}, + {"pci": "0000:81:00.1", "switch_port": "port-2/4", "switch_mac": "52:54:00:94:21:24"} + ]} + ] + + def run(self, engine, test_osm, manual_check, test_params=None): + + vim_bad = self.vim.copy() + vim_bad.pop("name") + + engine.get_autorization() + engine.test("FVIM1", "Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, (201, 204), + {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json") + engine.test("FVIM2", "Create VIM without name, bad schema", "POST", "/admin/v1/vim_accounts", headers_json, + vim_bad, 422, None, headers_json) + engine.test("FVIM3", "Create VIM name repeated", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, + 409, None, headers_json) + engine.test("FVIM4", "Show VIMs", "GET", "/admin/v1/vim_accounts", headers_yaml, None, 200, r_header_yaml, + "yaml") + engine.test("FVIM5", "Show VIM", "GET", "/admin/v1/vim_accounts/", headers_yaml, None, 200, + r_header_yaml, "yaml") + if not test_osm: + # delete with FORCE + engine.test("FVIM6", "Delete VIM", "DELETE", "/admin/v1/vim_accounts/?FORCE=True", headers_yaml, + None, 202, None, 0) + engine.test("FVIM7", "Check VIM is deleted", "GET", "/admin/v1/vim_accounts/", headers_yaml, None, + 404, r_header_yaml, "yaml") + else: + # delete and wait until is really deleted + engine.test("FVIM6", "Delete VIM", "DELETE", "/admin/v1/vim_accounts/", headers_yaml, None, 202, + None, 0) + wait = timeout + while wait >= 0: + r = engine.test("FVIM7", "Check VIM is deleted", "GET", "/admin/v1/vim_accounts/", headers_yaml, + None, None, r_header_yaml, "yaml") + if r.status_code == 404: + break + elif r.status_code == 200: + wait -= 5 + sleep(5) + else: + raise 
TestException("Vim created at 'FVIM1' is not delete after {} seconds".format(timeout)) + + +class TestVIMSDN(TestFakeVim): + description = "Creates VIM with SDN editing SDN controllers and port_mapping" + + def __init__(self): + TestFakeVim.__init__(self) + + def run(self, engine, test_osm, manual_check, test_params=None): + engine.get_autorization() + # Added SDN + engine.test("VIMSDN1", "Create SDN", "POST", "/admin/v1/sdns", headers_json, self.sdn, (201, 204), + {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"}, "json") + # sleep(5) + # Edit SDN + engine.test("VIMSDN2", "Edit SDN", "PATCH", "/admin/v1/sdns/", headers_json, {"name": "new_sdn_name"}, + 204, None, None) + # sleep(5) + # VIM with SDN + self.vim["config"]["sdn-controller"] = engine.test_ids["VIMSDN1"] + self.vim["config"]["sdn-port-mapping"] = self.port_mapping + engine.test("VIMSDN3", "Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, (200, 204, 201), + {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json"), + + self.port_mapping[0]["compute_node"] = "compute node XX" + engine.test("VIMSDN4", "Edit VIM change port-mapping", "PUT", "/admin/v1/vim_accounts/", headers_json, + {"config": {"sdn-port-mapping": self.port_mapping}}, 204, None, None) + engine.test("VIMSDN5", "Edit VIM remove port-mapping", "PUT", "/admin/v1/vim_accounts/", headers_json, + {"config": {"sdn-port-mapping": None}}, 204, None, None) + + if not test_osm: + # delete with FORCE + engine.test("VIMSDN6", "Delete VIM remove port-mapping", "DELETE", + "/admin/v1/vim_accounts/?FORCE=True", headers_json, None, 202, None, 0) + engine.test("VIMSDN7", "Delete SDNC", "DELETE", "/admin/v1/sdns/?FORCE=True", headers_json, None, + 202, None, 0) + + engine.test("VIMSDN8", "Check VIM is deleted", "GET", "/admin/v1/vim_accounts/", headers_yaml, + None, 404, r_header_yaml, "yaml") + engine.test("VIMSDN9", "Check SDN is deleted", "GET", "/admin/v1/sdns/", headers_yaml, None, + 404, r_header_yaml, "yaml") + else: + # delete and wait until is really deleted + engine.test("VIMSDN6", "Delete VIM remove port-mapping", "DELETE", "/admin/v1/vim_accounts/", + headers_json, None, (202, 201, 204), None, 0) + engine.test("VIMSDN7", "Delete SDN", "DELETE", "/admin/v1/sdns/", headers_json, None, + (202, 201, 204), None, 0) + wait = timeout + while wait >= 0: + r = engine.test("VIMSDN8", "Check VIM is deleted", "GET", "/admin/v1/vim_accounts/", + headers_yaml, None, None, r_header_yaml, "yaml") + if r.status_code == 404: + break + elif r.status_code == 200: + wait -= 5 + sleep(5) + else: + raise TestException("Vim created at 'VIMSDN3' is not delete after {} seconds".format(timeout)) + while wait >= 0: + r = engine.test("VIMSDN9", "Check SDNC is deleted", "GET", "/admin/v1/sdns/", + headers_yaml, None, None, r_header_yaml, "yaml") + if r.status_code == 404: + break + elif r.status_code == 200: + wait -= 5 + sleep(5) + else: + raise TestException("SDNC created at 'VIMSDN1' is not delete after {} seconds".format(timeout)) + + +class TestDeploy: + description = "Base class for downloading descriptors from ETSI, onboard and deploy in real VIM" + + def __init__(self): + self.step = 0 + self.nsd_id = None + self.vim_id = None + self.nsd_test = None + self.ns_test = None + self.vnfds_test = [] + self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/" + self.vnfd_filenames = ("cirros_vnf.tar.gz",) + self.nsd_filename = "cirros_2vnf_ns.tar.gz" + self.uses_configuration = False + + def 
create_descriptors(self, engine): + temp_dir = os.path.dirname(__file__) + "/temp/" + if not os.path.exists(temp_dir): + os.makedirs(temp_dir) + for vnfd_filename in self.vnfd_filenames: + if "/" in vnfd_filename: + vnfd_filename_path = vnfd_filename + if not os.path.exists(vnfd_filename_path): + raise TestException("File '{}' does not exist".format(vnfd_filename_path)) + else: + vnfd_filename_path = temp_dir + vnfd_filename + if not os.path.exists(vnfd_filename_path): + with open(vnfd_filename_path, "wb") as file: + response = requests.get(self.descriptor_url + vnfd_filename) + if response.status_code >= 300: + raise TestException("Error downloading descriptor from '{}': {}".format( + self.descriptor_url + vnfd_filename, response.status_code)) + file.write(response.content) + if vnfd_filename_path.endswith(".yaml"): + headers = headers_yaml + else: + headers = headers_zip_yaml + if self.step % 2 == 0: + # vnfd CREATE AND UPLOAD in one step: + engine.test("DEPLOY{}".format(self.step), "Onboard VNFD in one step", "POST", + "/vnfpkgm/v1/vnf_packages_content", headers, "@b" + vnfd_filename_path, 201, + {"Location": "/vnfpkgm/v1/vnf_packages_content/", "Content-Type": "application/yaml"}, yaml) + self.vnfds_test.append("DEPLOY" + str(self.step)) + self.step += 1 + else: + # vnfd CREATE AND UPLOAD ZIP + engine.test("DEPLOY{}".format(self.step), "Onboard VNFD step 1", "POST", "/vnfpkgm/v1/vnf_packages", + headers_json, None, 201, + {"Location": "/vnfpkgm/v1/vnf_packages/", "Content-Type": "application/json"}, "json") + self.vnfds_test.append("DEPLOY" + str(self.step)) + self.step += 1 + # location = r.headers["Location"] + # vnfd_id = location[location.rfind("/")+1:] + engine.test("DEPLOY{}".format(self.step), "Onboard VNFD step 2 as ZIP", "PUT", + "/vnfpkgm/v1/vnf_packages/<>/package_content", + headers, "@b" + vnfd_filename_path, 204, None, 0) + self.step += 2 + + if "/" in self.nsd_filename: + nsd_filename_path = self.nsd_filename + if not os.path.exists(nsd_filename_path): + raise TestException("File '{}' does not exist".format(nsd_filename_path)) + else: + nsd_filename_path = temp_dir + self.nsd_filename + if not os.path.exists(nsd_filename_path): + with open(nsd_filename_path, "wb") as file: + response = requests.get(self.descriptor_url + self.nsd_filename) + if response.status_code >= 300: + raise TestException("Error downloading descriptor from '{}': {}".format( + self.descriptor_url + self.nsd_filename, response.status_code)) + file.write(response.content) + if nsd_filename_path.endswith(".yaml"): + headers = headers_yaml + else: + headers = headers_zip_yaml + + self.nsd_test = "DEPLOY" + str(self.step) + if self.step % 2 == 0: + # nsd CREATE AND UPLOAD in one step: + engine.test("DEPLOY{}".format(self.step), "Onboard NSD in one step", "POST", + "/nsd/v1/ns_descriptors_content", headers, "@b" + nsd_filename_path, 201, + {"Location": "/nsd/v1/ns_descriptors_content/", "Content-Type": "application/yaml"}, yaml) + self.step += 1 + else: + # nsd CREATE AND UPLOAD ZIP + engine.test("DEPLOY{}".format(self.step), "Onboard NSD step 1", "POST", "/nsd/v1/ns_descriptors", + headers_json, None, 201, + {"Location": "/nsd/v1/ns_descriptors/", "Content-Type": "application/json"}, "json") + self.step += 1 + # location = r.headers["Location"] + # vnfd_id = location[location.rfind("/")+1:] + engine.test("DEPLOY{}".format(self.step), "Onboard NSD step 2 as ZIP", "PUT", + "/nsd/v1/ns_descriptors/<>/nsd_content", + headers, "@b" + nsd_filename_path, 204, None, 0) + self.step += 2 + self.nsd_id = 
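create_descriptors() above downloads each package and onboards it either in one step (*_packages_content) or in two steps (create, then PUT package_content). A requests-based sketch of the one-step path; the NBI URL and the token value are assumptions, the endpoint and headers mirror the test:

    import requests

    descriptor_url = "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
    nbi_url = "https://localhost:9999/osm"            # assumption: local NBI
    headers = {"Content-type": "application/zip", "Accept": "application/yaml",
               "Authorization": "Bearer <token>"}     # token obtained beforehand

    package = requests.get(descriptor_url + "cirros_vnf.tar.gz").content
    r = requests.post(nbi_url + "/vnfpkgm/v1/vnf_packages_content",
                      data=package, headers=headers, verify=False)
    print(r.status_code, r.headers.get("Location"))   # expect 201 and the new package URL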
engine.test_ids[self.nsd_test] + + def delete_descriptors(self, engine): + # delete descriptors + engine.test("DEPLOY{}".format(self.step), "Delete NSSD SOL005", "DELETE", + "/nsd/v1/ns_descriptors/<{}>".format(self.nsd_test), + headers_yaml, None, 204, None, 0) + self.step += 1 + for vnfd_test in self.vnfds_test: + engine.test("DEPLOY{}".format(self.step), "Delete VNFD SOL005", "DELETE", + "/vnfpkgm/v1/vnf_packages/<{}>".format(vnfd_test), headers_yaml, None, 204, None, 0) + self.step += 1 + + def instantiate(self, engine, ns_data): + ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256) + # create NS Two steps + r = engine.test("DEPLOY{}".format(self.step), "Create NS step 1", "POST", "/nslcm/v1/ns_instances", + headers_yaml, ns_data_text, 201, + {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"}, "yaml") + self.ns_test = "DEPLOY{}".format(self.step) + engine.test_ids[self.ns_test] + self.step += 1 + r = engine.test("DEPLOY{}".format(self.step), "Instantiate NS step 2", "POST", + "/nslcm/v1/ns_instances/<{}>/instantiate".format(self.ns_test), headers_yaml, ns_data_text, + 201, {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}, "yaml") + nslcmop_test = "DEPLOY{}".format(self.step) + self.step += 1 + + if test_osm: + # Wait until status is Ok + wait = timeout_configure if self.uses_configuration else timeout_deploy + while wait >= 0: + r = engine.test("DEPLOY{}".format(self.step), "Wait until NS is deployed and configured", "GET", + "/nslcm/v1/ns_lcm_op_occs/<{}>".format(nslcmop_test), headers_json, None, + 200, r_header_json, "json") + nslcmop = r.json() + if "COMPLETED" in nslcmop["operationState"]: + break + elif "FAILED" in nslcmop["operationState"]: + raise TestException("NS instantiate has failed: {}".format(nslcmop["detailed-status"])) + wait -= 5 + sleep(5) + else: + raise TestException("NS instantiate is not done after {} seconds".format(timeout_deploy)) + self.step += 1 + + def _wait_nslcmop_ready(self, engine, nslcmop_test, timeout_deploy): + wait = timeout + while wait >= 0: + r = engine.test("DEPLOY{}".format(self.step), "Wait to ns lcm operation complete", "GET", + "/nslcm/v1/ns_lcm_op_occs/<{}>".format(nslcmop_test), headers_json, None, + 200, r_header_json, "json") + nslcmop = r.json() + if "COMPLETED" in nslcmop["operationState"]: + break + elif "FAILED" in nslcmop["operationState"]: + raise TestException("NS terminate has failed: {}".format(nslcmop["detailed-status"])) + wait -= 5 + sleep(5) + else: + raise TestException("NS instantiate is not terminate after {} seconds".format(timeout)) + + def terminate(self, engine): + # remove deployment + if test_osm: + r = engine.test("DEPLOY{}".format(self.step), "Terminate NS", "POST", + "/nslcm/v1/ns_instances/<{}>/terminate".format(self.ns_test), headers_yaml, None, + 201, {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}, "yaml") + nslcmop2_test = "DEPLOY{}".format(self.step) + self.step += 1 + # Wait until status is Ok + self._wait_nslcmop_ready(engine, nslcmop2_test, timeout_deploy) + + r = engine.test("DEPLOY{}".format(self.step), "Delete NS", "DELETE", + "/nslcm/v1/ns_instances/<{}>".format(self.ns_test), headers_yaml, None, + 204, None, 0) + self.step += 1 + else: + r = engine.test("DEPLOY{}".format(self.step), "Delete NS with FORCE", "DELETE", + "/nslcm/v1/ns_instances/<{}>?FORCE=True".format(self.ns_test), headers_yaml, None, + 204, None, 0) + self.step += 1 + + # check all it is deleted + r = 
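_wait_nslcmop_ready() above polls the ns_lcm_op_occs resource until the operation completes or fails. A generic sketch of that polling pattern; get_nslcmop is a hypothetical callable returning the operation as a dict (for example a wrapped GET on the NBI):

    from time import sleep

    def wait_operation_ready(get_nslcmop, timeout=600, period=5):
        remaining = timeout
        while remaining >= 0:
            nslcmop = get_nslcmop()
            state = nslcmop.get("operationState", "")
            if "COMPLETED" in state:
                return nslcmop
            if "FAILED" in state:
                raise RuntimeError("operation failed: {}".format(nslcmop.get("detailed-status")))
            remaining -= period
            sleep(period)
        raise RuntimeError("operation not completed after {} seconds".format(timeout))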
engine.test("DEPLOY{}".format(self.step), "Check NS is deleted", "GET", + "/nslcm/v1/ns_instances/<{}>".format(self.ns_test), headers_yaml, None, + 404, None, "yaml") + self.step += 1 + r = engine.test("DEPLOY{}".format(self.step), "Check NSLCMOPs are deleted", "GET", + "/nslcm/v1/ns_lcm_op_occs?nsInstanceId=<{}>".format(self.ns_test), headers_json, None, + 200, None, "json") + nslcmops = r.json() + if not isinstance(nslcmops, list) or nslcmops: + raise TestException("NS {} deleted but with ns_lcm_op_occ active: {}".format(self.ns_test, nslcmops)) + + def test_ns(self, engine, test_osm): + pass + + def aditional_operations(self, engine, test_osm, manual_check): + pass + + def run(self, engine, test_osm, manual_check, test_params=None): + engine.get_autorization() + nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST") + if test_params: + if "vnfd-files" in test_params: + self.vnfd_filenames = test_params["vnfd-files"].split(",") + if "nsd-file" in test_params: + self.nsd_filename = test_params["nsd-file"] + if test_params.get("ns-name"): + nsname = test_params["ns-name"] + self.create_descriptors(engine) + + # create real VIM if not exist + self.vim_id = engine.get_create_vim(test_osm) + ns_data = {"nsDescription": "default description", "nsName": nsname, "nsdId": self.nsd_id, + "vimAccountId": self.vim_id} + if test_params and test_params.get("ns-config"): + if isinstance(test_params["ns-config"], str): + ns_data.update(yaml.load(test_params["ns-config"])) + else: + ns_data.update(test_params["ns-config"]) + self.instantiate(engine, ns_data) + + if manual_check: + input('NS has been deployed. Perform manual check and press enter to resume') + else: + self.test_ns(engine, test_osm) + self.aditional_operations(engine, test_osm, manual_check) + self.terminate(engine) + self.delete_descriptors(engine) + + +class TestDeployHackfestCirros(TestDeploy): + description = "Load and deploy Hackfest cirros_2vnf_ns example" + + def __init__(self): + super().__init__() + self.vnfd_filenames = ("cirros_vnf.tar.gz",) + self.nsd_filename = "cirros_2vnf_ns.tar.gz" + + def run(self, engine, test_osm, manual_check, test_params=None): + super().run(engine, test_osm, manual_check, test_params) + + +class TestDeployIpMac(TestDeploy): + description = "Load and deploy descriptor examples setting mac, ip address at descriptor and instantiate params" + + def __init__(self): + super().__init__() + self.vnfd_filenames = ("vnfd_2vdu_set_ip_mac2.yaml", "vnfd_2vdu_set_ip_mac.yaml") + self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml" + self.descriptor_url = \ + "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/" + + def run(self, engine, test_osm, manual_check, test_params=None): + # super().run(engine, test_osm, manual_check, test_params) + # run again setting IPs with instantiate parameters + instantiation_params = { + "vnf": [ + { + "member-vnf-index": "1", + "internal-vld": [ + { + "name": "internal_vld1", # net_internal + "ip-profile": { + "ip-version": "ipv4", + "subnet-address": "10.9.8.0/24", + "dhcp-params": {"count": 100, "start-address": "10.9.8.100"} + }, + "internal-connection-point": [ + { + "id-ref": "eth2", + "ip-address": "10.9.8.2", + }, + { + "id-ref": "eth3", + "ip-address": "10.9.8.3", + } + ] + }, + ], + + "vdu": [ + { + "id": "VM1", + "interface": [ + # { + # "name": "iface11", + # "floating-ip-required": True, + # }, + { + "name": "iface13", + "mac-address": "52:33:44:55:66:13" + }, + ], + }, + { + "id": "VM2", + "interface": [ + { + "name": 
"iface21", + "ip-address": "10.31.31.21", + "mac-address": "52:33:44:55:66:21" + }, + ], + }, + ] + }, + ] + } + super().run(engine, test_osm, manual_check, test_params={"ns-config": instantiation_params}) + + +class TestDeployHackfest4(TestDeploy): + description = "Load and deploy Hackfest 4 example." + + def __init__(self): + super().__init__() + self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",) + self.nsd_filename = "hackfest_4_nsd.tar.gz" + self.uses_configuration = True + + def create_descriptors(self, engine): + super().create_descriptors(engine) + # Modify VNFD to add scaling + payload = """ + scaling-group-descriptor: + - name: "scale_dataVM" + max-instance-count: 10 + scaling-policy: + - name: "auto_cpu_util_above_threshold" + scaling-type: "automatic" + threshold-time: 0 + cooldown-time: 60 + scaling-criteria: + - name: "cpu_util_above_threshold" + scale-in-threshold: 15 + scale-in-relational-operation: "LE" + scale-out-threshold: 60 + scale-out-relational-operation: "GE" + vnf-monitoring-param-ref: "all_aaa_cpu_util" + vdu: + - vdu-id-ref: dataVM + count: 1 + scaling-config-action: + - trigger: post-scale-out + vnf-config-primitive-name-ref: touch + - trigger: pre-scale-in + vnf-config-primitive-name-ref: touch + vnf-configuration: + config-primitive: + - name: touch + parameter: + - name: filename + data-type: STRING + default-value: '/home/ubuntu/touched' + """ + engine.test("DEPLOY{}".format(self.step), "Edit VNFD ", "PATCH", + "/vnfpkgm/v1/vnf_packages/<{}>".format(self.vnfds_test[0]), headers_yaml, payload, 204, None, None) + self.step += 1 + + def run(self, engine, test_osm, manual_check, test_params=None): + super().run(engine, test_osm, manual_check, test_params) + + +class TestDeployHackfest3Charmed(TestDeploy): + description = "Load and deploy Hackfest 3charmed_ns example. 
Modifies it to add scaling and performs " \
+                  "primitive actions and scaling"
+
+    def __init__(self):
+        super().__init__()
+        self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
+        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
+        self.uses_configuration = True
+
+    # def create_descriptors(self, engine):
+    #     super().create_descriptors(engine)
+    #     # Modify VNFD to add scaling
+    #     payload = """
+    #         scaling-group-descriptor:
+    #             -   name: "scale_dataVM"
+    #                 max-instance-count: 10
+    #                 scaling-policy:
+    #                 -   name: "auto_cpu_util_above_threshold"
+    #                     scaling-type: "automatic"
+    #                     threshold-time: 0
+    #                     cooldown-time: 60
+    #                     scaling-criteria:
+    #                     -   name: "cpu_util_above_threshold"
+    #                         scale-in-threshold: 15
+    #                         scale-in-relational-operation: "LE"
+    #                         scale-out-threshold: 60
+    #                         scale-out-relational-operation: "GE"
+    #                         vnf-monitoring-param-ref: "all_aaa_cpu_util"
+    #                 vdu:
+    #                 -   vdu-id-ref: dataVM
+    #                     count: 1
+    #                 scaling-config-action:
+    #                 -   trigger: post-scale-out
+    #                     vnf-config-primitive-name-ref: touch
+    #                 -   trigger: pre-scale-in
+    #                     vnf-config-primitive-name-ref: touch
+    #         vnf-configuration:
+    #             config-primitive:
+    #             -   name: touch
+    #                 parameter:
+    #                 -   name: filename
+    #                     data-type: STRING
+    #                     default-value: '/home/ubuntu/touched'
+    #         """
+    #     engine.test("DEPLOY{}".format(self.step), "Edit VNFD ", "PATCH",
+    #                 "/vnfpkgm/v1/vnf_packages/<{}>".format(self.vnfds_test[0]),
+    #                 headers_yaml, payload, 200,
+    #                 r_header_yaml, yaml)
+    #     self.vnfds_test.append("DEPLOY" + str(self.step))
+    #     self.step += 1
+
+    def aditional_operations(self, engine, test_osm, manual_check):
+        if not test_osm:
+            return
+        # 1 perform action
+        payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
+        engine.test("DEPLOY{}".format(self.step), "Execute service primitive over NS", "POST",
+                    "/nslcm/v1/ns_instances/<{}>/action".format(self.ns_test), headers_yaml, payload,
+                    201, {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}, "yaml")
+        nslcmop2_action = "DEPLOY{}".format(self.step)
+        self.step += 1
+        # Wait until status is Ok
+        self._wait_nslcmop_ready(engine, nslcmop2_action, timeout_deploy)
+        if manual_check:
+            input('NS service primitive has been executed. Check that file /home/ubuntu/OSMTESTNBI is present at '
+                  'TODO_PUT_IP')
+        # TODO check automatic
+
+        # # 2 perform scale out
+        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
+        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        # engine.test("DEPLOY{}".format(self.step), "Execute scale action over NS", "POST",
+        #             "/nslcm/v1/ns_instances/<{}>/scale".format(self.ns_test), headers_yaml, payload,
+        #             201, {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}, "yaml")
+        # nslcmop2_scale_out = "DEPLOY{}".format(self.step)
+        # self._wait_nslcmop_ready(engine, nslcmop2_scale_out, timeout_deploy)
+        # if manual_check:
+        #     input('NS scale out done. 
Check that file /home/ubuntu/touched is present and new VM is created') + # # TODO check automatic + # + # # 2 perform scale in + # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \ + # '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}' + # engine.test("DEPLOY{}".format(self.step), "Execute scale action over NS", "POST", + # "/nslcm/v1/ns_instances/<{}>/scale".format(self.ns_test), headers_yaml, payload, + # 201, {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}, "yaml") + # nslcmop2_scale_in = "DEPLOY{}".format(self.step) + # self._wait_nslcmop_ready(engine, nslcmop2_scale_in, timeout_deploy) + # if manual_check: + # input('NS scale in done. Check that file /home/ubuntu/touched is updated and new VM is deleted') + # # TODO check automatic + + def run(self, engine, test_osm, manual_check, test_params=None): + super().run(engine, test_osm, manual_check, test_params) + if __name__ == "__main__": global logger @@ -234,16 +1032,38 @@ if __name__ == "__main__": logger = logging.getLogger('NBI') # load parameters and configuration opts, args = getopt.getopt(sys.argv[1:], "hvu:p:", - ["url=", "user=", "password=", "help", "version", "verbose", "project=", "insecure"]) + ["url=", "user=", "password=", "help", "version", "verbose", "no-verbose", + "project=", "insecure", "timeout", "timeout-deploy", "timeout-configure", + "test=", "list", "test-osm", "manual-check", "params="]) url = "https://localhost:9999/osm" user = password = project = "admin" + test_osm = False + manual_check = False verbose = 0 verify = True + test_classes = { + "NonAuthorized": TestNonAuthorized, + "FakeVIM": TestFakeVim, + "TestUsersProjects": TestUsersProjects, + "VIM-SDN": TestVIMSDN, + "Deploy-Custom": TestDeploy, + "Deploy-Hackfest-Cirros": TestDeployHackfestCirros, + "Deploy-Hackfest-3Charmed": TestDeployHackfest3Charmed, + "Deploy-Hackfest-4": TestDeployHackfest4, + "Deploy-CirrosMacIp": TestDeployIpMac, + } + test_to_do = [] + test_params = {} for o, a in opts: + # print("parameter:", o, a) if o == "--version": print("test version " + __version__ + ' ' + version_date) - sys.exit() + exit() + elif o == "--list": + for test, test_class in test_classes.items(): + print("{:20} {}".format(test + ":", test_class.description)) + exit() elif o in ("-v", "--verbose"): verbose += 1 elif o == "no-verbose": @@ -251,6 +1071,10 @@ if __name__ == "__main__": elif o in ("-h", "--help"): usage() sys.exit() + elif o == "--test-osm": + test_osm = True + elif o == "--manual-check": + manual_check = True elif o == "--url": url = a elif o in ("-u", "--user"): @@ -259,8 +1083,28 @@ if __name__ == "__main__": password = a elif o == "--project": project = a + elif o == "--test": + # print("asdfadf", o, a, a.split(",")) + for _test in a.split(","): + if _test not in test_classes: + print("Invalid test name '{}'. 
Use option '--list' to show available tests".format(_test), + file=sys.stderr) + exit(1) + test_to_do.append(_test) + elif o == "--params": + param_key, _, param_value = a.partition("=") + text_index = len(test_to_do) + if text_index not in test_params: + test_params[text_index] = {} + test_params[text_index][param_key] = param_value elif o == "--insecure": verify = False + elif o == "--timeout": + timeout = int(a) + elif o == "--timeout-deploy": + timeout_deploy = int(a) + elif o == "--timeout-configure": + timeout_configure = int(a) else: assert False, "Unhandled option" if verbose == 0: @@ -270,155 +1114,158 @@ if __name__ == "__main__": else: logger.setLevel(logging.ERROR) - test_rest = TestRest(url) - - # tests without authorization - for t in test_not_authorized_list: - test_rest.test(*t) + test_rest = TestRest(url, user=user, password=password, project=project) + # print("tests to do:", test_to_do) + if test_to_do: + text_index = 0 + for test in test_to_do: + text_index += 1 + test_class = test_classes[test] + test_class().run(test_rest, test_osm, manual_check, test_params.get(text_index)) + else: + for test, test_class in test_classes.items(): + test_class().run(test_rest, test_osm, manual_check, test_params.get(0)) + exit(0) # get token - r = test_rest.test("token1", "Obtain token", "POST", "/admin/v1/tokens", headers_json, - {"username": user, "password": password, "project_id": project}, - (200, 201), {"Content-Type": "application/json"}, "json") - response = r.json() - token = response["id"] - test_rest.set_header({"Authorization": "Bearer {}".format(token)}) - - # tests once authorized - for t in test_authorized_list: - test_rest.test(*t) - - # tests admin - for t in test_admin_list1: - test_rest.test(*t) - - # vnfd CREATE - r = test_rest.test("VNFD1", "Onboard VNFD step 1", "POST", "/vnfpkgm/v1/vnf_packages", headers_json, None, - 201, {"Location": "/vnfpkgm/v1/vnf_packages/", "Content-Type": "application/json"}, "json") - location = r.headers["Location"] - vnfd_id = location[location.rfind("/")+1:] - # print(location, vnfd_id) - - # vnfd UPLOAD test - r = test_rest.test("VNFD2", "Onboard VNFD step 2 as TEXT", "PUT", - "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), - r_header_text, "@./cirros_vnf/cirros_vnfd.yaml", 204, None, 0) - - # vnfd SHOW OSM format - r = test_rest.test("VNFD3", "Show VNFD OSM format", "GET", - "/vnfpkgm/v1/vnf_packages_content/{}".format(vnfd_id), - headers_json, None, 200, r_header_json, "json") - - # vnfd SHOW text - r = test_rest.test("VNFD4", "Show VNFD SOL005 text", "GET", - "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), - headers_text, None, 200, r_header_text, "text") - - # vnfd UPLOAD ZIP - makedirs("temp", exist_ok=True) - tar = tarfile.open("temp/cirros_vnf.tar.gz", "w:gz") - tar.add("cirros_vnf") - tar.close() - r = test_rest.test("VNFD5", "Onboard VNFD step 3 replace with ZIP", "PUT", - "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), - r_header_zip, "@b./temp/cirros_vnf.tar.gz", 204, None, 0) - - # vnfd SHOW OSM format - r = test_rest.test("VNFD6", "Show VNFD OSM format", "GET", - "/vnfpkgm/v1/vnf_packages_content/{}".format(vnfd_id), - headers_json, None, 200, r_header_json, "json") - - # vnfd SHOW zip - r = test_rest.test("VNFD7", "Show VNFD SOL005 zip", "GET", - "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), - headers_zip, None, 200, r_header_zip, "zip") - # vnfd SHOW descriptor - r = test_rest.test("VNFD8", "Show VNFD descriptor", "GET", - 
"/vnfpkgm/v1/vnf_packages/{}/vnfd".format(vnfd_id), - headers_text, None, 200, r_header_text, "text") - # vnfd SHOW actifact - r = test_rest.test("VNFD9", "Show VNFD artifact", "GET", - "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/cirros-64.png".format(vnfd_id), - headers_text, None, 200, r_header_octect, "text") - - # # vnfd DELETE - # r = test_rest.test("VNFD10", "Delete VNFD SOL005 text", "DELETE", - # "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), - # headers_yaml, None, 204, None, 0) - - # nsd CREATE - r = test_rest.test("NSD1", "Onboard NSD step 1", "POST", "/nsd/v1/ns_descriptors", headers_json, None, - 201, {"Location": "/nsd/v1/ns_descriptors/", "Content-Type": "application/json"}, "json") - location = r.headers["Location"] - nsd_id = location[location.rfind("/")+1:] - # print(location, nsd_id) - # nsd UPLOAD test - r = test_rest.test("NSD2", "Onboard NSD with missing vnfd", "PUT", - "/nsd/v1/ns_descriptors/{}/nsd_content?constituent-vnfd.0.vnfd-id-ref" - "=NONEXISTING-VNFD".format(nsd_id), - r_header_text, "@./cirros_ns/cirros_nsd.yaml", 409, r_header_yaml, "yaml") - - # # VNF_CREATE + # # tests once authorized + # for t in test_authorized_list: + # test_rest.test(*t) + # + # # tests admin + # for t in test_admin_list1: + # test_rest.test(*t) + # + # # vnfd CREATE + # r = test_rest.test("VNFD1", "Onboard VNFD step 1", "POST", "/vnfpkgm/v1/vnf_packages", headers_json, None, + # 201, {"Location": "/vnfpkgm/v1/vnf_packages/", "Content-Type": "application/json"}, "json") + # location = r.headers["Location"] + # vnfd_id = location[location.rfind("/")+1:] + # # print(location, vnfd_id) + # + # # vnfd UPLOAD test + # r = test_rest.test("VNFD2", "Onboard VNFD step 2 as TEXT", "PUT", + # "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), + # r_header_text, "@./cirros_vnf/cirros_vnfd.yaml", 204, None, 0) + # + # # vnfd SHOW OSM format + # r = test_rest.test("VNFD3", "Show VNFD OSM format", "GET", + # "/vnfpkgm/v1/vnf_packages_content/{}".format(vnfd_id), + # headers_json, None, 200, r_header_json, "json") + # + # # vnfd SHOW text + # r = test_rest.test("VNFD4", "Show VNFD SOL005 text", "GET", + # "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), + # headers_text, None, 200, r_header_text, "text") + # + # # vnfd UPLOAD ZIP + # makedirs("temp", exist_ok=True) + # tar = tarfile.open("temp/cirros_vnf.tar.gz", "w:gz") + # tar.add("cirros_vnf") + # tar.close() # r = test_rest.test("VNFD5", "Onboard VNFD step 3 replace with ZIP", "PUT", - # "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), + # "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), # r_header_zip, "@b./temp/cirros_vnf.tar.gz", 204, None, 0) - - r = test_rest.test("NSD2", "Onboard NSD step 2 as TEXT", "PUT", - "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), - r_header_text, "@./cirros_ns/cirros_nsd.yaml", 204, None, 0) - - # nsd SHOW OSM format - r = test_rest.test("NSD3", "Show NSD OSM format", "GET", "/nsd/v1/ns_descriptors_content/{}".format(nsd_id), - headers_json, None, 200, r_header_json, "json") - - # nsd SHOW text - r = test_rest.test("NSD4", "Show NSD SOL005 text", "GET", - "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), - headers_text, None, 200, r_header_text, "text") - - # nsd UPLOAD ZIP - makedirs("temp", exist_ok=True) - tar = tarfile.open("temp/cirros_ns.tar.gz", "w:gz") - tar.add("cirros_ns") - tar.close() - r = test_rest.test("NSD5", "Onboard NSD step 3 replace with ZIP", "PUT", - "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), - r_header_zip, 
"@b./temp/cirros_ns.tar.gz", 204, None, 0) - - # nsd SHOW OSM format - r = test_rest.test("NSD6", "Show NSD OSM format", "GET", "/nsd/v1/ns_descriptors_content/{}".format(nsd_id), - headers_json, None, 200, r_header_json, "json") - - # nsd SHOW zip - r = test_rest.test("NSD7", "Show NSD SOL005 zip", "GET", "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), - headers_zip, None, 200, r_header_zip, "zip") - - # nsd SHOW descriptor - r = test_rest.test("NSD8", "Show NSD descriptor", "GET", "/nsd/v1/ns_descriptors/{}/nsd".format(nsd_id), - headers_text, None, 200, r_header_text, "text") - # nsd SHOW actifact - r = test_rest.test("NSD9", "Show NSD artifact", "GET", - "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm_2x.png".format(nsd_id), - headers_text, None, 200, r_header_octect, "text") - - # vnfd DELETE - r = test_rest.test("VNFD10", "Delete VNFD conflict", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), - headers_yaml, None, 409, r_header_yaml, "yaml") - - # nsd DELETE - r = test_rest.test("NSD10", "Delete NSD SOL005 text", "DELETE", "/nsd/v1/ns_descriptors/{}".format(nsd_id), - headers_yaml, None, 204, None, 0) - - # vnfd DELETE - r = test_rest.test("VNFD10", "Delete VNFD SOL005 text", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), - headers_yaml, None, 204, None, 0) - + # + # # vnfd SHOW OSM format + # r = test_rest.test("VNFD6", "Show VNFD OSM format", "GET", + # "/vnfpkgm/v1/vnf_packages_content/{}".format(vnfd_id), + # headers_json, None, 200, r_header_json, "json") + # + # # vnfd SHOW zip + # r = test_rest.test("VNFD7", "Show VNFD SOL005 zip", "GET", + # "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), + # headers_zip, None, 200, r_header_zip, "zip") + # # vnfd SHOW descriptor + # r = test_rest.test("VNFD8", "Show VNFD descriptor", "GET", + # "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(vnfd_id), + # headers_text, None, 200, r_header_text, "text") + # # vnfd SHOW actifact + # r = test_rest.test("VNFD9", "Show VNFD artifact", "GET", + # "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/cirros-64.png".format(vnfd_id), + # headers_text, None, 200, r_header_octect, "text") + # + # # # vnfd DELETE + # # r = test_rest.test("VNFD10", "Delete VNFD SOL005 text", "DELETE", + # # "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), + # # headers_yaml, None, 204, None, 0) + # + # # nsd CREATE + # r = test_rest.test("NSD1", "Onboard NSD step 1", "POST", "/nsd/v1/ns_descriptors", headers_json, None, + # 201, {"Location": "/nsd/v1/ns_descriptors/", "Content-Type": "application/json"}, "json") + # location = r.headers["Location"] + # nsd_id = location[location.rfind("/")+1:] + # # print(location, nsd_id) + # + # # nsd UPLOAD test + # r = test_rest.test("NSD2", "Onboard NSD with missing vnfd", "PUT", + # "/nsd/v1/ns_descriptors/<>/nsd_content?constituent-vnfd.0.vnfd-id-ref" + # "=NONEXISTING-VNFD".format(nsd_id), + # r_header_text, "@./cirros_ns/cirros_nsd.yaml", 409, r_header_yaml, "yaml") + # + # # # VNF_CREATE + # # r = test_rest.test("VNFD5", "Onboard VNFD step 3 replace with ZIP", "PUT", + # # "/vnfpkgm/v1/vnf_packages/{}/package_content".format(vnfd_id), + # # r_header_zip, "@b./temp/cirros_vnf.tar.gz", 204, None, 0) + # + # r = test_rest.test("NSD2", "Onboard NSD step 2 as TEXT", "PUT", + # "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), + # r_header_text, "@./cirros_ns/cirros_nsd.yaml", 204, None, 0) + # + # # nsd SHOW OSM format + # r = test_rest.test("NSD3", "Show NSD OSM format", "GET", "/nsd/v1/ns_descriptors_content/{}".format(nsd_id), + # headers_json, None, 
200, r_header_json, "json") + # + # # nsd SHOW text + # r = test_rest.test("NSD4", "Show NSD SOL005 text", "GET", + # "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), + # headers_text, None, 200, r_header_text, "text") + # + # # nsd UPLOAD ZIP + # makedirs("temp", exist_ok=True) + # tar = tarfile.open("temp/cirros_ns.tar.gz", "w:gz") + # tar.add("cirros_ns") + # tar.close() + # r = test_rest.test("NSD5", "Onboard NSD step 3 replace with ZIP", "PUT", + # "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), + # r_header_zip, "@b./temp/cirros_ns.tar.gz", 204, None, 0) + # + # # nsd SHOW OSM format + # r = test_rest.test("NSD6", "Show NSD OSM format", "GET", "/nsd/v1/ns_descriptors_content/{}".format(nsd_id), + # headers_json, None, 200, r_header_json, "json") + # + # # nsd SHOW zip + # r = test_rest.test("NSD7","Show NSD SOL005 zip","GET", "/nsd/v1/ns_descriptors/{}/nsd_content".format(nsd_id), + # headers_zip, None, 200, r_header_zip, "zip") + # + # # nsd SHOW descriptor + # r = test_rest.test("NSD8", "Show NSD descriptor", "GET", "/nsd/v1/ns_descriptors/{}/nsd".format(nsd_id), + # headers_text, None, 200, r_header_text, "text") + # # nsd SHOW actifact + # r = test_rest.test("NSD9", "Show NSD artifact", "GET", + # "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm_2x.png".format(nsd_id), + # headers_text, None, 200, r_header_octect, "text") + # + # # vnfd DELETE + # r = test_rest.test("VNFD10", "Delete VNFD conflict", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), + # headers_yaml, None, 409, r_header_yaml, "yaml") + # + # # nsd DELETE + # r = test_rest.test("NSD10", "Delete NSD SOL005 text", "DELETE", "/nsd/v1/ns_descriptors/{}".format(nsd_id), + # headers_yaml, None, 204, None, 0) + # + # # vnfd DELETE + # r = test_rest.test("VNFD10","Delete VNFD SOL005 text","DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), + # headers_yaml, None, 204, None, 0) print("PASS") + except TestException as e: + logger.error(test + "Test {} Exception: {}".format(test, str(e))) + exit(1) + except getopt.GetoptError as e: + logger.error(e) + print(e, file=sys.stderr) + exit(1) except Exception as e: - if test: - logger.error(test + " Exception: " + str(e)) - exit(1) - else: - logger.critical(test + " Exception: " + str(e), exc_info=True) + logger.critical(test + " Exception: " + str(e), exc_info=True) diff --git a/osm_nbi/validation.py b/osm_nbi/validation.py index 308d872..4ffd0b7 100644 --- a/osm_nbi/validation.py +++ b/osm_nbi/validation.py @@ -12,16 +12,18 @@ Validator of input data using JSON schemas for those items that not contains an # Basis schemas patern_name = "^[ -~]+$" +nameshort_schema = {"type": "string", "minLength": 1, "maxLength": 60, "pattern": "^[^,;()\.\$'\"]+$"} passwd_schema = {"type": "string", "minLength": 1, "maxLength": 60} -nameshort_schema = {"type": "string", "minLength": 1, "maxLength": 60, "pattern": "^[^,;()'\"]+$"} name_schema = {"type": "string", "minLength": 1, "maxLength": 255, "pattern": "^[^,;()'\"]+$"} string_schema = {"type": "string", "minLength": 1, "maxLength": 255} xml_text_schema = {"type": "string", "minLength": 1, "maxLength": 1000, "pattern": "^[^']+$"} description_schema = {"type": ["string", "null"], "maxLength": 255, "pattern": "^[^'\"]+$"} -id_schema_fake = {"type": "string", "minLength": 2, - "maxLength": 36} +id_schema_fake = {"type": "string", "minLength": 2, "maxLength": 36} +bool_schema = {"type": "boolean"} +null_schema = {"type": "null"} # "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" 
id_schema = {"type": "string", "pattern": "^[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}$"} +time_schema = {"type": "string", "pattern": "^[0-9]{4}-[0-1][0-9]-[0-3][0-9]T[0-2][0-9]([0-5]:){2}"} pci_schema = {"type": "string", "pattern": "^[0-9a-fA-F]{4}(:[0-9a-fA-F]{2}){2}\.[0-9a-fA-F]$"} http_schema = {"type": "string", "pattern": "^https?://[^'\"=]+$"} bandwidth_schema = {"type": "string", "pattern": "^[0-9]+ *([MG]bps)?$"} @@ -46,7 +48,134 @@ schema_version_2 = {"type": "integer", "minimum": 2, "maximum": 2} log_level_schema = {"type": "string", "enum": ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]} checksum_schema = {"type": "string", "pattern": "^[0-9a-fA-F]{32}$"} size_schema = {"type": "integer", "minimum": 1, "maximum": 100} +array_edition_schema = { + "type": "object", + "patternProperties": { + "^\$": "Any" + }, + "additionalProperties": False, + "minProperties": 1, +} + + +ns_instantiate_vdu = { + "title": "ns action instantiate input schema for vdu", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": name_schema, + "volume": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "name": name_schema, + "vim-volume-id": name_schema, + }, + "required": ["name", "vim-volume-id"], + "additionalProperties": False + } + }, + "interface": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "name": name_schema, + "ip-address": ip_schema, + "mac-address": mac_schema, + "floating-ip-required": bool_schema, + }, + "required": ["name"], + "additionalProperties": False + } + } + }, + "required": ["id"], + "additionalProperties": False +} + +ip_profile_dns_schema = { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "address": ip_schema, + }, + "required": ["address"], + "additionalProperties": False + } +} + +ip_profile_dhcp_schema = { + "type": "object", + "properties": { + "enabled": {"type": "boolean"}, + "count": integer1_schema, + "start-address": ip_schema + }, + "additionalProperties": False, +} + +ip_profile_schema = { + "title": "ip profile validation schame", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ip-version": {"enum": ["ipv4", "ipv6"]}, + "subnet-address": ip_prefix_schema, + "gateway-address": ip_schema, + "dns-server": ip_profile_dns_schema, + "dhcp-params": ip_profile_dhcp_schema, + } +} + +ip_profile_update_schema = { + "title": "ip profile validation schame", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ip-version": {"enum": ["ipv4", "ipv6"]}, + "subnet-address": {"oneOf": [null_schema, ip_prefix_schema]}, + "gateway-address": {"oneOf": [null_schema, ip_schema]}, + "dns-server": {"oneOf": [null_schema, ip_profile_dns_schema]}, + + "dhcp-params": {"oneOf": [null_schema, ip_profile_dhcp_schema]}, + }, + "additionalProperties": False +} +ns_instantiate_internal_vld = { + "title": "ns action instantiate input schema for vdu", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "name": name_schema, + "vim-network-name": name_schema, + "ip-profile": ip_profile_update_schema, + "internal-connection-point": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "id-ref": name_schema, + "ip-address": ip_schema, + # "mac-address": mac_schema, + }, + "required": ["id-ref"], + "minProperties": 2, + "additionalProperties": False + 
}, + } + }, + "required": ["name"], + "minProperties": 2, + "additionalProperties": False +} ns_instantiate = { "title": "ns action instantiate input schema", @@ -54,10 +183,11 @@ ns_instantiate = { "type": "object", "properties": { "nsName": name_schema, - "nsDescription": description_schema, + "nsDescription": {"oneOf": [description_schema, {"type": "null"}]}, "nsdId": id_schema, "vimAccountId": id_schema, "ssh_keys": {"type": "string"}, + "nsr_id": id_schema, "vnf": { "type": "array", "minItems": 1, @@ -66,8 +196,20 @@ ns_instantiate = { "properties": { "member-vnf-index": name_schema, "vimAccountId": id_schema, + "vdu": { + "type": "array", + "minItems": 1, + "items": ns_instantiate_vdu, + }, + "internal-vld": { + "type": "array", + "minItems": 1, + "items": ns_instantiate_internal_vld + } }, - "required": ["member-vnf-index"] + "required": ["member-vnf-index"], + "minProperties": 2, + "additionalProperties": False } }, "vld": { @@ -79,27 +221,76 @@ ns_instantiate = { "name": string_schema, "vim-network-name": {"OneOf": [string_schema, object_schema]}, "ip-profile": object_schema, + "vnfd-connection-point-ref": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "member-vnf-index-ref": name_schema, + "vnfd-connection-point-ref": name_schema, + "ip-address": ip_schema, + # "mac-address": mac_schema, + }, + "required": ["member-vnf-index-ref", "vnfd-connection-point-ref"], + "minProperties": 3, + "additionalProperties": False + }, + } }, - "required": ["name"] + "required": ["name"], + "additionalProperties": False } }, }, - "required": ["nsName", "nsdId", "vimAccountId"] + "required": ["nsName", "nsdId", "vimAccountId"], + "additionalProperties": False } ns_action = { # TODO for the moment it is only contemplated the vnfd primitive execution - "title": "ns action update input schema", + "title": "ns action input schema", "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": { "member_vnf_index": name_schema, "vnf_member_index": name_schema, # TODO for backward compatibility. 
To remove in future + "vdu_id": name_schema, "primitive": name_schema, "primitive_params": {"type": "object"}, }, "required": ["primitive", "primitive_params"], # TODO add member_vnf_index "additionalProperties": False } +ns_scale = { # TODO for the moment it is only VDU-scaling + "title": "ns scale input schema", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "scaleType": {"enum": ["SCALE_VNF"]}, + "scaleVnfData": { + "type": "object", + "properties": { + "vnfInstanceId": name_schema, + "scaleVnfType": {"enum": ["SCALE_OUT", 'SCALE_IN']}, + "scaleByStepData": { + "type": "object", + "properties": { + "scaling-group-descriptor": name_schema, + "member-vnf-index": name_schema, + "scaling-policy": name_schema, + }, + "required": ["scaling-group-descriptor", "member-vnf-index"], + "additionalProperties": False + }, + }, + "required": ["scaleVnfType", "scaleByStepData"], # vnfInstanceId + "additionalProperties": False + }, + "scaleTime": time_schema, + }, + "required": ["scaleType", "scaleVnfData"], + "additionalProperties": False +} schema_version = {"type": "string", "enum": ["1.0"]} @@ -118,7 +309,7 @@ vim_account_edit_schema = { "vim_tenant": name_schema, "vim_tenant_name": name_schema, "vim_username": nameshort_schema, - "vim_password": nameshort_schema, + "vim_password": passwd_schema, "config": {"type": "object"} }, "additionalProperties": False @@ -142,7 +333,7 @@ vim_account_new_schema = { # "vim_tenant": name_schema, "vim_tenant_name": name_schema, "vim_user": nameshort_schema, - "vim_password": nameshort_schema, + "vim_password": passwd_schema, "config": {"type": "object"} }, "required": ["name", "vim_url", "vim_type", "vim_user", "vim_password", "vim_tenant_name"], @@ -203,7 +394,7 @@ sdn_port_mapping_schema = { } sdn_external_port_schema = { "$schema": "http://json-schema.org/draft-04/schema#", - "title": "External port ingformation", + "title": "External port information", "type": "object", "properties": { "port": {"type": "string", "minLength": 1, "maxLength": 60}, @@ -213,15 +404,77 @@ sdn_external_port_schema = { "required": ["port"] } +# USERS +user_project_schema = { + "type": "array", + "minItems": 1, + "items": nameshort_schema, +} +user_new_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "New user schema", + "type": "object", + "properties": { + "username": nameshort_schema, + "password": passwd_schema, + "projects": user_project_schema, + }, + "required": ["username", "password", "projects"], + "additionalProperties": False +} +user_edit_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "User edit schema for administrators", + "type": "object", + "properties": { + "password": passwd_schema, + "projects": { + "oneOff": [ + user_project_schema, + array_edition_schema + ] + }, + } +} + +# PROJECTS +project_new_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "New project schema for administrators", + "type": "object", + "properties": { + "name": nameshort_schema, + "admin": bool_schema, + }, + "required": ["name"], + "additionalProperties": False +} +project_edit_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Project edit schema for administrators", + "type": "object", + "properties": { + "admin": bool_schema, + }, + "additionalProperties": False, + "minProperties": 1 +} + +# GLOBAL SCHEMAS nbi_new_input_schemas = { + "users": user_new_schema, + "projects": project_new_schema, "vim_accounts": 
vim_account_new_schema, "sdns": sdn_new_schema, "ns_instantiate": ns_instantiate, "ns_action": ns_action, + "ns_scale": ns_scale } nbi_edit_input_schemas = { + "users": user_edit_schema, + "projects": project_edit_schema, "vim_accounts": vim_account_edit_schema, "sdns": sdn_edit_schema } @@ -233,7 +486,7 @@ class ValidationError(Exception): def validate_input(indata, item, new=True): """ - Validates input data agains json schema + Validates input data against json schema :param indata: user input data. Should be a dictionary :param item: can be users, projects, vims, sdns, ns_xxxxx :param new: True if the validation is for creating or False if it is for editing @@ -252,4 +505,4 @@ def validate_input(indata, item, new=True): error_pos = "at '" + ":".join(map(str, e.path)) + "'" else: error_pos = "" - raise ValidationError("Format error {} '{}' ".format(error_pos, e)) + raise ValidationError("Format error {} '{}' ".format(error_pos, e.message)) diff --git a/setup.py b/setup.py index 3cdb8ad..f4c9f0d 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name=_name, description='OSM North Bound Interface', long_description=README, - version_command=('git describe --tags --long --dirty', 'pep440-git'), + version_command=('git describe --match v* --tags --long --dirty', 'pep440-git'), # version=VERSION, # python_requires='>3.5.0', author='ETSI OSM',