Feature 11023 - 11026 : Advanced cluster management 73/14473/11
author rshri <shrinithi.r@tataelxsi.co.in>
Fri, 5 Jul 2024 14:35:51 +0000 (14:35 +0000)
committer rshri <shrinithi.r@tataelxsi.co.in>
Fri, 16 Aug 2024 11:11:59 +0000 (11:11 +0000)
Change-Id: I5177211eedcb61f599f60f43c01a3fbb21faf8f8
Signed-off-by: rshri <shrinithi.r@tataelxsi.co.in>
osm_nbi/base_topic.py
osm_nbi/descriptor_topics.py
osm_nbi/engine.py
osm_nbi/instance_topics.py
osm_nbi/k8s_topics.py [new file with mode: 0644]
osm_nbi/nbi.py
osm_nbi/resources_to_operations.yml
osm_nbi/validation.py

index 5b5818c..ab60dc1 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
+# import logging
+import random
+import string
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
 from osm_common.dbbase import deep_update_rfc7396, DbException
+from osm_common.msgbase import MsgException
+from osm_common.fsbase import FsException
 from osm_nbi.validation import validate_input, ValidationError, is_valid_uuid
 from yaml import safe_load, YAMLError
 
@@ -198,7 +202,7 @@ class BaseTopic:
         self.db = db
         self.fs = fs
         self.msg = msg
-        self.logger = logging.getLogger("nbi.engine")
+        # self.logger = logging.getLogger("nbi.base")
         self.auth = auth
 
     @staticmethod
@@ -263,6 +267,7 @@ class BaseTopic:
         :return: The same input content, or a changed version of it.
         """
         if self.schema_edit:
+            # self.logger.info("the schema edit is : {}".format(self.schema_edit))
             validate_input(input, self.schema_edit)
         return input
 
@@ -405,6 +410,31 @@ class BaseTopic:
                 content["_admin"]["projects_write"] = list(project_id)
         return None
 
+    @staticmethod
+    def format_on_operation(content, operation_type, operation_params):
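+        """
+        Append a new entry to the item's "operationHistory" list.
+        :param content: item content, updated in place
+        :param operation_type: type of operation, e.g. "create", "delete", "add", "remove"
+        :param operation_params: parameters of the operation, stored for later reference
+        :return: operation id (the current_operation id if already set, otherwise a new uuid)
+        """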
+        if content["current_operation"] is None:
+            op_id = str(uuid4())
+            content["current_operation"] = op_id
+        else:
+            op_id = content["current_operation"]
+        now = time()
+        if "operationHistory" not in content:
+            content["operationHistory"] = []
+
+        operation = {}
+        operation["operationType"] = operation_type
+        operation["git_operation_info"] = None
+        operation["op_id"] = op_id
+        operation["result"] = None
+        operation["workflowState"] = "PROCESSING"
+        operation["resourceState"] = "NOT_READY"
+        operation["creationDate"] = now
+        operation["endDate"] = None
+        operation["operationParams"] = operation_params
+
+        content["operationHistory"].append(operation)
+        return op_id
+
     @staticmethod
     def format_on_edit(final_content, edit_content):
         """
@@ -618,6 +648,130 @@ class BaseTopic:
             HTTPStatus.INTERNAL_SERVER_ERROR,
         )
 
+    def create_gitname(self, content, session, _id=None):
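+        """
+        Derive the git repository name for an item from its "name". If another item
+        with the same name already exists in this topic, a random 5-character suffix
+        is appended so that the git name remains unique.
+        """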
+        if not self.multiproject:
+            _filter = {}
+        else:
+            _filter = self._get_project_filter(session)
+        _filter["name"] = content["name"]
+        if _id:
+            _filter["_id.neq"] = _id
+        if self.db.get_one(
+            self.topic, _filter, fail_on_empty=False, fail_on_more=False
+        ):
+            # name collision: append a random 5-character suffix and lowercase the result
+            suffix = "".join(
+                random.choices(string.ascii_lowercase + string.digits, k=5)
+            )
+            return (content["name"] + suffix).lower()
+        else:
+            return content["name"]
+
+    def new_profile(self, rollback, session, indata=None, kwargs=None, headers=None):
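+        """
+        Create a new profile (infra controller, infra config, app or resource) at database.
+        :param rollback: list to append created items at database in case a rollback must be done
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param indata: profile parameters
+        :param kwargs: used to override the indata
+        :param headers: http request headers
+        :return: _id of the created profile and None as operation id
+        """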
+        step = "name unique check"
+        try:
+            self.check_unique_name(session, indata["name"])
+
+            step = "validating input parameters"
+            profile_request = self._remove_envelop(indata)
+            self._update_input_with_kwargs(profile_request, kwargs)
+            profile_request = self._validate_input_new(
+                profile_request, session["force"]
+            )
+            operation_params = profile_request
+
+            step = "filling profile details from input data"
+            profile_create = self._create_profile(profile_request, session)
+
+            step = "creating profile at database"
+            self.format_on_new(
+                profile_create, session["project_id"], make_public=session["public"]
+            )
+            profile_create["current_operation"] = None
+            op_id = self.format_on_operation(
+                profile_create,
+                "create",
+                operation_params,
+            )
+
+            _id = self.db.create(self.topic, profile_create)
+            rollback.append({"topic": self.topic, "_id": _id})
+            self.db.set_one(self.topic, {"_id": _id}, profile_create)
+            if op_id:
+                profile_create["op_id"] = op_id
+            self._send_msg("profile_create", {"profile_id": _id, "operation_id": op_id})
+
+            return _id, None
+        except (
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
+        ) as e:
+            raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
+
+    def _create_profile(self, profile_request, session):
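+        # Initial descriptor of a user-created profile as stored at database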
+        profile_desc = {
+            "name": profile_request["name"],
+            "description": profile_request["description"],
+            "default": False,
+            "git_name": self.create_gitname(profile_request, session),
+            "state": "IN_CREATION",
+            "operatingState": "IN_PROGRESS",
+            "resourceState": "IN_PROGRESS.REQUEST_RECEIVED",
+        }
+        return profile_desc
+
+    def default_profile(
+        self, rollback, session, indata=None, kwargs=None, headers=None
+    ):
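+        """
+        Create the default profile that is attached to a cluster at creation time.
+        :return: _id of the created default profile
+        """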
+        step = "validating input parameters"
+        try:
+            profile_request = self._remove_envelop(indata)
+            self._update_input_with_kwargs(profile_request, kwargs)
+            operation_params = profile_request
+
+            step = "filling profile details from input data"
+            profile_create = self._create_default_profile(profile_request, session)
+
+            step = "creating profile at database"
+            self.format_on_new(
+                profile_create, session["project_id"], make_public=session["public"]
+            )
+            profile_create["current_operation"] = None
+            self.format_on_operation(
+                profile_create,
+                "create",
+                operation_params,
+            )
+            _id = self.db.create(self.topic, profile_create)
+            rollback.append({"topic": self.topic, "_id": _id})
+            return _id
+        except (
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
+        ) as e:
+            raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
+
+    def _create_default_profile(self, profile_request, session):
+        profile_desc = {
+            "name": profile_request["name"],
+            "description": f"{self.topic} profile for cluster {profile_request['name']}",
+            "default": True,
+            "git_name": self.create_gitname(profile_request, session),
+            "state": "IN_CREATION",
+            "operatingState": "IN_PROGRESS",
+            "resourceState": "IN_PROGRESS.REQUEST_RECEIVED",
+        }
+        return profile_desc
+
     def delete_list(self, session, filter_q=None):
         """
         Delete a several entries of a topic. This is for internal usage and test only, not exposed to NBI API
@@ -660,9 +814,30 @@ class BaseTopic:
         else:
             filter_q = self._get_project_filter(session)
         filter_q[self.id_field(self.topic, _id)] = _id
+
         item_content = self.db.get_one(self.topic, filter_q)
         nsd_id = item_content.get("_id")
 
+        if self.topic in (
+            "k8sinfra_controller",
+            "k8sinfra_config",
+            "k8sapp",
+            "k8sresource",
+            "clusters",
+        ):
+            if "state" in item_content:
+                item_content["state"] = "IN_DELETION"
+                item_content["operatingState"] = "PROCESSING"
+                self.db.set_one(self.topic, {"_id": _id}, item_content)
+
+            item_content_1 = self.db.get_one(self.topic, filter_q)
+            item_content_1["current_operation"] = None
+            op_id = self.format_on_operation(
+                item_content_1,
+                "delete",
+                None,
+            )
+
         self.check_conflict_on_del(session, _id, item_content)
 
         # While deteling ns descriptor associated ns config template should also get deleted.
@@ -683,7 +858,6 @@ class BaseTopic:
                         )
         if dry_run:
             return None
-
         if self.multiproject and session["project_id"]:
             # remove reference from project_read if there are more projects referencing it. If it last one,
             # do not remove reference, but delete
@@ -695,7 +869,6 @@ class BaseTopic:
                 ),
                 None,
             )
-
             # check if there are projects referencing it (apart from ANY, that means, public)....
             if other_projects_referencing:
                 # remove references but not delete
@@ -721,11 +894,30 @@ class BaseTopic:
                         "You have not write permission to delete it",
                         http_code=HTTPStatus.UNAUTHORIZED,
                     )
-
         # delete
-        self.db.del_one(self.topic, filter_q)
-        self.delete_extra(session, _id, item_content, not_send_msg=not_send_msg)
-        self._send_msg("deleted", {"_id": _id}, not_send_msg=not_send_msg)
+        if self.topic in (
+            "k8sinfra_controller",
+            "k8sinfra_config",
+            "k8sapp",
+            "k8sresource",
+        ):
+            self.db.set_one(self.topic, {"_id": _id}, item_content_1)
+            self._send_msg(
+                "delete",
+                {"profile_id": _id, "operation_id": op_id},
+                not_send_msg=not_send_msg,
+            )
+        elif self.topic == "clusters":
+            self.db.set_one("clusters", {"_id": _id}, item_content_1)
+            self._send_msg(
+                "delete",
+                {"cluster_id": _id, "operation_id": op_id},
+                not_send_msg=not_send_msg,
+            )
+        else:
+            self.db.del_one(self.topic, filter_q)
+            self.delete_extra(session, _id, item_content, not_send_msg=not_send_msg)
+            self._send_msg("deleted", {"_id": _id}, not_send_msg=not_send_msg)
         return None
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
@@ -744,6 +936,23 @@ class BaseTopic:
         if kwargs:
             self._update_input_with_kwargs(indata, kwargs)
         try:
+            if self.topic in (
+                "k8sinfra_controller",
+                "k8sinfra_config",
+                "k8sapp",
+                "k8sresource",
+            ):
+                check = self.db.get_one(self.topic, {"_id": _id})
+                if check["default"] is True:
+                    raise EngineException(
+                        "Cannot edit default profiles",
+                        HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+                if "name" in indata and indata["name"] != check["name"]:
+                    self.check_unique_name(session, indata["name"])
             if indata and session.get("set_project"):
                 raise EngineException(
                     "Cannot edit content and set to project (query string SET_PROJECT) at same time",
@@ -767,7 +976,35 @@ class BaseTopic:
             if op_id:
                 indata["op_id"] = op_id
             indata["_id"] = _id
-            self._send_msg("edited", indata)
+            if self.topic not in (
+                "k8sinfra_controller",
+                "k8sinfra_config",
+                "k8sapp",
+                "k8sresource",
+            ):
+                self._send_msg("edited", indata)
             return op_id
         except ValidationError as e:
             raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    def detach(self, session, _id, profile_type):
+        # To detach the profiles from every cluster
+        filter_q = {}
+        existing_clusters = self.db.get_list("clusters", filter_q)
+        # collect the clusters whose <profile_type> list references this profile
+        existing_clusters_profiles = [
+            cluster["_id"]
+            for cluster in existing_clusters
+            if _id in cluster.get(profile_type, [])
+        ]
+        update_dict = None
+        for profile in existing_clusters_profiles:
+            filter_q = {"_id": profile}
+            data = self.db.get_one("clusters", filter_q)
+            if profile_type in data:
+                profile_ids = data[profile_type]
+                if _id in profile_ids:
+                    profile_ids.remove(_id)
+                    update_dict = {profile_type: profile_ids}
+                    self.db.set_one("clusters", filter_q, update_dict)
index 28c923e..e8c609a 100644 (file)
@@ -2253,7 +2253,7 @@ class NsConfigTemplateTopic(DescriptorTopic):
 
         # check NS CONFIG TEMPLATE used by NS
         ns_config_template_id = _id
-        self.logger.info("The id is : {}".format(_id))
+        self.logger.info("The id is : {}".format(_id))
         if self.db.get_list(
             "nsrs", {"instantiate_params.nsConfigTemplateId": ns_config_template_id}
         ):
index fe7d8db..e419e8a 100644 (file)
@@ -53,6 +53,14 @@ from osm_nbi.instance_topics import (
     NsiTopic,
     NsiLcmOpTopic,
 )
+from osm_nbi.k8s_topics import (
+    K8sTopic,
+    InfraContTopic,
+    InfraConfTopic,
+    AppTopic,
+    ResourceTopic,
+    K8saddTopic,
+)
 from osm_nbi.vnf_instance_topics import VnfInstances, VnfLcmOpTopic
 from osm_nbi.pmjobs_topics import PmJobsTopic
 from osm_nbi.subscription_topics import NslcmSubscriptionsTopic
@@ -92,6 +100,12 @@ class Engine(object):
         "vnflcmops": VnfLcmOpTopic,
         "vnflcm_subscriptions": VnflcmSubscriptionsTopic,
         "nsconfigtemps": NsConfigTemplateTopic,
+        "k8s": K8sTopic,
+        "infras_cont": InfraContTopic,
+        "infras_conf": InfraConfTopic,
+        "apps": AppTopic,
+        "resources": ResourceTopic,
+        "k8sops": K8saddTopic,
         # [NEW_TOPIC]: add an entry here
         # "pm_jobs": PmJobsTopic will be added manually because it needs other parameters
     }
@@ -265,6 +279,26 @@ class Engine(object):
         with self.write_lock:
             return self.map_topic[topic].new(rollback, session, indata, kwargs, headers)
 
+    def add_item(
+        self, rollback, session, topic, indata=None, kwargs=None, headers=None
+    ):
+        """
+        Register a cluster in the database.
+        :param rollback: list to append created items at database in case a rollback must be done
+        :param session: contains the used login username and working project, force to avoid checkins, public
+        :param topic: it can be: clusters, for registering a cluster into the database
+        :param indata: data to be inserted
+        :param kwargs: used to override the indata descriptor
+        :param headers: http request headers
+        :return: _id: identity of the inserted data.
+        """
+        if topic not in self.map_topic:
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
+        with self.write_lock:
+            return self.map_topic[topic].add(rollback, session, indata, kwargs, headers)
+
     def upload_content(self, session, topic, _id, indata, kwargs, headers):
         """
         Upload content for an already created entry (_id)
@@ -304,7 +338,7 @@ class Engine(object):
         """
         Get complete information on an item
         :param session: contains the used login username and working project
-        :param topic: it can be: users, projects, vnfds, nsds,
+        :param topic: it can be: users, projects, vnfds, nsds, clusters,
         :param _id: server id of the item
         :param filter_q: other arguments
         :param api_req: True if this call is serving an external API request. False if serving internal request.
@@ -316,6 +350,23 @@ class Engine(object):
             )
         return self.map_topic[topic].show(session, _id, filter_q, api_req)
 
+    def get_one_item(self, session, topic, _id, profile, filter_q=None, api_req=False):
+        """
+        Get the profiles of a given type attached to a cluster
+        :param session: contains the used login username and working project
+        :param topic: it can be: clusters
+        :param _id: server id of the item
+        :param profile: profile type to be listed, e.g. app_profiles
+        :param filter_q: other arguments
+        :param api_req: True if this call is serving an external API request. False if serving internal request.
+        :return: dictionary, raise exception if not found.
+        """
+        if topic not in self.map_topic:
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
+        return self.map_topic[topic].show_one(session, _id, profile, filter_q, api_req)
+
     def get_file(self, session, topic, _id, path=None, accept_header=None):
         """
         Get descriptor package or artifact file content
@@ -365,6 +416,24 @@ class Engine(object):
         with self.write_lock:
             return self.map_topic[topic].delete(session, _id, not_send_msg=not_send_msg)
 
+    def remove(self, session, topic, _id, not_send_msg=None):
+        """
+        Delete item by its internal id
+        :param session: contains the used login username and working project
+        :param topic: it can be: clusters
+        :param _id: server id of the item
+        :param not_send_msg: If False, message will not be sent to kafka.
+            If a list, message is not sent, but content is stored in this variable so that the caller can send this
+            message using its own loop. If None, message is sent
+        :return: dictionary with deleted item _id. It raises exception if not found.
+        """
+        if topic not in self.map_topic:
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
+        with self.write_lock:
+            return self.map_topic[topic].remove(session, _id, not_send_msg=not_send_msg)
+
     def edit_item(self, session, topic, _id, indata=None, kwargs=None):
         """
         Update an existing entry at database
@@ -382,6 +451,24 @@ class Engine(object):
         with self.write_lock:
             return self.map_topic[topic].edit(session, _id, indata, kwargs)
 
+    def edit(self, session, topic, _id, item, indata=None, kwargs=None):
+        """
+        Update an existing entry at database
+        :param session: contains the used login username and working project
+        :param topic: it can be: users, projects, vnfds, nsds, ...
+        :param _id: identifier to be updated
+        :param item: profile type to be attached or detached, e.g. app_profiles
+        :param indata: data to be inserted
+        :param kwargs: used to override the indata descriptor
+        :return: dictionary with edited item _id, raise exception if not found.
+        """
+        if topic not in self.map_topic:
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
+        with self.write_lock:
+            return self.map_topic[topic].edit(session, _id, item, indata, kwargs)
+
     def cancel_item(
         self, rollback, session, topic, indata=None, kwargs=None, headers=None
     ):
index 22fd7b8..96dc007 100644 (file)
@@ -2427,7 +2427,7 @@ class NsLcmOpTopic(BaseTopic):
                     new_sw_version = vnfd.get("software-version", "1.0")
                     if new_sw_version != old_sw_version:
                         vnf_index = vnfr["member-vnf-index-ref"]
-                        self.logger.info("nsr {}".format(nsr))
+                        self.logger.info("nsr {}".format(nsr))
                         for vdu in vnfd["vdu"]:
                             self.nsrtopic._add_shared_volumes_to_nsr(
                                 vdu, vnfd, nsr, vnf_index, latest_vnfd_revision
@@ -2851,9 +2851,9 @@ class NsiTopic(BaseTopic):
         except ValidationError as e:
             raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
         except Exception as e:  # TODO remove try Except, it is captured at nbi.py
-            self.logger.exception(
-                "Exception {} at NsiTopic.new()".format(e), exc_info=True
-            )
+            # self.logger.exception(
+            #     "Exception {} at NsiTopic.new()".format(e), exc_info=True
+            # )
             raise EngineException("Error {}: {}".format(step, e))
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
@@ -2979,7 +2979,7 @@ class NsiLcmOpTopic(BaseTopic):
             _filter = self._get_project_filter(session)
             _filter["_id"] = netsliceInstanceId
             nsir = self.db.get_one("nsis", _filter)
-            logging_prefix = "nsi={} {} ".format(netsliceInstanceId, operation)
+            logging_prefix = "nsi={} {} ".format(netsliceInstanceId, operation)
             del _filter["_id"]
 
             # initial checking
@@ -3087,10 +3087,10 @@ class NsiLcmOpTopic(BaseTopic):
                         self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
                 except (DbException, EngineException) as e:
                     if e.http_code == HTTPStatus.NOT_FOUND:
-                        self.logger.info(
-                            logging_prefix
-                            + "skipping NS={} because not found".format(nsr_id)
-                        )
+                        # self.logger.info(
+                        #     logging_prefix
+                        #     + "skipping NS={} because not found".format(nsr_id)
+                        # )
                         pass
                     else:
                         raise
diff --git a/osm_nbi/k8s_topics.py b/osm_nbi/k8s_topics.py
new file mode 100644 (file)
index 0000000..9a6eb06
--- /dev/null
@@ -0,0 +1,592 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# import logging
+from http import HTTPStatus
+
+from osm_nbi.base_topic import BaseTopic, EngineException
+
+from osm_nbi.validation import (
+    ValidationError,
+    clustercreation_new_schema,
+    infra_controller_profile_create_new_schema,
+    infra_config_profile_create_new_schema,
+    app_profile_create_new_schema,
+    resource_profile_create_new_schema,
+    infra_controller_profile_create_edit_schema,
+    infra_config_profile_create_edit_schema,
+    app_profile_create_edit_schema,
+    resource_profile_create_edit_schema,
+    k8scluster_new_schema,
+    attach_dettach_profile_schema,
+)
+from osm_common.dbbase import DbException
+from osm_common.msgbase import MsgException
+from osm_common.fsbase import FsException
+
+__author__ = "Shrinithi R <shrinithi.r@tataelxsi.co.in>"
+
+
+class InfraContTopic(BaseTopic):
+    topic = "k8sinfra_controller"
+    topic_msg = "k8s_infra_controller"
+    schema_new = infra_controller_profile_create_new_schema
+    schema_edit = infra_controller_profile_create_edit_schema
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        # self.logger = logging.getLogger("nbi.k8s_topics")
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the new infra controller profile
+        return self.new_profile(rollback, session, indata, kwargs, headers)
+
+    def default(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the default infra controller profile while creating the cluster
+        return self.default_profile(rollback, session, indata, kwargs, headers)
+
+    def delete(self, session, _id, dry_run=False, not_send_msg=None):
+        item_content = self.db.get_one(self.topic, {"_id": _id})
+        if item_content.get("default", False):
+            raise EngineException(
+                "Cannot delete item because it is marked as default",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        # Before deleting, detach the profile from the associated clusters.
+        self.detach(session, _id, profile_type="infra_controller_profiles")
+        # To delete the infra controller profile
+        super().delete(session, _id, not_send_msg=not_send_msg)
+        return
+
+
+class InfraConfTopic(BaseTopic):
+    topic = "k8sinfra_config"
+    topic_msg = "k8s_infra_config"
+    schema_new = infra_config_profile_create_new_schema
+    schema_edit = infra_config_profile_create_edit_schema
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        # self.logger = logging.getLogger("nbi.k8s_topics")
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the new infra config profile
+        return self.new_profile(rollback, session, indata, kwargs, headers)
+
+    def default(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the default infra config profile while creating the cluster
+        return self.default_profile(rollback, session, indata, kwargs, headers)
+
+    def delete(self, session, _id, dry_run=False, not_send_msg=None):
+        item_content = self.db.get_one(self.topic, {"_id": _id})
+        if item_content.get("default", False):
+            raise EngineException(
+                "Cannot delete item because it is marked as default",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        # Before deleting, detach the profile from the associated clusters.
+        self.detach(session, _id, profile_type="infra_config_profiles")
+        # To delete the infra config profile
+        super().delete(session, _id, not_send_msg=not_send_msg)
+        return
+
+
+class AppTopic(BaseTopic):
+    topic = "k8sapp"
+    topic_msg = "k8s_app"
+    schema_new = app_profile_create_new_schema
+    schema_edit = app_profile_create_edit_schema
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        # self.logger = logging.getLogger("nbi.k8s_topics")
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the new app profile
+        return self.new_profile(rollback, session, indata, kwargs, headers)
+
+    def default(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the default app profile while creating the cluster
+        return self.default_profile(rollback, session, indata, kwargs, headers)
+
+    def delete(self, session, _id, dry_run=False, not_send_msg=None):
+        item_content = self.db.get_one(self.topic, {"_id": _id})
+        if item_content.get("default", False):
+            raise EngineException(
+                "Cannot delete item because it is marked as default",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        # Before deleting, detach the profile from the associated clusters.
+        self.detach(session, _id, profile_type="app_profiles")
+        # To delete the app profile
+        super().delete(session, _id, not_send_msg=not_send_msg)
+        return
+
+
+class ResourceTopic(BaseTopic):
+    topic = "k8sresource"
+    topic_msg = "k8s_resource"
+    schema_new = resource_profile_create_new_schema
+    schema_edit = resource_profile_create_edit_schema
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        # self.logger = logging.getLogger("nbi.k8s_topics")
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the new resource profile
+        return self.new_profile(rollback, session, indata, kwargs, headers)
+
+    def default(self, rollback, session, indata=None, kwargs=None, headers=None):
+        # To create the default resource profile while creating the cluster
+        return self.default_profile(rollback, session, indata, kwargs, headers)
+
+    def delete(self, session, _id, dry_run=False, not_send_msg=None):
+        item_content = self.db.get_one(self.topic, {"_id": _id})
+        if item_content.get("default", False):
+            raise EngineException(
+                "Cannot delete item because it is marked as default",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        # Before deleting, detach the profile from the associated clusters.
+        self.detach(session, _id, profile_type="resource_profiles")
+        # To delete the resource profile
+        super().delete(session, _id, not_send_msg=not_send_msg)
+        return
+
+
+class K8sTopic(BaseTopic):
+    topic = "clusters"
+    topic_msg = "cluster"
+    schema_new = clustercreation_new_schema
+    schema_edit = attach_dettach_profile_schema
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        self.infra_contr_topic = InfraContTopic(db, fs, msg, auth)
+        self.infra_conf_topic = InfraConfTopic(db, fs, msg, auth)
+        self.resource_topic = ResourceTopic(db, fs, msg, auth)
+        self.app_topic = AppTopic(db, fs, msg, auth)
+        # self.logger = logging.getLogger("nbi.k8s_topics")
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        """
+        Creates a new k8s cluster into database.
+        :param rollback: list to append the created items at database in case a rollback must be done
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param indata: params to be used for the k8s cluster
+        :param kwargs: used to override the indata
+        :param headers: http request headers
+        :return: the _id of the k8s cluster created at database. Or an exception of type
+            EngineException, ValidationError, DbException, FsException, MsgException.
+            Note: Exceptions are not captured on purpose. They should be captured at the caller.
+        """
+        step = "checking quotas"  # first step must be defined outside try
+        try:
+            self.check_quota(session)
+            step = "name unique check"
+            self.check_unique_name(session, indata["name"])
+            step = "validating input parameters"
+            cls_request = self._remove_envelop(indata)
+            self._update_input_with_kwargs(cls_request, kwargs)
+            cls_request = self._validate_input_new(cls_request, session["force"])
+            operation_params = cls_request
+
+            step = "filling cluster details from input data"
+            cls_create = self._create_cluster(
+                cls_request, rollback, session, indata, kwargs, headers
+            )
+
+            step = "creating cluster at database"
+            self.format_on_new(
+                cls_create, session["project_id"], make_public=session["public"]
+            )
+            cls_create["current_operation"] = None
+            op_id = self.format_on_operation(
+                cls_create,
+                "create",
+                operation_params,
+            )
+            _id = self.db.create(self.topic, cls_create)
+            rollback.append({"topic": self.topic, "_id": _id})
+            self.db.set_one("clusters", {"_id": _id}, cls_create)
+            self._send_msg("create", {"cluster_id": _id, "operation_id": op_id})
+
+            return _id, None
+        except (
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
+        ) as e:
+            raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
+
+    def _create_cluster(self, cls_request, rollback, session, indata, kwargs, headers):
+        # Take region_name and resource_group from the request when provided,
+        # otherwise fall back to the values configured in the VIM account
+        vim_account_details = self.db.get_one(
+            "vim_accounts", {"name": cls_request["vim_account"]}
+        )
+        if "region_name" in indata:
+            region_name = cls_request["region_name"]
+        else:
+            region_name = vim_account_details["config"]["region_name"]
+        if "resource_group" in indata:
+            resource_group = cls_request["resource_group"]
+        else:
+            resource_group = vim_account_details["config"]["resource_group"]
+
+        cls_desc = {
+            "name": cls_request["name"],
+            "vim_account": self.check_vim(session, cls_request["vim_account"]),
+            "k8s_version": cls_request["k8s_version"],
+            "node_size": cls_request["node_size"],
+            "node_count": cls_request["node_count"],
+            "description": cls_request["description"],
+            "region_name": region_name,
+            "resource_group": resource_group,
+            "infra_controller_profiles": [
+                self._create_default_profiles(
+                    rollback, session, indata, kwargs, headers, self.infra_contr_topic
+                )
+            ],
+            "infra_config_profiles": [
+                self._create_default_profiles(
+                    rollback, session, indata, kwargs, headers, self.infra_conf_topic
+                )
+            ],
+            "resource_profiles": [
+                self._create_default_profiles(
+                    rollback, session, indata, kwargs, headers, self.resource_topic
+                )
+            ],
+            "app_profiles": [
+                self._create_default_profiles(
+                    rollback, session, indata, kwargs, headers, self.app_topic
+                )
+            ],
+            "created": "true",
+            "state": "IN_CREATION",
+            "operatingState": "PROCESSING",
+            "git_name": self.create_gitname(cls_request, session),
+            "resourceState": "IN_PROGRESS.REQUEST_RECEIVED",
+        }
+        return cls_desc
+
+    def check_vim(self, session, name):
+        # Ensure the referenced VIM account exists; db.get_one raises DbException if it does not
+        try:
+            self.db.get_one("vim_accounts", {"name": name})
+            return name
+        except DbException as e:
+            raise EngineException(
+                "VIM account '{}' not found: {}".format(name, e),
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
+    def _create_default_profiles(
+        self, rollback, session, indata, kwargs, headers, topic
+    ):
+        # "topic" is already a profile topic object here, so use it directly
+        default_profiles = topic.default(rollback, session, indata, kwargs, headers)
+        return default_profiles
+
+    def to_select_topic(self, topic):
+        if topic == "infra_controller_profiles":
+            topic = self.infra_contr_topic
+        elif topic == "infra_config_profiles":
+            topic = self.infra_conf_topic
+        elif topic == "resource_profiles":
+            topic = self.resource_topic
+        elif topic == "app_profiles":
+            topic = self.app_topic
+        return topic
+
+    def show_one(self, session, _id, profile, filter_q=None, api_req=False):
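+        """
+        List the profiles of a given type attached to a cluster.
+        :param _id: cluster internal id
+        :param profile: profile type, e.g. "app_profiles"
+        :return: list with the content of every attached profile
+        """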
+        try:
+            filter_q = self._get_project_filter(session)
+            filter_q[self.id_field(self.topic, _id)] = _id
+            content = self.db.get_one(self.topic, filter_q)
+            existing_profiles = []
+            topic = self.to_select_topic(profile)
+            for profile_id in content[profile]:
+                data = topic.show(session, profile_id, filter_q, api_req)
+                existing_profiles.append(data)
+            return existing_profiles
+        except ValidationError as e:
+            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    def state_check(self, profile_id, session, topic):
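+        # Only profiles in CREATED state can be attached to a cluster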
+        topic = self.to_select_topic(topic)
+        content = topic.show(session, profile_id, filter_q=None, api_req=False)
+        state = content["state"]
+        if state == "CREATED":
+            return
+        else:
+            raise EngineException(
+                f"{profile_id} is not in CREATED state",
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
+    def edit(self, session, _id, item, indata=None, kwargs=None):
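+        # Attach (add_profile) or detach (remove_profile) a profile to/from a cluster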
+        indata = self._remove_envelop(indata)
+        indata = self._validate_input_edit(indata, content=None, force=session["force"])
+        if indata.get("add_profile"):
+            self.add_profile(session, _id, item, indata)
+        elif indata.get("remove_profile"):
+            self.remove_profile(session, _id, item, indata)
+        else:
+            error_msg = "Only add_profile / remove_profile operations are supported"
+            raise EngineException(error_msg, HTTPStatus.UNPROCESSABLE_ENTITY)
+
+    def add_profile(self, session, _id, item, indata=None):
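+        # Attach an existing profile to the cluster and send the corresponding kafka message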
+        indata = self._remove_envelop(indata)
+        operation_params = indata
+        profile_id = indata["add_profile"][0]["id"]
+        # check state
+        self.state_check(profile_id, session, item)
+        filter_q = self._get_project_filter(session)
+        filter_q[self.id_field(self.topic, _id)] = _id
+        content = self.db.get_one(self.topic, filter_q)
+        profile_list = content[item]
+
+        if profile_id not in profile_list:
+            content["operatingState"] = "PROCESSING"
+            content["current_operation"] = None
+            op_id = self.format_on_operation(
+                content,
+                "add",
+                operation_params,
+            )
+            self.db.set_one("clusters", {"_id": content["_id"]}, content)
+            self._send_msg(
+                "add",
+                {
+                    "cluster_id": _id,
+                    "profile_id": profile_id,
+                    "profile_type": item,
+                    "operation_id": op_id,
+                },
+            )
+        else:
+            raise EngineException(
+                f"{item} {profile_id} already exists", HTTPStatus.UNPROCESSABLE_ENTITY
+            )
+
+    def _get_default_profiles(self, session, topic):
+        topic = self.to_select_topic(topic)
+        existing_profiles = topic.list(session, filter_q=None, api_req=False)
+        default_profiles = [
+            profile["_id"]
+            for profile in existing_profiles
+            if profile.get("default", False)
+        ]
+        return default_profiles
+
+    def remove_profile(self, session, _id, item, indata):
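+        # Detach a profile from the cluster; default profiles cannot be removed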
+        indata = self._remove_envelop(indata)
+        operation_params = indata
+        profile_id = indata["remove_profile"][0]["id"]
+        filter_q = self._get_project_filter(session)
+        filter_q[self.id_field(self.topic, _id)] = _id
+        content = self.db.get_one(self.topic, filter_q)
+        profile_list = content[item]
+
+        default_profiles = self._get_default_profiles(session, item)
+
+        if profile_id in default_profiles:
+            raise EngineException(
+                "Cannot remove default profile", HTTPStatus.UNPROCESSABLE_ENTITY
+            )
+        if profile_id in profile_list:
+            content["operatingState"] = "PROCESSING"
+            content["current_operation"] = None
+            op_id = self.format_on_operation(
+                content,
+                "remove",
+                operation_params,
+            )
+            self.db.set_one("clusters", {"_id": content["_id"]}, content)
+            self._send_msg(
+                "remove",
+                {
+                    "cluster_id": _id,
+                    "profile_id": profile_id,
+                    "profile_type": item,
+                    "operation_id": op_id,
+                },
+            )
+        else:
+            raise EngineException(
+                f"{item} {profile_id} does not exist", HTTPStatus.UNPROCESSABLE_ENTITY
+            )
+
+
+class K8saddTopic(BaseTopic):
+    topic = "clusters"
+    topic_msg = "cluster"
+    schema_new = k8scluster_new_schema
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+
+    def add(self, rollback, session, indata, kwargs=None, headers=None):
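+        """
+        Register an already existing cluster into the database.
+        :param rollback: list to append created items at database in case a rollback must be done
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param indata: cluster registration parameters
+        :param kwargs: used to override the indata
+        :param headers: http request headers
+        :return: _id of the registered cluster and None as operation id
+        """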
+        step = "checking quotas"
+        try:
+            self.check_quota(session)
+            step = "name unique check"
+            self.check_unique_name(session, indata["name"])
+            step = "validating input parameters"
+            cls_add_request = self._remove_envelop(indata)
+            self._update_input_with_kwargs(cls_add_request, kwargs)
+            cls_add_request = self._validate_input_new(
+                cls_add_request, session["force"]
+            )
+            operation_params = cls_add_request
+
+            step = "filling cluster details from input data"
+            cls_add = self._add_cluster(cls_add_request, session)
+
+            step = "creating cluster at database"
+            self.format_on_new(
+                cls_add, session["project_id"], make_public=session["public"]
+            )
+            cls_add["current_operation"] = None
+            op_id = self.format_on_operation(
+                cls_add,
+                "register",
+                operation_params,
+            )
+            _id = self.db.create(self.topic, cls_add)
+            self.db.set_one(self.topic, {"_id": _id}, cls_add)
+            rollback.append({"topic": self.topic, "_id": _id})
+            self._send_msg("register", {"cluster_id": _id, "operation_id": op_id})
+            return _id, None
+        except (
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
+        ) as e:
+            raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
+
+    def _add_cluster(self, cls_add_request, session):
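+        # Initial descriptor of a registered (externally created) cluster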
+        cls_add = {
+            "name": cls_add_request["name"],
+            "description": cls_add_request["description"],
+            "credentials": cls_add_request["credentials"],
+            "vim_account": cls_add_request["vim_account"],
+            "k8s_version": cls_add_request["k8s_version"],
+            "nets": cls_add_request["nets"],
+            "created": "false",
+            "state": "IN_CREATION",
+            "operatingState": "PROCESSING",
+            "git_name": self.create_gitname(cls_add_request, session),
+            "resourceState": "IN_PROGRESS.REQUEST_RECEIVED",
+        }
+        return cls_add
+
+    def remove(self, session, _id, dry_run=False, not_send_msg=None):
+        """
+        Delete item by its internal _id
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param dry_run: make checking but do not delete
+        :param not_send_msg: To not send message (False) or store content (list) instead
+        :return: operation id (None if there is not operation), raise exception if error or not found, conflict, ...
+        """
+
+        # To allow addressing projects and users by name AS WELL AS by _id
+        if not self.multiproject:
+            filter_q = {}
+        else:
+            filter_q = self._get_project_filter(session)
+        filter_q[self.id_field(self.topic, _id)] = _id
+        item_content = self.db.get_one(self.topic, filter_q)
+
+        item_content["operatingState"] = "PROCESSING"
+        item_content["current_operation"] = None
+        op_id = self.format_on_operation(
+            item_content,
+            "deregister",
+            None,
+        )
+        self.db.set_one(self.topic, {"_id": _id}, item_content)
+
+        self.check_conflict_on_del(session, _id, item_content)
+        if dry_run:
+            return None
+
+        if self.multiproject and session["project_id"]:
+            # remove reference from project_read if there are more projects referencing it. If it is the last one,
+            # do not remove reference, but delete
+            other_projects_referencing = next(
+                (
+                    p
+                    for p in item_content["_admin"]["projects_read"]
+                    if p not in session["project_id"] and p != "ANY"
+                ),
+                None,
+            )
+
+            # check if there are projects referencing it (apart from ANY, that means, public)....
+            if other_projects_referencing:
+                # remove references but not delete
+                update_dict_pull = {
+                    "_admin.projects_read": session["project_id"],
+                    "_admin.projects_write": session["project_id"],
+                }
+                self.db.set_one(
+                    self.topic, filter_q, update_dict=None, pull_list=update_dict_pull
+                )
+                return None
+            else:
+                can_write = next(
+                    (
+                        p
+                        for p in item_content["_admin"]["projects_write"]
+                        if p == "ANY" or p in session["project_id"]
+                    ),
+                    None,
+                )
+                if not can_write:
+                    raise EngineException(
+                        "You have not write permission to delete it",
+                        http_code=HTTPStatus.UNAUTHORIZED,
+                    )
+
+        # delete
+        self._send_msg(
+            "deregister",
+            {"cluster_id": _id, "operation_id": op_id},
+            not_send_msg=not_send_msg,
+        )
+        return None
index 14a0899..f031665 100644 (file)
@@ -149,6 +149,24 @@ URL: /osm                                                       GET     POST
             /subscriptions                                      X       X
                 /<subscriptionId>                               X                       X
 
+        /k8scluster/v1
+            /clusters                                           O       O
+                /<clustersId>                                   O                       O
+                    app_profiles                                O                               O
+                    infra_controller_profiles                   O                               O
+                    infra_config_profiles                       O                               O
+                    resource_profiles                           O                               O
+                    deregister                                                          O
+                /register                                               O
+            /app_profiles                                       O       O
+                /<app_profilesId>                               O                       O       O
+            /infra_controller_profiles                          O       O
+                /<infra_controller_profilesId>                  O                       O       O
+            /infra_config_profiles                              O       O
+                /<infra_config_profilesId>                      O                       O       O
+            /resource_profiles                                  O       O
+                /<resource_profilesID>                          O                       O       O
+
 query string:
     Follows SOL005 section 4.3.2 It contains extra METHOD to override http method, FORCE to force.
         simpleFilterExpr := <attrName>["."<attrName>]*["."<op>]"="<value>[","<value>]*
@@ -660,6 +678,74 @@ valid_url_methods = {
             }
         },
     },
+    "k8scluster": {
+        "v1": {
+            "clusters": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "k8scluster:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "k8scluster:id:",
+                    "app_profiles": {
+                        "METHODS": ("PATCH", "GET"),
+                        "ROLE_PERMISSION": "k8scluster:id:app_profiles:",
+                    },
+                    "infra_controller_profiles": {
+                        "METHODS": ("PATCH", "GET"),
+                        "ROLE_PERMISSION": "k8scluster:id:infra_profiles:",
+                    },
+                    "infra_config_profiles": {
+                        "METHODS": ("PATCH", "GET"),
+                        "ROLE_PERMISSION": "k8scluster:id:infra_profiles:",
+                    },
+                    "resource_profiles": {
+                        "METHODS": ("PATCH", "GET"),
+                        "ROLE_PERMISSION": "k8scluster:id:infra_profiles:",
+                    },
+                    "deregister": {
+                        "METHODS": ("DELETE",),
+                        "ROLE_PERMISSION": "k8scluster:id:deregister:",
+                    },
+                },
+                "register": {
+                    "METHODS": ("POST",),
+                    "ROLE_PERMISSION": "k8scluster:register:",
+                },
+            },
+            "app_profiles": {
+                "METHODS": ("POST", "GET"),
+                "ROLE_PERMISSION": "k8scluster:app_profiles:",
+                "<ID>": {
+                    "METHODS": ("GET", "PATCH", "DELETE"),
+                    "ROLE_PERMISSION": "k8scluster:app_profiles:id:",
+                },
+            },
+            "infra_controller_profiles": {
+                "METHODS": ("POST", "GET"),
+                "ROLE_PERMISSION": "k8scluster:infra_controller_profiles:",
+                "<ID>": {
+                    "METHODS": ("GET", "PATCH", "DELETE"),
+                    "ROLE_PERMISSION": "k8scluster:infra_controller_profiles:id:",
+                },
+            },
+            "infra_config_profiles": {
+                "METHODS": ("POST", "GET"),
+                "ROLE_PERMISSION": "k8scluster:infra_config_profiles:",
+                "<ID>": {
+                    "METHODS": ("GET", "PATCH", "DELETE"),
+                    "ROLE_PERMISSION": "k8scluster:infra_config_profiles:id:",
+                },
+            },
+            "resource_profiles": {
+                "METHODS": ("POST", "GET"),
+                "ROLE_PERMISSION": "k8scluster:resource_profiles:",
+                "<ID>": {
+                    "METHODS": ("GET", "PATCH", "DELETE"),
+                    "ROLE_PERMISSION": "k8scluster:resource_profiles:id:",
+                },
+            },
+        }
+    },
 }
 
 
@@ -731,7 +817,6 @@ class Server(object):
                     cherrypy.request.headers.pop("Content-File-MD5", None)
             if not indata:
                 indata = {}
-
             format_yaml = False
             if cherrypy.request.headers.get("Query-String-Format") == "yaml":
                 format_yaml = True
@@ -1464,6 +1549,7 @@ class Server(object):
                 "nsilcm",
                 "nspm",
                 "vnflcm",
+                "k8scluster",
             ):
                 raise NbiException(
                     "URL main_topic '{}' not supported".format(main_topic),
@@ -1533,6 +1619,18 @@ class Server(object):
                     engine_topic = "nsilcmops"
             elif main_topic == "pdu":
                 engine_topic = "pdus"
+            elif main_topic == "k8scluster":
+                engine_topic = "k8s"
+                if topic == "clusters" and (_id == "register" or item == "deregister"):
+                    engine_topic = "k8sops"
+                elif topic == "infra_controller_profiles":
+                    engine_topic = "infras_cont"
+                elif topic == "infra_config_profiles":
+                    engine_topic = "infras_conf"
+                elif topic == "resource_profiles":
+                    engine_topic = "resources"
+                elif topic == "app_profiles":
+                    engine_topic = "apps"
             if (
                 engine_topic == "vims"
             ):  # TODO this is for backward compatibility, it will be removed in the future
@@ -1572,6 +1670,22 @@ class Server(object):
                     outdata = self.engine.get_item_list(
                         engine_session, engine_topic, kwargs, api_req=True
                     )
+                elif topic == "clusters" and item in (
+                    "infra_controller_profiles",
+                    "infra_config_profiles",
+                    "app_profiles",
+                    "resource_profiles",
+                ):
+                    profile = item
+                    filter_q = None
+                    outdata = self.engine.get_one_item(
+                        engine_session,
+                        engine_topic,
+                        _id,
+                        profile,
+                        filter_q,
+                        api_req=True,
+                    )
                 else:
                     if item == "reports":
                         # TODO check that project_id (_id in this context) has permissions
@@ -1727,6 +1841,30 @@ class Server(object):
                     )
                     self._set_location_header(main_topic, version, topic, _id)
                     cherrypy.response.status = HTTPStatus.ACCEPTED.value
+                elif topic == "clusters" and _id == "register":
+                    # To register a cluster
+                    _id, _ = self.engine.add_item(
+                        rollback, engine_session, engine_topic, indata, kwargs
+                    )
+                    self._set_location_header(main_topic, version, topic, _id)
+                    outdata = {"id": _id}
+                elif (
+                    topic
+                    in (
+                        "clusters",
+                        "infra_controller_profiles",
+                        "infra_config_profiles",
+                        "app_profiles",
+                        "resource_profiles",
+                    )
+                    and item is None
+                ):
+                    # creates cluster, infra_controller_profiles, app_profiles, infra_config_profiles, and resource_profiles
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, engine_topic, indata, kwargs
+                    )
+                    self._set_location_header(main_topic, version, topic, _id)
+                    outdata = {"_id": _id}
                 else:
                     _id, op_id = self.engine.new_item(
                         rollback,
@@ -1777,11 +1915,23 @@ class Server(object):
                         )
                         if op_id:
                             outdata = {"_id": op_id}
+                    elif topic == "clusters" and item == "deregister":
+                        if not op_id:
+                            op_id = self.engine.remove(
+                                engine_session, engine_topic, _id
+                            )
+                            if op_id:
+                                outdata = {"_id": op_id}
+                        cherrypy.response.status = (
+                            HTTPStatus.ACCEPTED.value
+                            if op_id
+                            else HTTPStatus.NO_CONTENT.value
+                        )
                     # if there is not any deletion in process, delete
-                    if not op_id:
+                    elif not op_id:
                         op_id = self.engine.del_item(engine_session, engine_topic, _id)
                         if op_id:
-                            outdata = {"op_id": op_id}
+                            outdata = {"_id": op_id}
                     cherrypy.response.status = (
                         HTTPStatus.ACCEPTED.value
                         if op_id
@@ -1815,6 +1965,15 @@ class Server(object):
                     )
                     if not completed:
                         cherrypy.response.headers["Transaction-Id"] = id
+                elif item in (
+                    "app_profiles",
+                    "resource_profiles",
+                    "infra_controller_profiles",
+                    "infra_config_profiles",
+                ):
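+                    # PATCH on a cluster's profile sub-resource (for example to
+                    # attach or detach profiles) is delegated to engine.edit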
+                    op_id = self.engine.edit(
+                        engine_session, engine_topic, _id, item, indata, kwargs
+                    )
                 else:
                     op_id = self.engine.edit_item(
                         engine_session, engine_topic, _id, indata, kwargs
index 6e2eb45..139e59b 100644 (file)
@@ -391,6 +391,79 @@ resources_to_operations:
 
   "GET /nsilcm/v1/nsi_lcm_op_occs/<nsiLcmOpOccId>": "slice_instances:opps:id:get"
 
+
+################################################################################
+############################ K8S CLUSTERS ######################################
+################################################################################
+
+  "GET /k8scluster/v1/clusters": "k8scluster:get"
+
+  "POST /k8scluster/v1/clusters": "k8scluster:post"
+
+  "GET /k8scluster/v1/clusters/<clustersId>": "k8scluster:id:get"
+
+  "DELETE /k8scluster/v1/clusters/<clustersId>": "k8scluster:id:delete"
+
+  "POST /k8scluster/v1/app_profiles": "k8scluster:app_profiles:post"
+
+  "GET /k8scluster/v1/app_profiles": "k8scluster:app_profiles:get"
+
+  "GET /k8scluster/v1/app_profiles/<app_profilesId>": "k8scluster:app_profiles:id:get"
+
+  "PATCH /k8scluster/v1/app_profiles/<app_profilesId>": "k8scluster:app_profiles:id:patch"
+
+  "DELETE /k8scluster/v1/app_profiles/<app_profilesId>": "k8scluster:app_profiles:id:delete"
+
+  "POST /k8scluster/v1/resource_profiles": "k8scluster:resource_profiles:post"
+
+  "GET /k8scluster/v1/resource_profiles": "k8scluster:resource_profiles:get"
+
+  "GET /k8scluster/v1/resource_profiles/<resource_profilesId>": "k8scluster:resource_profiles:id:get"
+
+  "PATCH /k8scluster/v1/resource_profiles/<resource_profilesId>": "k8scluster:resource_profiles:id:patch"
+
+  "DELETE /k8scluster/v1/resource_profiles/<resource_profilesId>": "k8scluster:resource_profiles:id:delete"
+
+  "GET /k8scluster/v1/infra_controller_profiles": "k8scluster:infra_controller_profiles:get"
+
+  "POST /k8scluster/v1/infra_controller_profiles": "k8scluster:infra_controller_profiles:post"
+
+  "GET /k8scluster/v1/infra_controller_profiles/<infra_controller_profilesId>": "k8scluster:infra_controller_profiles:id:get"
+
+  "PATCH /k8scluster/v1/infra_controller_profiles/<infra_controller_profilesId>": "k8scluster:infra_controller_profiles:id:patch"
+
+  "DELETE /k8scluster/v1/infra_controller_profiles/<infra_controller_profilesId>": "k8scluster:infra_controller_profiles:id:delete"
+
+  "GET /k8scluster/v1/infra_config_profiles": "k8scluster:infra_config_profiles:get"
+
+  "POST /k8scluster/v1/infra_config_profiles": "k8scluster:infra_config_profiles:post"
+
+  "GET /k8scluster/v1/infra_config_profiles/<infra_config_profilesId>": "k8scluster:infra_config_profiles:id:get"
+
+  "PATCH /k8scluster/v1/infra_config_profiles/<infra_config_profilesId>": "k8scluster:infra_config_profiles:id:patch"
+
+  "DELETE /k8scluster/v1/infra_config_profiles/<infra_config_profilesId>": "k8scluster:infra_config_profiles:id:delete"
+
+  "PATCH /k8scluster/v1/clusters/<clustersId>/app_profiles/": "k8scluster:id:app_profiles:patch"
+
+  "GET /k8scluster/v1/clusters/<clustersId>/app_profiles": "k8scluster:id:app_profiles:get"
+
+  "PATCH /k8scluster/v1/clusters/<clustersId>/resource_profiles/": "k8scluster:id:resource_profiles:patch"
+
+  "GET /k8scluster/v1/clusters/<clustersId>/resource_profiles": "k8scluster:id:resource_profiles:get"
+
+  "PATCH /k8scluster/v1/clusters/<clustersId>/infra_controller_profiles": "k8scluster:id:infra_controller_profiles:patch"
+
+  "GET /k8scluster/v1/<clustersId>/infra_controller_profiles": "k8scluster:id:infra_controller_profiles:get"
+
+  "PATCH /k8scluster/v1/clusters/<clustersId>/infra_config_profiles": "k8scluster:id:infra_config_profiles:patch"
+
+  "GET /k8scluster/v1/<clustersId>/infra_config_profiles": "k8scluster:id:infra_config_profiles:get"
+
+  "POST /k8scluster/v1/clusters/register": "k8scluster:register:post"
+
+  "DELETE /k8scluster/v1/clusters/<clustersId>/deregister": "k8scluster:id:deregister:delete"
+
 ################################################################################
 ############################ QUERY STRING admin operations #####################
 ################################################################################
index d282bde..2373ee9 100644 (file)
@@ -972,7 +972,7 @@ osmrepo_properties = {
     "name": name_schema,
     "description": description_schema,
     "type": osmrepo_types,
-    "url": description_schema
+    "url": description_schema,
     # "user": string_schema,
     # "password": passwd_schema
 }
@@ -1073,6 +1073,147 @@ vnfpkgop_new_schema = {
     "additionalProperties": False,
 }
 
+clustercreation_new_schema = {
+    "title": "cluster creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "vim_account": string_schema,
+        "k8s_version": string_schema,
+        "node_size": string_schema,
+        "node_count": integer0_schema,
+        "description": string_schema,
+        "region_name": string_schema,
+        "resource_group": string_schema,
+        "infra_controller_profiles": shortname_schema,
+        "infra_config_profiles": shortname_schema,
+        "resource_profiles": shortname_schema,
+        "app_profiles": shortname_schema,
+    },
+    "required": [
+        "name",
+        "vim_account",
+        "k8s_version",
+        "node_size",
+        "node_count",
+        "description",
+    ],
+    "additionalProperties": False,
+}
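+
+# Illustrative payload accepted by clustercreation_new_schema (placeholder
+# values, not defaults taken from the codebase):
+# {"name": "cluster1", "vim_account": "site1-vim", "k8s_version": "1.29",
+#  "node_size": "t3.large", "node_count": 3, "description": "edge cluster"}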
+
+infra_controller_profile_create_new_schema = {
+    "title": "infra profile creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+infra_controller_profile_create_edit_schema = {
+    "title": "infra profile creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+infra_config_profile_create_new_schema = {
+    "title": "infra profile creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+infra_config_profile_create_edit_schema = {
+    "title": "infra profile creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+app_profile_create_new_schema = {
+    "title": "app profile creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+app_profile_create_edit_schema = {
+    "title": "app profile edit operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+resource_profile_create_new_schema = {
+    "title": "resource profile creation operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+resource_profile_create_edit_schema = {
+    "title": "resource profile edit operation input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "name": name_schema,
+        "description": string_schema,
+    },
+    "additionalProperties": False,
+}
+
+attach_profile = {
+    "type": "array",
+    "items": {
+        "type": "object",
+        "properties": {"id": id_schema},
+        "additionalProperties": False,
+    },
+}
+remove_profile = {
+    "type": "array",
+    "items": {
+        "type": "object",
+        "properties": {"id": id_schema},
+        "additionalProperties": False,
+    },
+}
+attach_dettach_profile_schema = {
+    "title": "attach/dettach profiles",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "add_profile": attach_profile,
+        "remove_profile": remove_profile,
+    },
+    "additionalProperties": False,
+}
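+
+# Illustrative payload accepted by attach_dettach_profile_schema (placeholder
+# UUIDs): attach one profile to the cluster and remove another:
+# {"add_profile": [{"id": "3f2504e0-4f89-41d3-9a0c-0305e82c3301"}],
+#  "remove_profile": [{"id": "3f2504e0-4f89-41d3-9a0c-0305e82c3302"}]}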
 # USERS
 project_role_mappings = {
     "title": "list pf projects/roles",
@@ -1086,6 +1227,7 @@ project_role_mappings = {
     },
     "minItems": 1,
 }
+
 project_role_mappings_optional = {
     "title": "list of projects/roles or projects only",
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -1098,6 +1240,7 @@ project_role_mappings_optional = {
     },
     "minItems": 1,
 }
+
 user_new_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
     "title": "New user schema",