blob: e860a81244096c3b0867ef9acf616c1668da56e8 [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
garciadeblasbc96f382025-01-22 16:02:18 +010030from osm_lcm.n2vc.kubectl import Kubectl
31import yaml
rshri932105f2024-07-05 15:11:55 +000032
# Maps the profile reference fields of a cluster document to the Git
# repository folder that stores the corresponding profile manifests.
# NOTE(review): "managed_resources" uses an underscore while the other
# values are hyphenated — confirm this is intentional.
MAP_PROFILE = dict(
    infra_controller_profiles="infra-controllers",
    infra_config_profiles="infra-configs",
    resource_profiles="managed_resources",
    app_profiles="apps",
)
39
rshri932105f2024-07-05 15:11:55 +000040
class GitOpsLcm(LcmBase):
    """Base LCM driver for GitOps-managed items (clusters, profiles, ...).

    Subclasses set ``db_collection`` to the MongoDB collection that stores
    their items and register per-workflow resource checkers in
    ``self._workflows`` as ``{workflow_key: {"check_resource_function": coro}}``.
    """

    db_collection = "gitops"
    # NOTE(review): class-level attributes shared by all instances; they are
    # never read in this file — presumably legacy, confirm before removing.
    workflow_status = None
    resource_status = None

    # Profile field name (as stored in a cluster document) -> DB collection
    # holding the corresponding profile documents.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """Create the ODU workflow handler and initialize the LCM base.

        :param msg: message bus handler
        :param lcm_tasks: task registry shared across LCM modules
        :param config: LCM configuration
        """
        self.logger = logging.getLogger("lcm.gitops")
        self.lcm_tasks = lcm_tasks
        self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
        # Timeouts (seconds) for the readiness loops of kustomizations and
        # of the managed cloud resources.
        self._checkloop_kustomization_timeout = 900
        self._checkloop_resource_timeout = 900
        # Filled in by subclasses: workflow key -> resource-check coroutine.
        self._workflows = {}
        super().__init__(msg, self.logger)

    async def check_dummy_operation(self, op_id, op_params, content):
        """Fallback resource check used when a workflow registers no checker.

        Always reports success.
        """
        self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
        return True, "OK"

    def initialize_operation(self, item_id, op_id):
        """Mark operation ``op_id`` of item ``item_id`` as started in the DB.

        Sets the workflow/resource/operation states of the matching
        ``operationHistory`` entry and records it as the current operation.

        :raises ValueError: if ``op_id`` is not in the item's history
        """
        db_item = self.db.get_one(self.db_collection, {"_id": item_id})
        operation = next(
            (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
            None,
        )
        # Fail fast with a meaningful error instead of a TypeError on None.
        if operation is None:
            raise ValueError(
                f"Operation {op_id} not found in operationHistory of item {item_id}"
            )
        operation["workflowState"] = "PROCESSING"
        operation["resourceState"] = "NOT_READY"
        operation["operationState"] = "IN_PROGRESS"
        operation["gitOperationInfo"] = None
        db_item["current_operation"] = operation["op_id"]
        self.db.set_one(self.db_collection, {"_id": item_id}, db_item)

    @staticmethod
    def _find_operation(item, operation_id):
        """Return the operationHistory entry whose op_id matches (or None)."""
        return find_in_list(
            item.get("operationHistory", []),
            lambda op: op["op_id"] == operation_id,
        )

    def get_operation_params(self, item, operation_id):
        """Return the ``operationParams`` of the given operation ({} if unset)."""
        return self._find_operation(item, operation_id).get("operationParams", {})

    def get_operation_type(self, item, operation_id):
        """Return the ``operationType`` of the given operation ({} if unset)."""
        return self._find_operation(item, operation_id).get("operationType", {})

    def update_state_operation_history(
        self, content, op_id, workflow_state=None, resource_state=None
    ):
        """Write the given state strings into one operationHistory entry.

        Unlike :meth:`update_operation_history`, the states are stored
        verbatim and operationState/result/endDate are not touched.
        Returns ``content`` (also mutated in place); does not persist to DB.
        """
        self.logger.info(
            f"Update state of operation {op_id} in Operation History in DB"
        )
        self.logger.info(
            f"Workflow state: {workflow_state}. Resource state: {resource_state}"
        )
        self.logger.debug(f"Content: {content}")

        for op_num, operation in enumerate(content["operationHistory"]):
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                if workflow_state is not None:
                    operation["workflowState"] = workflow_state

                if resource_state is not None:
                    operation["resourceState"] = resource_state
                break
        self.logger.debug("content: {}".format(content))

        return content

    def update_operation_history(
        self, content, op_id, workflow_status=None, resource_status=None, op_end=True
    ):
        """Translate boolean workflow/resource results into operation states.

        ``workflow_status``/``resource_status`` semantics: ``None`` means
        "no update"; truthy means success; falsy means failure. When
        ``op_end`` is True the operation endDate is stamped.
        Returns ``content`` (also mutated in place); does not persist to DB.
        """
        self.logger.info(
            f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
        )
        self.logger.debug(f"Content: {content}")

        for op_num, operation in enumerate(content["operationHistory"]):
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                if workflow_status is not None:
                    if workflow_status:
                        operation["workflowState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["workflowState"] = "ERROR"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if resource_status is not None:
                    if resource_status:
                        operation["resourceState"] = "READY"
                        operation["operationState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["resourceState"] = "NOT_READY"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if op_end:
                    now = time()
                    operation["endDate"] = now
                break
        self.logger.debug("content: {}".format(content))

        return content

    async def check_workflow_and_update_db(self, op_id, workflow_name, db_content):
        """Wait for a launched workflow and persist the resulting item state.

        Returns True when the workflow succeeded.
        """
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Message: {}".format(
                workflow_status, workflow_msg
            )
        )
        # Map (operation type, workflow result) to the item lifecycle state.
        operation_type = self.get_operation_type(db_content, op_id)
        if operation_type == "create" and workflow_status:
            db_content["state"] = "CREATED"
        elif operation_type == "create" and not workflow_status:
            db_content["state"] = "FAILED_CREATION"
        elif operation_type == "delete" and workflow_status:
            db_content["state"] = "DELETED"
        elif operation_type == "delete" and not workflow_status:
            db_content["state"] = "FAILED_DELETION"

        if workflow_status:
            db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, None
        )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        return workflow_status

    async def check_resource_and_update_db(
        self, resource_name, op_id, op_params, db_content
    ):
        """Check resource readiness after a successful workflow.

        Returns ``(resource_status, db_content)``; the caller is expected
        to persist ``db_content``.
        """
        # Only reached once the workflow has succeeded, so the history entry
        # is updated with a successful workflow status.
        workflow_status = True

        resource_status, resource_msg = await self.check_resource_status(
            resource_name, op_id, op_params, db_content
        )
        self.logger.info(
            "Resource Status: {} Resource Message: {}".format(
                resource_status, resource_msg
            )
        )

        if resource_status:
            db_content["resourceState"] = "READY"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, resource_status
        )
        db_content["operatingState"] = "IDLE"
        db_content["current_operation"] = None
        return resource_status, db_content

    async def common_check_list(
        self, op_id, checkings_list, db_collection, db_item, kubectl=None
    ):
        """Run the ODU readiness loop for every enabled check in the list.

        Each passed check advances the item's ``resourceState`` in the DB.
        Returns ``(True, "OK")`` on success, or ``(False, message)`` on the
        first failing check or on an unexpected exception.
        """
        try:
            for checking in checkings_list:
                if checking["enable"]:
                    status, message = await self.odu.readiness_loop(
                        item=checking["item"],
                        name=checking["name"],
                        namespace=checking["namespace"],
                        # "flag" is absent in deletion checks; "deleted"
                        # defaults to False for creation checks.
                        flag=checking.get("flag"),
                        deleted=checking.get("deleted", False),
                        timeout=checking["timeout"],
                        kubectl=kubectl,
                    )
                    if not status:
                        error_message = "Resources not ready: "
                        error_message += checking.get("error_message", "")
                        return status, f"{error_message}: {message}"
                    else:
                        db_item["resourceState"] = checking["resourceState"]
                        db_item = self.update_state_operation_history(
                            db_item, op_id, None, checking["resourceState"]
                        )
                        self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
        except Exception as e:
            # Best-effort: readiness failures are reported to the caller
            # rather than propagated.
            self.logger.debug(traceback.format_exc())
            self.logger.debug(f"Exception: {e}", exc_info=True)
            return False, f"Unexpected exception: {e}"
        return True, "OK"

    async def check_resource_status(self, key, op_id, op_params, content):
        """Dispatch to the workflow-specific resource checker for ``key``.

        Falls back to :meth:`check_dummy_operation` when no checker is
        registered in ``self._workflows``.
        """
        self.logger.info(
            f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}."
        )
        self.logger.debug(f"Check resource status. Content: {content}")
        check_resource_function = self._workflows.get(key, {}).get(
            "check_resource_function"
        )
        self.logger.info("check_resource function : {}".format(check_resource_function))
        if check_resource_function:
            return await check_resource_function(op_id, op_params, content)
        else:
            return await self.check_dummy_operation(op_id, op_params, content)

    def decrypted_copy(self, content, fields=("age_pubkey", "age_privkey")):
        """Return a deep copy of ``content`` with ``fields`` decrypted.

        The default is a tuple (not a list) to avoid the shared
        mutable-default-argument pitfall; callers may still pass a list.
        """
        # This deep copy is intended to be passed to ODU workflows.
        content_copy = copy.deepcopy(content)

        # decrypting the key
        self.db.encrypt_decrypt_fields(
            content_copy,
            "decrypt",
            fields,
            schema_version="1.11",
            salt=content_copy["_id"],
        )
        return content_copy

    def cluster_kubectl(self, db_cluster):
        """Build a Kubectl client from the cluster's stored kubeconfig.

        NOTE(review): the kubeconfig (cluster credentials) is written to a
        predictable path under /tmp and never removed — consider tempfile
        with restrictive permissions; confirm first whether Kubectl re-reads
        the file after construction.
        """
        cluster_kubeconfig = db_cluster["credentials"]
        kubeconfig_path = f"/tmp/{db_cluster['_id']}_kubeconfig.yaml"
        with open(kubeconfig_path, "w") as kubeconfig_file:
            yaml.safe_dump(cluster_kubeconfig, kubeconfig_file)
        return Kubectl(config_file=kubeconfig_path)
garciadeblas72412282024-11-07 12:41:54 +0100283
class ClusterLcm(GitOpsLcm):
    # Cluster documents live in the "clusters" collection.
    db_collection = "clusters"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)
        # Map each cluster workflow to the coroutine that verifies its
        # resources once the workflow has finished.
        self._workflows = {
            f"{operation}_cluster": {"check_resource_function": checker}
            for operation, checker in (
                ("create", self.check_create_cluster),
                ("register", self.check_register_cluster),
                ("update", self.check_update_cluster),
                ("delete", self.check_delete_cluster),
            )
        }
        # Helper LCM used to mirror clusters into the k8sclusters collection.
        self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
309
rshri948f7de2024-12-02 03:42:35 +0000310 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000311 self.logger.info("cluster Create Enter")
312
garciadeblas995cbf32024-12-18 12:54:00 +0100313 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000314 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000315 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000316
317 # To initialize the operation states
318 self.initialize_operation(cluster_id, op_id)
319
garciadeblas995cbf32024-12-18 12:54:00 +0100320 # To get the cluster
321 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
322
323 # To get the operation params details
324 op_params = self.get_operation_params(db_cluster, op_id)
325
326 # To copy the cluster content and decrypting fields to use in workflows
327 workflow_content = {
328 "cluster": self.decrypted_copy(db_cluster),
329 }
rshric3564942024-11-12 18:12:38 +0000330
rshri948f7de2024-12-02 03:42:35 +0000331 # To get the vim account details
rshric3564942024-11-12 18:12:38 +0000332 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +0100333 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +0000334
garciadeblasadb81e82024-11-08 01:11:46 +0100335 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100336 "create_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200337 )
rshri932105f2024-07-05 15:11:55 +0000338 self.logger.info("workflow_name is :{}".format(workflow_name))
339
garciadeblas96b94f52024-07-08 16:18:21 +0200340 workflow_status, workflow_msg = await self.odu.check_workflow_status(
341 workflow_name
342 )
rshri932105f2024-07-05 15:11:55 +0000343 self.logger.info(
344 "workflow_status is :{} and workflow_msg is :{}".format(
345 workflow_status, workflow_msg
346 )
347 )
348 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200349 db_cluster["state"] = "CREATED"
350 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000351 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200352 db_cluster["state"] = "FAILED_CREATION"
353 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000354 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000355 db_cluster = self.update_operation_history(
356 db_cluster, op_id, workflow_status, None
357 )
garciadeblas96b94f52024-07-08 16:18:21 +0200358 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000359
garciadeblas28bff0f2024-09-16 12:53:07 +0200360 # Clean items used in the workflow, no matter if the workflow succeeded
361 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100362 "create_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +0200363 )
364 self.logger.info(
365 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
366 )
367
rshri932105f2024-07-05 15:11:55 +0000368 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100369 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100370 "create_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000371 )
372 self.logger.info(
373 "resource_status is :{} and resource_msg is :{}".format(
374 resource_status, resource_msg
375 )
376 )
377 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200378 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000379 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200380 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000381
garciadeblas96b94f52024-07-08 16:18:21 +0200382 db_cluster["operatingState"] = "IDLE"
383 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000384 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000385 )
shahithya70a3fc92024-11-12 11:01:05 +0000386 db_cluster["current_operation"] = None
garciadeblas3e5eeec2025-01-21 11:49:38 +0100387
388 # Retrieve credentials
389 cluster_creds = None
390 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
391 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
392 # TODO: manage the case where the credentials are not available
393 if result:
394 db_cluster["credentials"] = cluster_creds
395
396 # Update db_cluster
garciadeblas96b94f52024-07-08 16:18:21 +0200397 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
398 self.update_profile_state(db_cluster, workflow_status, resource_status)
rshri948f7de2024-12-02 03:42:35 +0000399
garciadeblas3e5eeec2025-01-21 11:49:38 +0100400 # Register the cluster in k8sclusters collection
rshri948f7de2024-12-02 03:42:35 +0000401 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
garciadeblas3e5eeec2025-01-21 11:49:38 +0100402 if cluster_creds:
rshri948f7de2024-12-02 03:42:35 +0000403 db_register["credentials"] = cluster_creds
garciadeblas3e5eeec2025-01-21 11:49:38 +0100404 # To call the lcm.py for registering the cluster in k8scluster lcm.
rshri948f7de2024-12-02 03:42:35 +0000405 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
406 register = await self.regist.create(db_register, order_id)
407 self.logger.debug(f"Register is : {register}")
408 else:
409 db_register["_admin"]["operationalState"] = "ERROR"
410 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
411 # To call the lcm.py for registering the cluster in k8scluster lcm.
412 db_register["credentials"] = cluster_creds
413 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
414
rshri932105f2024-07-05 15:11:55 +0000415 return
416
garciadeblas72412282024-11-07 12:41:54 +0100417 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100418 self.logger.info(
419 f"check_create_cluster Operation {op_id}. Params: {op_params}."
420 )
421 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100422 db_cluster = content["cluster"]
423 cluster_name = db_cluster["git_name"].lower()
424 cluster_kustomization_name = cluster_name
425 db_vim_account = content["vim_account"]
426 cloud_type = db_vim_account["vim_type"]
427 nodepool_name = ""
428 if cloud_type == "aws":
429 nodepool_name = f"{cluster_name}-nodegroup"
430 cluster_name = f"{cluster_name}-cluster"
431 elif cloud_type == "gcp":
432 nodepool_name = f"nodepool-{cluster_name}"
433 bootstrap = op_params.get("bootstrap", True)
434 if cloud_type in ("azure", "gcp", "aws"):
435 checkings_list = [
436 {
437 "item": "kustomization",
438 "name": cluster_kustomization_name,
439 "namespace": "managed-resources",
440 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000441 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100442 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100443 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100444 },
445 {
446 "item": f"cluster_{cloud_type}",
447 "name": cluster_name,
448 "namespace": "",
449 "flag": "Synced",
450 "timeout": self._checkloop_resource_timeout,
451 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100452 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100453 },
454 {
455 "item": f"cluster_{cloud_type}",
456 "name": cluster_name,
457 "namespace": "",
458 "flag": "Ready",
459 "timeout": self._checkloop_resource_timeout,
460 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100461 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100462 },
463 {
464 "item": "kustomization",
465 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
466 "namespace": "managed-resources",
467 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000468 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100469 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100470 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100471 },
472 ]
473 else:
474 return False, "Not suitable VIM account to check cluster status"
475 if nodepool_name:
476 nodepool_check = {
477 "item": f"nodepool_{cloud_type}",
478 "name": nodepool_name,
479 "namespace": "",
480 "flag": "Ready",
481 "timeout": self._checkloop_resource_timeout,
482 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100483 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100484 }
485 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000486 return await self.common_check_list(
487 op_id, checkings_list, "clusters", db_cluster
488 )
garciadeblas72412282024-11-07 12:41:54 +0100489
garciadeblas96b94f52024-07-08 16:18:21 +0200490 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000491 profiles = [
492 "infra_controller_profiles",
493 "infra_config_profiles",
494 "app_profiles",
495 "resource_profiles",
496 ]
rshri948f7de2024-12-02 03:42:35 +0000497 """
rshri932105f2024-07-05 15:11:55 +0000498 profiles_collection = {
499 "infra_controller_profiles": "k8sinfra_controller",
500 "infra_config_profiles": "k8sinfra_config",
501 "app_profiles": "k8sapp",
502 "resource_profiles": "k8sresource",
503 }
rshri948f7de2024-12-02 03:42:35 +0000504 """
Your Name86149632024-11-14 16:17:16 +0000505 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000506 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200507 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000508 # db_collection = profiles_collection[profile_type]
509 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000510 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000511 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200512 db_profile["state"] = db_cluster["state"]
513 db_profile["resourceState"] = db_cluster["resourceState"]
514 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000515 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000516 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000517 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000518 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000519 )
rshri932105f2024-07-05 15:11:55 +0000520 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
521
rshri948f7de2024-12-02 03:42:35 +0000522 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000523 self.logger.info("cluster delete Enter")
rshri948f7de2024-12-02 03:42:35 +0000524
garciadeblas995cbf32024-12-18 12:54:00 +0100525 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000526 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000527 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000528
529 # To initialize the operation states
530 self.initialize_operation(cluster_id, op_id)
531
garciadeblas995cbf32024-12-18 12:54:00 +0100532 # To get the cluster
533 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
534
535 # To get the operation params details
536 op_params = self.get_operation_params(db_cluster, op_id)
537
538 # To copy the cluster content and decrypting fields to use in workflows
539 workflow_content = {
540 "cluster": self.decrypted_copy(db_cluster),
541 }
rshri948f7de2024-12-02 03:42:35 +0000542
garciadeblasbc96f382025-01-22 16:02:18 +0100543 # To get the vim account details
544 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
545 workflow_content["vim_account"] = db_vim
546
garciadeblas6b2112c2024-12-20 10:35:13 +0100547 # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
548 # This if clause will be removed
garciadeblas12470812024-11-18 10:33:12 +0100549 if db_cluster["created"] == "false":
rshri948f7de2024-12-02 03:42:35 +0000550 return await self.deregister(params, order_id)
rshri932105f2024-07-05 15:11:55 +0000551
garciadeblasadb81e82024-11-08 01:11:46 +0100552 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100553 "delete_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200554 )
rshri932105f2024-07-05 15:11:55 +0000555 self.logger.info("workflow_name is :{}".format(workflow_name))
556
garciadeblas96b94f52024-07-08 16:18:21 +0200557 workflow_status, workflow_msg = await self.odu.check_workflow_status(
558 workflow_name
559 )
rshri932105f2024-07-05 15:11:55 +0000560 self.logger.info(
561 "workflow_status is :{} and workflow_msg is :{}".format(
562 workflow_status, workflow_msg
563 )
564 )
565 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200566 db_cluster["state"] = "DELETED"
567 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000568 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200569 db_cluster["state"] = "FAILED_DELETION"
570 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000571 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000572 db_cluster = self.update_operation_history(
573 db_cluster, op_id, workflow_status, None
574 )
garciadeblas96b94f52024-07-08 16:18:21 +0200575 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000576
garciadeblas98f9a3d2024-12-10 13:42:47 +0100577 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
578 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100579 "delete_cluster", op_id, op_params, workflow_content
garciadeblas98f9a3d2024-12-10 13:42:47 +0100580 )
581 self.logger.info(
582 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
583 )
584
rshri932105f2024-07-05 15:11:55 +0000585 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100586 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100587 "delete_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000588 )
589 self.logger.info(
590 "resource_status is :{} and resource_msg is :{}".format(
591 resource_status, resource_msg
592 )
593 )
594 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200595 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000596 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200597 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000598
garciadeblas96b94f52024-07-08 16:18:21 +0200599 db_cluster["operatingState"] = "IDLE"
600 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000601 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +0200602 )
shahithya70a3fc92024-11-12 11:01:05 +0000603 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200604 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000605
garciadeblas96b94f52024-07-08 16:18:21 +0200606 # To delete it from DB
607 if db_cluster["state"] == "DELETED":
608 self.delete_cluster(db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000609
610 # To delete it from k8scluster collection
611 self.db.del_one("k8sclusters", {"name": db_cluster["name"]})
612
rshri932105f2024-07-05 15:11:55 +0000613 return
614
garciadeblasbc96f382025-01-22 16:02:18 +0100615 async def check_delete_cluster(self, op_id, op_params, content):
616 self.logger.info(
617 f"check_delete_cluster Operation {op_id}. Params: {op_params}."
618 )
619 self.logger.debug(f"Content: {content}")
620 db_cluster = content["cluster"]
621 cluster_name = db_cluster["git_name"].lower()
622 cluster_kustomization_name = cluster_name
623 db_vim_account = content["vim_account"]
624 cloud_type = db_vim_account["vim_type"]
625 if cloud_type == "aws":
626 cluster_name = f"{cluster_name}-cluster"
627 if cloud_type in ("azure", "gcp", "aws"):
628 checkings_list = [
629 {
630 "item": "kustomization",
631 "name": cluster_kustomization_name,
632 "namespace": "managed-resources",
633 "deleted": True,
634 "timeout": self._checkloop_kustomization_timeout,
635 "enable": True,
636 "resourceState": "IN_PROGRESS.KUSTOMIZATION_DELETED",
637 },
638 {
639 "item": f"cluster_{cloud_type}",
640 "name": cluster_name,
641 "namespace": "",
642 "deleted": True,
643 "timeout": self._checkloop_resource_timeout,
644 "enable": True,
645 "resourceState": "IN_PROGRESS.RESOURCE_DELETED.CLUSTER",
646 },
647 ]
648 else:
649 return False, "Not suitable VIM account to check cluster status"
650 return await self.common_check_list(
651 op_id, checkings_list, "clusters", db_cluster
652 )
653
garciadeblas96b94f52024-07-08 16:18:21 +0200654 def delete_cluster(self, db_cluster):
655 # Actually, item_content is equal to db_cluster
656 # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
657 # self.logger.debug("item_content is : {}".format(item_content))
rshri932105f2024-07-05 15:11:55 +0000658
rshri932105f2024-07-05 15:11:55 +0000659 # detach profiles
660 update_dict = None
661 profiles_to_detach = [
662 "infra_controller_profiles",
663 "infra_config_profiles",
664 "app_profiles",
665 "resource_profiles",
666 ]
rshri948f7de2024-12-02 03:42:35 +0000667 """
rshri932105f2024-07-05 15:11:55 +0000668 profiles_collection = {
669 "infra_controller_profiles": "k8sinfra_controller",
670 "infra_config_profiles": "k8sinfra_config",
671 "app_profiles": "k8sapp",
672 "resource_profiles": "k8sresource",
673 }
rshri948f7de2024-12-02 03:42:35 +0000674 """
rshri932105f2024-07-05 15:11:55 +0000675 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200676 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200677 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000678 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000679 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000680 # db_collection = profiles_collection[profile_type]
681 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000682 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200683 self.logger.debug("the db_profile is :{}".format(db_profile))
684 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200685 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000686 )
garciadeblasc2552852024-10-22 12:39:32 +0200687 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000688 "the db_profile name is :{}".format(db_profile["name"])
689 )
garciadeblas96b94f52024-07-08 16:18:21 +0200690 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000691 self.db.del_one(db_collection, {"_id": profile_id})
692 else:
rshri932105f2024-07-05 15:11:55 +0000693 profile_ids.remove(profile_id)
694 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000695 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200696 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000697 )
garciadeblas96b94f52024-07-08 16:18:21 +0200698 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000699
rshri948f7de2024-12-02 03:42:35 +0000700 async def attach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000701 self.logger.info("profile attach Enter")
rshri948f7de2024-12-02 03:42:35 +0000702
garciadeblas995cbf32024-12-18 12:54:00 +0100703 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000704 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000705 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000706
707 # To initialize the operation states
708 self.initialize_operation(cluster_id, op_id)
709
garciadeblas995cbf32024-12-18 12:54:00 +0100710 # To get the cluster
711 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
712
713 # To get the operation params details
714 op_params = self.get_operation_params(db_cluster, op_id)
715
716 # To copy the cluster content and decrypting fields to use in workflows
717 workflow_content = {
718 "cluster": self.decrypted_copy(db_cluster),
719 }
rshri948f7de2024-12-02 03:42:35 +0000720
721 # To get the profile details
722 profile_id = params["profile_id"]
723 profile_type = params["profile_type"]
724 profile_collection = self.profile_collection_mapping[profile_type]
725 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
726 db_profile["profile_type"] = profile_type
727 # content["profile"] = db_profile
garciadeblas995cbf32024-12-18 12:54:00 +0100728 workflow_content["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000729
garciadeblasadb81e82024-11-08 01:11:46 +0100730 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100731 "attach_profile_to_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200732 )
rshri932105f2024-07-05 15:11:55 +0000733 self.logger.info("workflow_name is :{}".format(workflow_name))
734
garciadeblas96b94f52024-07-08 16:18:21 +0200735 workflow_status, workflow_msg = await self.odu.check_workflow_status(
736 workflow_name
737 )
rshri932105f2024-07-05 15:11:55 +0000738 self.logger.info(
739 "workflow_status is :{} and workflow_msg is :{}".format(
740 workflow_status, workflow_msg
741 )
742 )
743 if workflow_status:
744 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
745 else:
746 db_cluster["resourceState"] = "ERROR"
747 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000748 db_cluster = self.update_operation_history(
749 db_cluster, op_id, workflow_status, None
750 )
rshri932105f2024-07-05 15:11:55 +0000751 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
752
753 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100754 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100755 "attach_profile_to_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000756 )
757 self.logger.info(
758 "resource_status is :{} and resource_msg is :{}".format(
759 resource_status, resource_msg
760 )
761 )
762 if resource_status:
763 db_cluster["resourceState"] = "READY"
764 else:
765 db_cluster["resourceState"] = "ERROR"
766
767 db_cluster["operatingState"] = "IDLE"
768 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000769 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000770 )
rshri932105f2024-07-05 15:11:55 +0000771 profile_list = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000772 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000773 profile_list.append(profile_id)
rshri932105f2024-07-05 15:11:55 +0000774 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000775 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000776 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
777
778 return
779
rshri948f7de2024-12-02 03:42:35 +0000780 async def detach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000781 self.logger.info("profile dettach Enter")
rshri948f7de2024-12-02 03:42:35 +0000782
garciadeblas995cbf32024-12-18 12:54:00 +0100783 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000784 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000785 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000786
787 # To initialize the operation states
788 self.initialize_operation(cluster_id, op_id)
789
garciadeblas995cbf32024-12-18 12:54:00 +0100790 # To get the cluster
791 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
792
793 # To get the operation params details
794 op_params = self.get_operation_params(db_cluster, op_id)
795
796 # To copy the cluster content and decrypting fields to use in workflows
797 workflow_content = {
798 "cluster": self.decrypted_copy(db_cluster),
799 }
rshri948f7de2024-12-02 03:42:35 +0000800
801 # To get the profile details
802 profile_id = params["profile_id"]
803 profile_type = params["profile_type"]
804 profile_collection = self.profile_collection_mapping[profile_type]
805 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
806 db_profile["profile_type"] = profile_type
807 # content["profile"] = db_profile
garciadeblas995cbf32024-12-18 12:54:00 +0100808 workflow_content["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000809
garciadeblasadb81e82024-11-08 01:11:46 +0100810 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100811 "detach_profile_from_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200812 )
rshri932105f2024-07-05 15:11:55 +0000813 self.logger.info("workflow_name is :{}".format(workflow_name))
814
garciadeblas96b94f52024-07-08 16:18:21 +0200815 workflow_status, workflow_msg = await self.odu.check_workflow_status(
816 workflow_name
817 )
rshri932105f2024-07-05 15:11:55 +0000818 self.logger.info(
819 "workflow_status is :{} and workflow_msg is :{}".format(
820 workflow_status, workflow_msg
821 )
822 )
823 if workflow_status:
824 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
825 else:
826 db_cluster["resourceState"] = "ERROR"
827 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000828 db_cluster = self.update_operation_history(
829 db_cluster, op_id, workflow_status, None
830 )
rshri932105f2024-07-05 15:11:55 +0000831 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
832
833 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100834 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100835 "detach_profile_from_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000836 )
837 self.logger.info(
838 "resource_status is :{} and resource_msg is :{}".format(
839 resource_status, resource_msg
840 )
841 )
842 if resource_status:
843 db_cluster["resourceState"] = "READY"
844 else:
845 db_cluster["resourceState"] = "ERROR"
846
847 db_cluster["operatingState"] = "IDLE"
848 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000849 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000850 )
rshri932105f2024-07-05 15:11:55 +0000851 profile_list = db_cluster[profile_type]
852 self.logger.info("profile list is : {}".format(profile_list))
853 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000854 profile_list.remove(profile_id)
rshri932105f2024-07-05 15:11:55 +0000855 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000856 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000857 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
858
859 return
860
    async def register(self, params, order_id):
        """Register an existing Kubernetes cluster in OSM via the ODU workflow engine.

        Launches the "register_cluster" workflow, tracks workflow/resource
        status in the "clusters" collection and, if the cluster ends up
        READY and CREATED, also registers it with the legacy k8scluster LCM
        (self.regist.create) after mirroring the credentials into the
        matching "k8sclusters" document.

        :param params: dict with at least "cluster_id" and "operation_id"
        :param order_id: task order identifier, forwarded to self.regist.create
        :return: None
        """
        self.logger.info("cluster register enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        _, workflow_name = await self.odu.launch_workflow(
            "register_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # Workflow launch result drives the transient cluster state
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "register_cluster", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "register_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Mirror credentials into the corresponding k8sclusters record.
        # NOTE(review): assumes NBI already created a k8sclusters document with
        # the same name, and that db_cluster carries "credentials" — confirm.
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
        db_register["credentials"] = db_cluster["credentials"]
        self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            # Registration failed: mark the legacy record as in error
            db_register["_admin"]["operationalState"] = "ERROR"
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
949
garciadeblasbc96f382025-01-22 16:02:18 +0100950 async def check_register_cluster(self, op_id, op_params, content):
951 self.logger.info(
952 f"check_register_cluster Operation {op_id}. Params: {op_params}."
953 )
954 # self.logger.debug(f"Content: {content}")
955 db_cluster = content["cluster"]
956 cluster_name = db_cluster["git_name"].lower()
957 cluster_kustomization_name = cluster_name
958 bootstrap = op_params.get("bootstrap", True)
959 checkings_list = [
960 {
961 "item": "kustomization",
962 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
963 "namespace": "managed-resources",
964 "flag": "Ready",
965 "timeout": self._checkloop_kustomization_timeout,
966 "enable": bootstrap,
967 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
968 },
969 ]
970 return await self.common_check_list(
971 op_id, checkings_list, "clusters", db_cluster
972 )
973
    async def deregister(self, params, order_id):
        """Deregister a Kubernetes cluster from OSM and delete its DB records.

        Launches the "deregister_cluster" ODU workflow, updates workflow and
        resource status in the "clusters" collection, cleans temporary
        workflow items, and finally chains into self.delete() to remove the
        cluster document and its profiles.

        :param params: dict with at least "cluster_id" and "operation_id"
        :param order_id: task order identifier, forwarded to self.delete
        :return: result of self.delete(params, order_id)
        """
        self.logger.info("cluster deregister enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        _, workflow_name = await self.odu.launch_workflow(
            "deregister_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "deregister_cluster", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "deregister_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
            # Setting created flag to true avoids infinite loops when deregistering a cluster
            db_cluster["created"] = "true"
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        return await self.delete(params, order_id)
rshri932105f2024-07-05 15:11:55 +00001050
rshri948f7de2024-12-02 03:42:35 +00001051 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001052 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001053 cluster_id = params["cluster_id"]
1054 op_id = params["operation_id"]
1055 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001056 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1057 if result:
1058 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001059 op_len = 0
1060 for operations in db_cluster["operationHistory"]:
1061 if operations["op_id"] == op_id:
1062 db_cluster["operationHistory"][op_len]["result"] = result
1063 db_cluster["operationHistory"][op_len]["endDate"] = time()
1064 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001065 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001066 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001067 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001068 return
1069
rshri948f7de2024-12-02 03:42:35 +00001070 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001071 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001072 # To get the cluster details
1073 cluster_id = params["cluster_id"]
1074 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1075
1076 # To get the operation params details
1077 op_id = params["operation_id"]
1078 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001079
garciadeblas995cbf32024-12-18 12:54:00 +01001080 # To copy the cluster content and decrypting fields to use in workflows
1081 workflow_content = {
1082 "cluster": self.decrypted_copy(db_cluster),
1083 }
rshric3564942024-11-12 18:12:38 +00001084
1085 # vim account details
1086 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +01001087 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +00001088
garciadeblasadb81e82024-11-08 01:11:46 +01001089 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001090 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001091 )
1092 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1093 workflow_name
1094 )
1095 self.logger.info(
1096 "Workflow Status: {} Workflow Message: {}".format(
1097 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001098 )
garciadeblas96b94f52024-07-08 16:18:21 +02001099 )
1100
1101 if workflow_status:
1102 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1103 else:
1104 db_cluster["resourceState"] = "ERROR"
1105
yshahcb9075f2024-11-22 12:08:57 +00001106 db_cluster = self.update_operation_history(
1107 db_cluster, op_id, workflow_status, None
1108 )
garciadeblas96b94f52024-07-08 16:18:21 +02001109 # self.logger.info("Db content: {}".format(db_content))
1110 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1111 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1112
garciadeblas28bff0f2024-09-16 12:53:07 +02001113 # Clean items used in the workflow, no matter if the workflow succeeded
1114 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001115 "update_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001116 )
1117 self.logger.info(
1118 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1119 )
garciadeblas96b94f52024-07-08 16:18:21 +02001120 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001121 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +01001122 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001123 )
1124 self.logger.info(
1125 "Resource Status: {} Resource Message: {}".format(
1126 resource_status, resource_msg
1127 )
1128 )
yshah771dea82024-07-05 15:11:49 +00001129
1130 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001131 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001132 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001133 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001134
yshah0defcd52024-11-18 07:41:35 +00001135 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001136 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001137 )
1138
garciadeblas96b94f52024-07-08 16:18:21 +02001139 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001140 # self.logger.info("db_cluster: {}".format(db_cluster))
1141 # TODO: verify enxtcondition
1142 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1143 if workflow_status:
1144 if "k8s_version" in op_params:
1145 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001146 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001147 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001148 if "node_size" in op_params:
1149 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001150 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001151 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001152 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001153 return
1154
garciadeblasbc96f382025-01-22 16:02:18 +01001155 async def check_update_cluster(self, op_id, op_params, content):
1156 self.logger.info(
1157 f"check_update_cluster Operation {op_id}. Params: {op_params}."
1158 )
1159 self.logger.debug(f"Content: {content}")
1160 return await self.check_dummy_operation(op_id, op_params, content)
1161 # db_cluster = content["cluster"]
1162 # cluster_id = db_cluster["_id"]
1163 # cluster_kubectl = self.cluster_kubectl(db_cluster)
1164 # cluster_name = db_cluster["git_name"].lower()
1165 # cluster_kustomization_name = cluster_name
1166 # checkings_list = [
1167 # {
1168 # "item": "kustomization",
1169 # "name": cluster_kustomization_name,
1170 # "namespace": "managed-resources",
1171 # "flag": "Ready",
1172 # "timeout": self._checkloop_kustomization_timeout,
1173 # "enable": True,
1174 # "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
1175 # },
1176 # ]
1177 # return await self.common_check_list(
1178 # op_id, checkings_list, "clusters", db_cluster, cluster_kubectl
1179 # )
1180
yshah771dea82024-07-05 15:11:49 +00001181
class CloudCredentialsLcm(GitOpsLcm):
    """LCM handler for cloud credentials, stored as VIM account documents.

    Drives the create/update/delete cloud-credentials ODU workflows and keeps
    the corresponding "vim_accounts" records in sync.
    """

    # DB collection holding the credential records handled by this class
    db_collection = "vim_accounts"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param msg: message bus instance
        :param lcm_tasks: task registry used by the LCM
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage', etc.
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def add(self, params, order_id):
        """Create cloud credentials via the "create_cloud_credentials" workflow.

        :param params: VIM account document content; its "_id" is used both as
            record id and as operation id
        :param order_id: task order identifier (unused here)
        :return: None
        """
        self.logger.info("Cloud Credentials create")
        vim_id = params["_id"]
        # The VIM account id doubles as operation id for credential workflows
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # Workflows need the plain-text secrets, so decrypt them in memory
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )

        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )

        # NOTE(review): operationalState is set to ENABLED even when the
        # workflow or resource check failed — confirm this is intended.
        db_content["_admin"]["operationalState"] = "ENABLED"
        for operation in db_content["_admin"]["operations"]:
            if operation["lcmOperationType"] == "create":
                operation["operationState"] = "ENABLED"
        self.logger.info("Content : {}".format(db_content))
        self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
        return

    async def edit(self, params, order_id):
        """Update cloud credentials via the "update_cloud_credentials" workflow.

        :param params: VIM account document content; its "_id" is used both as
            record id and as operation id
        :param order_id: task order identifier (unused here)
        :return: None
        """
        self.logger.info("Cloud Credentials Update")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # Workflows need the plain-text secrets, so decrypt them in memory
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "update_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        return

    async def remove(self, params, order_id):
        """Delete cloud credentials via the "delete_cloud_credentials" workflow.

        The VIM account record is deleted from the DB regardless of the
        workflow outcome.

        :param params: VIM account document content; its "_id" is used both as
            record id and as operation id
        :param order_id: task order identifier (unused here)
        :return: None
        """
        self.logger.info("Cloud Credentials remove")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        return
1318
rshri932105f2024-07-05 15:11:55 +00001319
class K8sAppLcm(GitOpsLcm):
    """LCM handler for K8s application profiles (DB collection "k8sapp")."""

    db_collection = "k8sapp"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an app profile via the ODU "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("App Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        # Use the class-level collection name consistently (it is "k8sapp").
        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        content["profile_type"] = "applications"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Default value so the exit log below cannot raise NameError when the
        # workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(f"App Create Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an app profile via the ODU "delete_profile" workflow.

        The DB record is removed only when the resource check confirms the
        deletion succeeded.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("App delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Default value so the check and exit log below cannot raise NameError
        # when the workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            # Persist the terminal state before removing the record so any
            # consumer of the collection can observe it.
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(f"App Delete Exit with resource status: {resource_status}")
        return
1394
1395
class K8sResourceLcm(GitOpsLcm):
    """LCM handler for K8s managed-resource profiles (DB collection "k8sresource")."""

    db_collection = "k8sresource"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create a resource profile via the ODU "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("Resource Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        # Use the class-level collection name consistently (it is "k8sresource").
        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        content["profile_type"] = "managed-resources"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Default value so the exit log below cannot raise NameError when the
        # workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Resource Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete a resource profile via the ODU "delete_profile" workflow.

        The DB record is removed only when the workflow succeeds and the
        resource check confirms the deletion.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("Resource delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

            if resource_status:
                content["state"] = "DELETED"
                # Persist the terminal state before removing the record.
                self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
                self.db.del_one(self.db_collection, {"_id": content["_id"]})
            self.logger.info(
                f"Resource Delete Exit with resource status: {resource_status}"
            )
        return
1474
1475
class K8sInfraControllerLcm(GitOpsLcm):
    """LCM handler for infra-controller profiles (DB collection "k8sinfra_controller")."""

    db_collection = "k8sinfra_controller"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an infra-controller profile via the ODU "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("Infra controller Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        # Use the class-level collection name consistently.
        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        content["profile_type"] = "infra-controllers"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Default value so the exit log below cannot raise NameError when the
        # workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Infra Controller Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete an infra-controller profile via the ODU "delete_profile" workflow.

        The DB record is removed only when the workflow succeeds and the
        resource check confirms the deletion.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("Infra controller delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

            if resource_status:
                content["state"] = "DELETED"
                # Persist the terminal state before removing the record.
                self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
                self.db.del_one(self.db_collection, {"_id": content["_id"]})
            self.logger.info(
                f"Infra Controller Delete Exit with resource status: {resource_status}"
            )
        return
1554
1555
class K8sInfraConfigLcm(GitOpsLcm):
    """LCM handler for infra-config profiles (DB collection "k8sinfra_config")."""

    db_collection = "k8sinfra_config"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an infra-config profile via the ODU "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("Infra config Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        # Use the class-level collection name consistently.
        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        content["profile_type"] = "infra-configs"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Default value so the exit log below cannot raise NameError when the
        # workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Infra Config Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete an infra-config profile via the ODU "delete_profile" workflow.

        The DB record is removed only when the workflow succeeds and the
        resource check confirms the deletion.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("Infra config delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

            if resource_status:
                content["state"] = "DELETED"
                # Persist the terminal state before removing the record.
                self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
                self.db.del_one(self.db_collection, {"_id": content["_id"]})
            self.logger.info(
                f"Infra Config Delete Exit with resource status: {resource_status}"
            )

        return
yshah771dea82024-07-05 15:11:49 +00001635
1636
class OkaLcm(GitOpsLcm):
    """LCM handler for OSM Kubernetes Applications (OKA), DB collection "okas"."""

    db_collection = "okas"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an OKA via the ODU "create_oka" workflow.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("OKA Create Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "create_oka", op_id, op_params, db_content
        )

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, db_content
        )

        # Default value so the exit log below cannot raise NameError when the
        # workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource_and_update_db(
                "create_oka", op_id, op_params, db_content
            )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
        return

    async def edit(self, params, order_id):
        """Update an OKA via the ODU "update_oka" workflow.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("OKA Edit Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "update_oka", op_id, op_params, db_content
        )
        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, db_content
        )

        # Default value so the exit log below cannot raise NameError when the
        # workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource_and_update_db(
                "update_oka", op_id, op_params, db_content
            )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an OKA via the ODU "delete_oka" workflow.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: kafka message order id (kept for the LCM task interface)
        :return: None
        """
        self.logger.info("OKA delete Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_oka", op_id, op_params, db_content
        )
        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, db_content
        )

        # Default value so the check and exit log below cannot raise NameError
        # when the workflow fails and the resource check is skipped.
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource_and_update_db(
                "delete_oka", op_id, op_params, db_content
            )

        if resource_status:
            # BUG fix: this line used "==" (a no-op comparison); the intent is
            # to record the terminal state before the document is removed.
            db_content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        # NOTE(review): the record is removed even when the workflow or the
        # resource check failed — confirm this best-effort cleanup is intended.
        self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
        return
1721
1722
garciadeblas72412282024-11-07 12:41:54 +01001723class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001724 db_collection = "ksus"
1725
1726 def __init__(self, msg, lcm_tasks, config):
1727 """
1728 Init, Connect to database, filesystem storage, and messaging
1729 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1730 :return: None
1731 """
garciadeblas72412282024-11-07 12:41:54 +01001732 super().__init__(msg, lcm_tasks, config)
garciadeblasbc96f382025-01-22 16:02:18 +01001733 self._workflows = {
1734 "create_ksus": {
1735 "check_resource_function": self.check_create_ksus,
1736 },
1737 "delete_ksus": {
1738 "check_resource_function": self.check_delete_ksus,
1739 },
1740 }
1741
1742 def get_dbclusters_from_profile(self, profile_id, profile_type):
1743 cluster_list = []
1744 db_clusters = self.db.get_list("clusters")
1745 self.logger.info(f"Getting list of clusters for {profile_type} {profile_id}")
1746 for db_cluster in db_clusters:
1747 if profile_id in db_cluster.get(profile_type, []):
1748 self.logger.info(
1749 f"Profile {profile_id} found in cluster {db_cluster['name']}"
1750 )
1751 cluster_list.append(db_cluster)
1752 return cluster_list
yshah771dea82024-07-05 15:11:49 +00001753
yshah564ec9c2024-11-29 07:33:32 +00001754 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001755 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001756 db_content = []
1757 op_params = []
1758 op_id = params["operation_id"]
1759 for ksu_id in params["ksus_list"]:
1760 self.logger.info("Ksu ID: {}".format(ksu_id))
1761 self.initialize_operation(ksu_id, op_id)
1762 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1763 self.logger.info("Db KSU: {}".format(db_ksu))
1764 db_content.append(db_ksu)
1765 ksu_params = {}
1766 ksu_params = self.get_operation_params(db_ksu, op_id)
1767 self.logger.info("Operation Params: {}".format(ksu_params))
1768 # Update ksu_params["profile"] with profile name and age-pubkey
1769 profile_type = ksu_params["profile"]["profile_type"]
1770 profile_id = ksu_params["profile"]["_id"]
1771 profile_collection = self.profile_collection_mapping[profile_type]
1772 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1773 ksu_params["profile"]["name"] = db_profile["name"]
1774 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1775 # Update ksu_params["oka"] with sw_catalog_path (when missing)
garciadeblas8c9c5442025-01-17 01:06:05 +01001776 # TODO: remove this in favor of doing it in ODU workflow
yshah564ec9c2024-11-29 07:33:32 +00001777 for oka in ksu_params["oka"]:
1778 if "sw_catalog_path" not in oka:
1779 oka_id = oka["_id"]
1780 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001781 oka_type = MAP_PROFILE[
1782 db_oka.get("profile_type", "infra_controller_profiles")
1783 ]
garciadeblas8c9c5442025-01-17 01:06:05 +01001784 oka[
1785 "sw_catalog_path"
garciadeblas1ad4e882025-01-24 14:24:41 +01001786 ] = f"{oka_type}/{db_oka['git_name'].lower()}/templates"
yshah564ec9c2024-11-29 07:33:32 +00001787 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001788
garciadeblasbc96f382025-01-22 16:02:18 +01001789 # A single workflow is launched for all KSUs
garciadeblasadb81e82024-11-08 01:11:46 +01001790 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001791 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001792 )
garciadeblasbc96f382025-01-22 16:02:18 +01001793 # Update workflow status in all KSUs
1794 wf_status_list = []
yshah564ec9c2024-11-29 07:33:32 +00001795 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001796 workflow_status = await self.check_workflow_and_update_db(
1797 op_id, workflow_name, db_ksu
1798 )
garciadeblasbc96f382025-01-22 16:02:18 +01001799 wf_status_list.append(workflow_status)
1800 # Update resource status in all KSUs
1801 # TODO: Is an operation correct if n KSUs are right and 1 is not OK?
1802 res_status_list = []
1803 for db_ksu, ksu_params, wf_status in zip(db_content, op_params, wf_status_list):
1804 if wf_status:
garciadeblas713e1962025-01-17 12:49:19 +01001805 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001806 "create_ksus", op_id, ksu_params, db_ksu
1807 )
garciadeblasbc96f382025-01-22 16:02:18 +01001808 else:
1809 resource_status = False
1810 res_status_list.append(resource_status)
garciadeblas96b94f52024-07-08 16:18:21 +02001811 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1812
garciadeblasd8429852024-10-17 15:30:30 +02001813 # Clean items used in the workflow, no matter if the workflow succeeded
1814 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001815 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001816 )
1817 self.logger.info(
1818 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1819 )
garciadeblasbc96f382025-01-22 16:02:18 +01001820 self.logger.info(f"KSU Create EXIT with Resource Status {res_status_list}")
yshah771dea82024-07-05 15:11:49 +00001821 return
1822
yshah564ec9c2024-11-29 07:33:32 +00001823 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001824 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001825 db_content = []
1826 op_params = []
1827 op_id = params["operation_id"]
1828 for ksu_id in params["ksus_list"]:
1829 self.initialize_operation(ksu_id, op_id)
1830 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1831 db_content.append(db_ksu)
1832 ksu_params = {}
1833 ksu_params = self.get_operation_params(db_ksu, op_id)
1834 # Update ksu_params["profile"] with profile name and age-pubkey
1835 profile_type = ksu_params["profile"]["profile_type"]
1836 profile_id = ksu_params["profile"]["_id"]
1837 profile_collection = self.profile_collection_mapping[profile_type]
1838 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1839 ksu_params["profile"]["name"] = db_profile["name"]
1840 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1841 # Update ksu_params["oka"] with sw_catalog_path (when missing)
garciadeblas8c9c5442025-01-17 01:06:05 +01001842 # TODO: remove this in favor of doing it in ODU workflow
yshah564ec9c2024-11-29 07:33:32 +00001843 for oka in ksu_params["oka"]:
1844 if "sw_catalog_path" not in oka:
1845 oka_id = oka["_id"]
1846 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001847 oka_type = MAP_PROFILE[
1848 db_oka.get("profile_type", "infra_controller_profiles")
1849 ]
garciadeblas8c9c5442025-01-17 01:06:05 +01001850 oka[
1851 "sw_catalog_path"
1852 ] = f"{oka_type}/{db_oka['git_name']}/templates"
yshah564ec9c2024-11-29 07:33:32 +00001853 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001854
garciadeblasadb81e82024-11-08 01:11:46 +01001855 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001856 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001857 )
yshah771dea82024-07-05 15:11:49 +00001858
yshah564ec9c2024-11-29 07:33:32 +00001859 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001860 workflow_status = await self.check_workflow_and_update_db(
1861 op_id, workflow_name, db_ksu
1862 )
yshah564ec9c2024-11-29 07:33:32 +00001863
garciadeblas96b94f52024-07-08 16:18:21 +02001864 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001865 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001866 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02001867 )
garciadeblas96b94f52024-07-08 16:18:21 +02001868 db_ksu["name"] = ksu_params["name"]
1869 db_ksu["description"] = ksu_params["description"]
1870 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
1871 "profile_type"
1872 ]
1873 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
1874 db_ksu["oka"] = ksu_params["oka"]
1875 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1876
yshah564ec9c2024-11-29 07:33:32 +00001877 # Clean items used in the workflow, no matter if the workflow succeeded
1878 clean_status, clean_msg = await self.odu.clean_items_workflow(
1879 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001880 )
1881 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00001882 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02001883 )
yshah564ec9c2024-11-29 07:33:32 +00001884 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001885 return
1886
yshah564ec9c2024-11-29 07:33:32 +00001887 async def delete(self, params, order_id):
1888 self.logger.info("ksu delete Enter")
1889 db_content = []
1890 op_params = []
1891 op_id = params["operation_id"]
1892 for ksu_id in params["ksus_list"]:
1893 self.initialize_operation(ksu_id, op_id)
1894 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1895 db_content.append(db_ksu)
1896 ksu_params = {}
1897 ksu_params["profile"] = {}
1898 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
1899 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
1900 # Update ksu_params["profile"] with profile name and age-pubkey
1901 profile_type = ksu_params["profile"]["profile_type"]
1902 profile_id = ksu_params["profile"]["_id"]
1903 profile_collection = self.profile_collection_mapping[profile_type]
1904 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1905 ksu_params["profile"]["name"] = db_profile["name"]
1906 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1907 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001908
garciadeblasadb81e82024-11-08 01:11:46 +01001909 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001910 "delete_ksus", op_id, op_params, db_content
1911 )
1912
1913 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001914 workflow_status = await self.check_workflow_and_update_db(
1915 op_id, workflow_name, db_ksu
1916 )
yshah564ec9c2024-11-29 07:33:32 +00001917
1918 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001919 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001920 "delete_ksus", op_id, ksu_params, db_ksu
1921 )
1922
1923 if resource_status:
1924 db_ksu["state"] == "DELETED"
1925 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1926 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
1927
1928 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
1929 return
1930
1931 async def clone(self, params, order_id):
1932 self.logger.info("ksu clone Enter")
1933 op_id = params["operation_id"]
1934 ksus_id = params["ksus_list"][0]
1935 self.initialize_operation(ksus_id, op_id)
1936 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1937 op_params = self.get_operation_params(db_content, op_id)
1938 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001939 "clone_ksus", op_id, op_params, db_content
1940 )
yshah564ec9c2024-11-29 07:33:32 +00001941
garciadeblas713e1962025-01-17 12:49:19 +01001942 workflow_status = await self.check_workflow_and_update_db(
1943 op_id, workflow_name, db_content
1944 )
yshah771dea82024-07-05 15:11:49 +00001945
1946 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001947 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001948 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001949 )
garciadeblas96b94f52024-07-08 16:18:21 +02001950 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001951
1952 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001953 return
1954
yshah564ec9c2024-11-29 07:33:32 +00001955 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001956 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00001957 op_id = params["operation_id"]
1958 ksus_id = params["ksus_list"][0]
1959 self.initialize_operation(ksus_id, op_id)
1960 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1961 op_params = self.get_operation_params(db_content, op_id)
garciadeblasadb81e82024-11-08 01:11:46 +01001962 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001963 "move_ksus", op_id, op_params, db_content
1964 )
yshah564ec9c2024-11-29 07:33:32 +00001965
garciadeblas713e1962025-01-17 12:49:19 +01001966 workflow_status = await self.check_workflow_and_update_db(
1967 op_id, workflow_name, db_content
1968 )
yshah771dea82024-07-05 15:11:49 +00001969
1970 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001971 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001972 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001973 )
garciadeblas96b94f52024-07-08 16:18:21 +02001974 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001975
1976 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001977 return
garciadeblasbc96f382025-01-22 16:02:18 +01001978
1979 async def check_create_ksus(self, op_id, op_params, content):
1980 self.logger.info(f"check_create_ksus Operation {op_id}. Params: {op_params}.")
1981 self.logger.debug(f"Content: {content}")
1982 db_ksu = content
1983 kustomization_name = db_ksu["git_name"].lower()
1984 oka_list = op_params["oka"]
1985 oka_item = oka_list[0]
1986 oka_params = oka_item.get("transformation", {})
1987 target_ns = oka_params.get("namespace", "default")
1988 profile_id = op_params.get("profile", {}).get("_id")
1989 profile_type = op_params.get("profile", {}).get("profile_type")
1990 self.logger.info(
1991 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
1992 )
1993 dbcluster_list = self.get_dbclusters_from_profile(profile_id, profile_type)
1994 if not dbcluster_list:
1995 self.logger.info(f"No clusters found for profile {profile_id}.")
1996 for db_cluster in dbcluster_list:
1997 try:
1998 self.logger.info(
1999 f"Checking status of KSU in cluster {db_cluster['name']}."
2000 )
2001 cluster_kubectl = self.cluster_kubectl(db_cluster)
2002 checkings_list = [
2003 {
2004 "item": "kustomization",
2005 "name": kustomization_name,
2006 "namespace": target_ns,
2007 "flag": "Ready",
2008 "timeout": self._checkloop_kustomization_timeout,
2009 "enable": True,
2010 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
2011 },
2012 ]
2013 self.logger.info(
2014 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2015 )
2016 result, message = await self.common_check_list(
2017 op_id, checkings_list, "ksus", db_ksu, kubectl=cluster_kubectl
2018 )
2019 if not result:
2020 return False, message
2021 except Exception as e:
2022 self.logger.error(
2023 f"Error checking KSU in cluster {db_cluster['name']}."
2024 )
2025 self.logger.error(e)
2026 return False, f"Error checking KSU in cluster {db_cluster['name']}."
2027 return True, "OK"
2028
2029 async def check_delete_ksus(self, op_id, op_params, content):
2030 self.logger.info(f"check_delete_ksus Operation {op_id}. Params: {op_params}.")
2031 self.logger.debug(f"Content: {content}")
2032 db_ksu = content
2033 kustomization_name = db_ksu["git_name"].lower()
2034 oka_list = db_ksu["oka"]
2035 oka_item = oka_list[0]
2036 oka_params = oka_item.get("transformation", {})
2037 target_ns = oka_params.get("namespace", "default")
2038 profile_id = op_params.get("profile", {}).get("_id")
2039 profile_type = op_params.get("profile", {}).get("profile_type")
2040 self.logger.info(
2041 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2042 )
2043 dbcluster_list = self.get_dbclusters_from_profile(profile_id, profile_type)
2044 if not dbcluster_list:
2045 self.logger.info(f"No clusters found for profile {profile_id}.")
2046 for db_cluster in dbcluster_list:
2047 try:
2048 self.logger.info(
2049 f"Checking status of KSU in cluster {db_cluster['name']}."
2050 )
2051 cluster_kubectl = self.cluster_kubectl(db_cluster)
2052 checkings_list = [
2053 {
2054 "item": "kustomization",
2055 "name": kustomization_name,
2056 "namespace": target_ns,
2057 "deleted": True,
2058 "timeout": self._checkloop_kustomization_timeout,
2059 "enable": True,
2060 "resourceState": "IN_PROGRESS.KUSTOMIZATION_DELETED",
2061 },
2062 ]
2063 self.logger.info(
2064 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2065 )
2066 result, message = await self.common_check_list(
2067 op_id, checkings_list, "ksus", db_ksu, kubectl=cluster_kubectl
2068 )
2069 if not result:
2070 return False, message
2071 except Exception as e:
2072 self.logger.error(
2073 f"Error checking KSU in cluster {db_cluster['name']}."
2074 )
2075 self.logger.error(e)
2076 return False, f"Error checking KSU in cluster {db_cluster['name']}."
2077 return True, "OK"