blob: 67173e7fd0d758339f953f443b37d4c03f28f259 [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
import copy
from copy import deepcopy
import logging
import os
import traceback
from time import time

import yaml

from osm_lcm import odu_workflows
from osm_lcm import vim_sdn
from osm_lcm.data_utils.list_utils import find_in_list
from osm_lcm.lcm_utils import LcmBase
from osm_lcm.n2vc.kubectl import Kubectl
rshri932105f2024-07-05 15:11:55 +000032
# NOTE(review): maps the profile reference fields of a cluster document to
# what look like per-profile-type folder/collection names on the GitOps side
# ("managed_resources" breaks the dashed naming of the other values — confirm
# whether that is intentional). The constant is not referenced in this chunk.
MAP_PROFILE = {
    "infra_controller_profiles": "infra-controllers",
    "infra_config_profiles": "infra-configs",
    "resource_profiles": "managed_resources",
    "app_profiles": "apps",
}
39
rshri932105f2024-07-05 15:11:55 +000040
class GitOpsLcm(LcmBase):
    """Common base class for GitOps-driven LCM handlers.

    Centralizes the machinery shared by the concrete handlers (clusters,
    profiles, ...): operation-history bookkeeping in the DB, workflow
    status checks, resource readiness loops and access to a registered
    cluster through kubectl.
    """

    # Default Mongo collection; concrete subclasses override this.
    db_collection = "gitops"
    workflow_status = None
    resource_status = None

    # Maps the profile reference fields of a cluster document to the
    # Mongo collection where that profile type is stored.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """Connect to database, filesystem storage and messaging.

        :param msg: message bus handler
        :param lcm_tasks: registry of running LCM tasks
        :param config: two-level configuration dictionary
        """
        self.logger = logging.getLogger("lcm.gitops")
        self.lcm_tasks = lcm_tasks
        self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
        # Readiness-loop timeouts, in seconds
        self._checkloop_kustomization_timeout = 900
        self._checkloop_resource_timeout = 900
        # Subclasses register their workflows here:
        # {workflow_key: {"check_resource_function": coroutine}}
        self._workflows = {}
        super().__init__(msg, self.logger)

    async def check_dummy_operation(self, op_id, op_params, content):
        """Fallback check used when a workflow registers no resource check."""
        self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
        return True, "OK"

    def initialize_operation(self, item_id, op_id):
        """Mark operation ``op_id`` of item ``item_id`` as started in the DB.

        Sets the initial workflow/resource/operation states of the history
        entry and records the operation as the item's current one.

        :raises ValueError: if ``op_id`` is not in the item's operationHistory
        """
        db_item = self.db.get_one(self.db_collection, {"_id": item_id})
        operation = next(
            (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
            None,
        )
        if operation is None:
            # Fail with a clear message instead of a TypeError on subscript
            raise ValueError(
                f"Operation {op_id} not found in operationHistory of item {item_id}"
            )
        operation["workflowState"] = "PROCESSING"
        operation["resourceState"] = "NOT_READY"
        operation["operationState"] = "IN_PROGRESS"
        operation["gitOperationInfo"] = None
        db_item["current_operation"] = operation["op_id"]
        self.db.set_one(self.db_collection, {"_id": item_id}, db_item)

    def get_operation_params(self, item, operation_id):
        """Return the operationParams of ``operation_id`` in ``item`` ({} if unset)."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationParams", {})

    def get_operation_type(self, item, operation_id):
        """Return the operationType (e.g. "create", "delete") of an operation."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationType", {})

    def update_state_operation_history(
        self, content, op_id, workflow_state=None, resource_state=None
    ):
        """Set the raw workflow/resource state strings of one history entry.

        Unlike :meth:`update_operation_history`, states are stored verbatim
        and no end date or result is recorded. Returns the updated content
        (not persisted here).
        """
        self.logger.info(
            f"Update state of operation {op_id} in Operation History in DB"
        )
        self.logger.info(
            f"Workflow state: {workflow_state}. Resource state: {resource_state}"
        )
        self.logger.debug(f"Content: {content}")

        op_num = 0
        for operation in content["operationHistory"]:
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                if workflow_state is not None:
                    operation["workflowState"] = workflow_state

                if resource_state is not None:
                    operation["resourceState"] = resource_state
                break
            op_num += 1
        self.logger.debug("content: {}".format(content))

        return content

    def update_operation_history(
        self, content, op_id, workflow_status=None, resource_status=None, op_end=True
    ):
        """Translate boolean workflow/resource outcomes into history states.

        ``workflow_status``/``resource_status`` are tri-state: None means
        "no update", True/False map to the COMPLETED/ERROR state machine.
        When ``op_end`` is True the operation endDate is stamped with the
        current time. Returns the updated content (not persisted here).
        """
        self.logger.info(
            f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
        )
        self.logger.debug(f"Content: {content}")

        op_num = 0
        for operation in content["operationHistory"]:
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                if workflow_status is not None:
                    if workflow_status:
                        operation["workflowState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["workflowState"] = "ERROR"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if resource_status is not None:
                    if resource_status:
                        operation["resourceState"] = "READY"
                        operation["operationState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["resourceState"] = "NOT_READY"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if op_end:
                    now = time()
                    operation["endDate"] = now
                break
            op_num += 1
        self.logger.debug("content: {}".format(content))

        return content

    async def check_workflow_and_update_db(self, op_id, workflow_name, db_content):
        """Wait for a workflow, then persist the resulting item state.

        Maps (operationType, workflow outcome) to the item "state" field and
        sets resourceState to GIT_SYNCED or ERROR. Returns the boolean
        workflow status.
        """
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            op_id, workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Message: {}".format(
                workflow_status, workflow_msg
            )
        )
        operation_type = self.get_operation_type(db_content, op_id)
        if operation_type == "create" and workflow_status:
            db_content["state"] = "CREATED"
        elif operation_type == "create" and not workflow_status:
            db_content["state"] = "FAILED_CREATION"
        elif operation_type == "delete" and workflow_status:
            db_content["state"] = "DELETED"
        elif operation_type == "delete" and not workflow_status:
            db_content["state"] = "FAILED_DELETION"

        if workflow_status:
            db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, None
        )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        return workflow_status

    async def check_resource_and_update_db(
        self, resource_name, op_id, op_params, db_content
    ):
        """Run the resource check of a workflow and update the item content.

        Only called after a successful workflow, hence workflow_status is
        hard-coded True. Returns (resource_status, updated content); the
        caller is responsible for persisting the content.
        """
        workflow_status = True

        resource_status, resource_msg = await self.check_resource_status(
            resource_name, op_id, op_params, db_content
        )
        self.logger.info(
            "Resource Status: {} Resource Message: {}".format(
                resource_status, resource_msg
            )
        )

        if resource_status:
            db_content["resourceState"] = "READY"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, resource_status
        )
        db_content["operatingState"] = "IDLE"
        db_content["current_operation"] = None
        return resource_status, db_content

    async def common_check_list(
        self, op_id, checkings_list, db_collection, db_item, kubectl=None
    ):
        """Run a list of readiness checks, persisting progress after each.

        Each checking dict drives one :meth:`odu.readiness_loop` call; after
        a successful check the item's resourceState is advanced and stored.
        Returns (True, "OK") or (False, error message) on the first failure
        or unexpected exception.
        """
        try:
            for checking in checkings_list:
                if checking["enable"]:
                    status, message = await self.odu.readiness_loop(
                        op_id=op_id,
                        item=checking["item"],
                        name=checking["name"],
                        namespace=checking["namespace"],
                        condition=checking.get("condition"),
                        deleted=checking.get("deleted", False),
                        timeout=checking["timeout"],
                        kubectl=kubectl,
                    )
                    if not status:
                        error_message = "Resources not ready: "
                        error_message += checking.get("error_message", "")
                        return status, f"{error_message}: {message}"
                    else:
                        db_item["resourceState"] = checking["resourceState"]
                        db_item = self.update_state_operation_history(
                            db_item, op_id, None, checking["resourceState"]
                        )
                        self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
        except Exception as e:
            self.logger.debug(traceback.format_exc())
            self.logger.debug(f"Exception: {e}", exc_info=True)
            return False, f"Unexpected exception: {e}"
        return True, "OK"

    async def check_resource_status(self, key, op_id, op_params, content):
        """Dispatch to the workflow's registered check_resource_function.

        Falls back to :meth:`check_dummy_operation` (always OK) when the
        workflow key has no registered check.
        """
        self.logger.info(
            f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}."
        )
        self.logger.debug(f"Check resource status. Content: {content}")
        check_resource_function = self._workflows.get(key, {}).get(
            "check_resource_function"
        )
        self.logger.info("check_resource function : {}".format(check_resource_function))
        if check_resource_function:
            return await check_resource_function(op_id, op_params, content)
        else:
            return await self.check_dummy_operation(op_id, op_params, content)

    def decrypted_copy(self, content, fields=None):
        """Return a deep copy of ``content`` with the given fields decrypted.

        The copy is intended to be passed to ODU workflows; the stored
        document (with encrypted fields) is left untouched.

        :param fields: field names to decrypt; defaults to the age key pair
            (default is built inside the function to avoid a mutable default
            argument)
        """
        if fields is None:
            fields = ["age_pubkey", "age_privkey"]
        # This deep copy is intended to be passed to ODU workflows.
        content_copy = copy.deepcopy(content)

        # decrypting the key
        self.db.encrypt_decrypt_fields(
            content_copy,
            "decrypt",
            fields,
            schema_version="1.11",
            salt=content_copy["_id"],
        )
        return content_copy

    def cluster_kubectl(self, db_cluster):
        """Return a Kubectl client configured for the given cluster.

        Dumps the cluster credentials (a kubeconfig, i.e. secrets) to
        /tmp/<cluster_id>_kubeconfig.yaml and points Kubectl at it.
        """
        cluster_kubeconfig = db_cluster["credentials"]
        kubeconfig_path = f"/tmp/{db_cluster['_id']}_kubeconfig.yaml"
        # Create the file with owner-only permissions: the kubeconfig holds
        # credentials and must not be world-readable.
        fd = os.open(kubeconfig_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, "w") as kubeconfig_file:
            yaml.safe_dump(cluster_kubeconfig, kubeconfig_file)
        return Kubectl(config_file=kubeconfig_path)
283
garciadeblas72412282024-11-07 12:41:54 +0100284
285class ClusterLcm(GitOpsLcm):
    # Mongo collection holding cluster documents (overrides the GitOpsLcm default)
    db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000287
288 def __init__(self, msg, lcm_tasks, config):
289 """
290 Init, Connect to database, filesystem storage, and messaging
291 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
292 :return: None
293 """
garciadeblas72412282024-11-07 12:41:54 +0100294 super().__init__(msg, lcm_tasks, config)
295 self._workflows = {
296 "create_cluster": {
297 "check_resource_function": self.check_create_cluster,
298 },
garciadeblasb0a42c22024-11-13 16:00:10 +0100299 "register_cluster": {
300 "check_resource_function": self.check_register_cluster,
301 },
302 "update_cluster": {
303 "check_resource_function": self.check_update_cluster,
304 },
garciadeblasbc96f382025-01-22 16:02:18 +0100305 "delete_cluster": {
306 "check_resource_function": self.check_delete_cluster,
307 },
garciadeblas72412282024-11-07 12:41:54 +0100308 }
rshri932105f2024-07-05 15:11:55 +0000309 self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
310
rshri948f7de2024-12-02 03:42:35 +0000311 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000312 self.logger.info("cluster Create Enter")
313
garciadeblas995cbf32024-12-18 12:54:00 +0100314 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000315 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000316 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000317
318 # To initialize the operation states
319 self.initialize_operation(cluster_id, op_id)
320
garciadeblas995cbf32024-12-18 12:54:00 +0100321 # To get the cluster
322 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
323
324 # To get the operation params details
325 op_params = self.get_operation_params(db_cluster, op_id)
326
327 # To copy the cluster content and decrypting fields to use in workflows
328 workflow_content = {
329 "cluster": self.decrypted_copy(db_cluster),
330 }
rshric3564942024-11-12 18:12:38 +0000331
rshri948f7de2024-12-02 03:42:35 +0000332 # To get the vim account details
rshric3564942024-11-12 18:12:38 +0000333 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +0100334 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +0000335
garciadeblasdc805482025-02-04 16:08:51 +0100336 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100337 "create_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200338 )
garciadeblasdc805482025-02-04 16:08:51 +0100339 if not workflow_res:
340 self.logger.error(f"Failed to launch workflow: {workflow_name}")
341 db_cluster["state"] = "FAILED_CREATION"
342 db_cluster["resourceState"] = "ERROR"
343 db_cluster = self.update_operation_history(
344 db_cluster, op_id, workflow_status=False, resource_status=None
345 )
346 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
347 # Clean items used in the workflow, no matter if the workflow succeeded
348 clean_status, clean_msg = await self.odu.clean_items_workflow(
349 "create_cluster", op_id, op_params, workflow_content
350 )
351 self.logger.info(
352 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
353 )
354 return
rshri932105f2024-07-05 15:11:55 +0000355
garciadeblas891f2002025-02-03 16:12:43 +0100356 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +0200357 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +0100358 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +0200359 )
rshri932105f2024-07-05 15:11:55 +0000360 self.logger.info(
garciadeblas891f2002025-02-03 16:12:43 +0100361 "workflow_status is: {} and workflow_msg is: {}".format(
rshri932105f2024-07-05 15:11:55 +0000362 workflow_status, workflow_msg
363 )
364 )
365 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200366 db_cluster["state"] = "CREATED"
367 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000368 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200369 db_cluster["state"] = "FAILED_CREATION"
370 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000371 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000372 db_cluster = self.update_operation_history(
373 db_cluster, op_id, workflow_status, None
374 )
garciadeblas96b94f52024-07-08 16:18:21 +0200375 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000376
garciadeblas28bff0f2024-09-16 12:53:07 +0200377 # Clean items used in the workflow, no matter if the workflow succeeded
378 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100379 "create_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +0200380 )
381 self.logger.info(
382 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
383 )
384
rshri932105f2024-07-05 15:11:55 +0000385 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100386 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100387 "create_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000388 )
389 self.logger.info(
390 "resource_status is :{} and resource_msg is :{}".format(
391 resource_status, resource_msg
392 )
393 )
394 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200395 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000396 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200397 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000398
garciadeblas96b94f52024-07-08 16:18:21 +0200399 db_cluster["operatingState"] = "IDLE"
400 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000401 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000402 )
shahithya70a3fc92024-11-12 11:01:05 +0000403 db_cluster["current_operation"] = None
garciadeblas3e5eeec2025-01-21 11:49:38 +0100404
405 # Retrieve credentials
406 cluster_creds = None
407 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
408 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
409 # TODO: manage the case where the credentials are not available
410 if result:
411 db_cluster["credentials"] = cluster_creds
412
413 # Update db_cluster
garciadeblas96b94f52024-07-08 16:18:21 +0200414 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
415 self.update_profile_state(db_cluster, workflow_status, resource_status)
rshri948f7de2024-12-02 03:42:35 +0000416
garciadeblas3e5eeec2025-01-21 11:49:38 +0100417 # Register the cluster in k8sclusters collection
rshri948f7de2024-12-02 03:42:35 +0000418 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
garciadeblas3e5eeec2025-01-21 11:49:38 +0100419 if cluster_creds:
rshri948f7de2024-12-02 03:42:35 +0000420 db_register["credentials"] = cluster_creds
garciadeblas3e5eeec2025-01-21 11:49:38 +0100421 # To call the lcm.py for registering the cluster in k8scluster lcm.
rshri948f7de2024-12-02 03:42:35 +0000422 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
423 register = await self.regist.create(db_register, order_id)
424 self.logger.debug(f"Register is : {register}")
425 else:
426 db_register["_admin"]["operationalState"] = "ERROR"
427 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
428 # To call the lcm.py for registering the cluster in k8scluster lcm.
429 db_register["credentials"] = cluster_creds
430 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
431
rshri932105f2024-07-05 15:11:55 +0000432 return
433
garciadeblas72412282024-11-07 12:41:54 +0100434 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100435 self.logger.info(
436 f"check_create_cluster Operation {op_id}. Params: {op_params}."
437 )
garciadeblas72412282024-11-07 12:41:54 +0100438 db_cluster = content["cluster"]
439 cluster_name = db_cluster["git_name"].lower()
440 cluster_kustomization_name = cluster_name
441 db_vim_account = content["vim_account"]
442 cloud_type = db_vim_account["vim_type"]
443 nodepool_name = ""
444 if cloud_type == "aws":
445 nodepool_name = f"{cluster_name}-nodegroup"
446 cluster_name = f"{cluster_name}-cluster"
447 elif cloud_type == "gcp":
448 nodepool_name = f"nodepool-{cluster_name}"
449 bootstrap = op_params.get("bootstrap", True)
450 if cloud_type in ("azure", "gcp", "aws"):
451 checkings_list = [
452 {
453 "item": "kustomization",
454 "name": cluster_kustomization_name,
455 "namespace": "managed-resources",
garciadeblas6c82c352025-01-27 16:53:45 +0100456 "condition": {
457 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
458 "value": "True",
459 },
yshahcb9075f2024-11-22 12:08:57 +0000460 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100461 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100462 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100463 },
464 {
465 "item": f"cluster_{cloud_type}",
466 "name": cluster_name,
467 "namespace": "",
garciadeblas6c82c352025-01-27 16:53:45 +0100468 "condition": {
469 "jsonpath_filter": "status.conditions[?(@.type=='Synced')].status",
470 "value": "True",
471 },
garciadeblas72412282024-11-07 12:41:54 +0100472 "timeout": self._checkloop_resource_timeout,
473 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100474 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100475 },
476 {
477 "item": f"cluster_{cloud_type}",
478 "name": cluster_name,
479 "namespace": "",
garciadeblas6c82c352025-01-27 16:53:45 +0100480 "condition": {
481 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
482 "value": "True",
483 },
garciadeblas72412282024-11-07 12:41:54 +0100484 "timeout": self._checkloop_resource_timeout,
485 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100486 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100487 },
488 {
489 "item": "kustomization",
490 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
491 "namespace": "managed-resources",
garciadeblas6c82c352025-01-27 16:53:45 +0100492 "condition": {
493 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
494 "value": "True",
495 },
yshahcb9075f2024-11-22 12:08:57 +0000496 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100497 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100498 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100499 },
500 ]
501 else:
502 return False, "Not suitable VIM account to check cluster status"
503 if nodepool_name:
504 nodepool_check = {
505 "item": f"nodepool_{cloud_type}",
506 "name": nodepool_name,
507 "namespace": "",
garciadeblas6c82c352025-01-27 16:53:45 +0100508 "condition": {
509 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
510 "value": "True",
511 },
garciadeblas72412282024-11-07 12:41:54 +0100512 "timeout": self._checkloop_resource_timeout,
513 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100514 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100515 }
516 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000517 return await self.common_check_list(
518 op_id, checkings_list, "clusters", db_cluster
519 )
garciadeblas72412282024-11-07 12:41:54 +0100520
garciadeblas96b94f52024-07-08 16:18:21 +0200521 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000522 profiles = [
523 "infra_controller_profiles",
524 "infra_config_profiles",
525 "app_profiles",
526 "resource_profiles",
527 ]
rshri948f7de2024-12-02 03:42:35 +0000528 """
rshri932105f2024-07-05 15:11:55 +0000529 profiles_collection = {
530 "infra_controller_profiles": "k8sinfra_controller",
531 "infra_config_profiles": "k8sinfra_config",
532 "app_profiles": "k8sapp",
533 "resource_profiles": "k8sresource",
534 }
rshri948f7de2024-12-02 03:42:35 +0000535 """
Your Name86149632024-11-14 16:17:16 +0000536 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000537 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200538 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000539 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000540 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000541 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200542 db_profile["state"] = db_cluster["state"]
543 db_profile["resourceState"] = db_cluster["resourceState"]
544 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000545 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000546 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000547 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000548 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000549 )
rshri932105f2024-07-05 15:11:55 +0000550 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
551
rshri948f7de2024-12-02 03:42:35 +0000552 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000553 self.logger.info("cluster delete Enter")
rshri948f7de2024-12-02 03:42:35 +0000554
garciadeblas995cbf32024-12-18 12:54:00 +0100555 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000556 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000557 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000558
559 # To initialize the operation states
560 self.initialize_operation(cluster_id, op_id)
561
garciadeblas995cbf32024-12-18 12:54:00 +0100562 # To get the cluster
563 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
564
565 # To get the operation params details
566 op_params = self.get_operation_params(db_cluster, op_id)
567
568 # To copy the cluster content and decrypting fields to use in workflows
569 workflow_content = {
570 "cluster": self.decrypted_copy(db_cluster),
571 }
rshri948f7de2024-12-02 03:42:35 +0000572
garciadeblasbc96f382025-01-22 16:02:18 +0100573 # To get the vim account details
574 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
575 workflow_content["vim_account"] = db_vim
576
garciadeblasdc805482025-02-04 16:08:51 +0100577 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100578 "delete_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200579 )
garciadeblasdc805482025-02-04 16:08:51 +0100580 if not workflow_res:
581 self.logger.error(f"Failed to launch workflow: {workflow_name}")
582 db_cluster["state"] = "FAILED_DELETION"
583 db_cluster["resourceState"] = "ERROR"
584 db_cluster = self.update_operation_history(
585 db_cluster, op_id, workflow_status=False, resource_status=None
586 )
587 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
588 # Clean items used in the workflow, no matter if the workflow succeeded
589 clean_status, clean_msg = await self.odu.clean_items_workflow(
590 "delete_cluster", op_id, op_params, workflow_content
591 )
592 self.logger.info(
593 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
594 )
595 return
rshri932105f2024-07-05 15:11:55 +0000596
garciadeblas891f2002025-02-03 16:12:43 +0100597 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +0200598 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +0100599 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +0200600 )
rshri932105f2024-07-05 15:11:55 +0000601 self.logger.info(
garciadeblas891f2002025-02-03 16:12:43 +0100602 "workflow_status is: {} and workflow_msg is: {}".format(
rshri932105f2024-07-05 15:11:55 +0000603 workflow_status, workflow_msg
604 )
605 )
606 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200607 db_cluster["state"] = "DELETED"
608 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000609 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200610 db_cluster["state"] = "FAILED_DELETION"
611 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000612 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000613 db_cluster = self.update_operation_history(
614 db_cluster, op_id, workflow_status, None
615 )
garciadeblas96b94f52024-07-08 16:18:21 +0200616 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000617
garciadeblas98f9a3d2024-12-10 13:42:47 +0100618 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
619 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100620 "delete_cluster", op_id, op_params, workflow_content
garciadeblas98f9a3d2024-12-10 13:42:47 +0100621 )
622 self.logger.info(
623 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
624 )
625
rshri932105f2024-07-05 15:11:55 +0000626 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100627 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100628 "delete_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000629 )
630 self.logger.info(
631 "resource_status is :{} and resource_msg is :{}".format(
632 resource_status, resource_msg
633 )
634 )
635 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200636 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000637 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200638 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000639
garciadeblas96b94f52024-07-08 16:18:21 +0200640 db_cluster["operatingState"] = "IDLE"
641 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000642 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +0200643 )
shahithya70a3fc92024-11-12 11:01:05 +0000644 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200645 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000646
garciadeblas96b94f52024-07-08 16:18:21 +0200647 # To delete it from DB
648 if db_cluster["state"] == "DELETED":
649 self.delete_cluster(db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000650
651 # To delete it from k8scluster collection
652 self.db.del_one("k8sclusters", {"name": db_cluster["name"]})
653
rshri932105f2024-07-05 15:11:55 +0000654 return
655
garciadeblasbc96f382025-01-22 16:02:18 +0100656 async def check_delete_cluster(self, op_id, op_params, content):
657 self.logger.info(
658 f"check_delete_cluster Operation {op_id}. Params: {op_params}."
659 )
660 self.logger.debug(f"Content: {content}")
661 db_cluster = content["cluster"]
662 cluster_name = db_cluster["git_name"].lower()
663 cluster_kustomization_name = cluster_name
664 db_vim_account = content["vim_account"]
665 cloud_type = db_vim_account["vim_type"]
666 if cloud_type == "aws":
667 cluster_name = f"{cluster_name}-cluster"
668 if cloud_type in ("azure", "gcp", "aws"):
669 checkings_list = [
670 {
671 "item": "kustomization",
672 "name": cluster_kustomization_name,
673 "namespace": "managed-resources",
674 "deleted": True,
675 "timeout": self._checkloop_kustomization_timeout,
676 "enable": True,
677 "resourceState": "IN_PROGRESS.KUSTOMIZATION_DELETED",
678 },
679 {
680 "item": f"cluster_{cloud_type}",
681 "name": cluster_name,
682 "namespace": "",
683 "deleted": True,
684 "timeout": self._checkloop_resource_timeout,
685 "enable": True,
686 "resourceState": "IN_PROGRESS.RESOURCE_DELETED.CLUSTER",
687 },
688 ]
689 else:
690 return False, "Not suitable VIM account to check cluster status"
691 return await self.common_check_list(
692 op_id, checkings_list, "clusters", db_cluster
693 )
694
garciadeblas96b94f52024-07-08 16:18:21 +0200695 def delete_cluster(self, db_cluster):
696 # Actually, item_content is equal to db_cluster
rshri932105f2024-07-05 15:11:55 +0000697 # detach profiles
698 update_dict = None
699 profiles_to_detach = [
700 "infra_controller_profiles",
701 "infra_config_profiles",
702 "app_profiles",
703 "resource_profiles",
704 ]
rshri948f7de2024-12-02 03:42:35 +0000705 """
rshri932105f2024-07-05 15:11:55 +0000706 profiles_collection = {
707 "infra_controller_profiles": "k8sinfra_controller",
708 "infra_config_profiles": "k8sinfra_config",
709 "app_profiles": "k8sapp",
710 "resource_profiles": "k8sresource",
711 }
rshri948f7de2024-12-02 03:42:35 +0000712 """
rshri932105f2024-07-05 15:11:55 +0000713 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200714 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200715 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000716 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000717 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000718 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000719 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200720 self.logger.debug("the db_profile is :{}".format(db_profile))
721 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200722 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000723 )
garciadeblasc2552852024-10-22 12:39:32 +0200724 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000725 "the db_profile name is :{}".format(db_profile["name"])
726 )
garciadeblas96b94f52024-07-08 16:18:21 +0200727 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000728 self.db.del_one(db_collection, {"_id": profile_id})
729 else:
rshri932105f2024-07-05 15:11:55 +0000730 profile_ids.remove(profile_id)
731 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000732 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200733 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000734 )
garciadeblas96b94f52024-07-08 16:18:21 +0200735 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000736
rshri948f7de2024-12-02 03:42:35 +0000737 async def attach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000738 self.logger.info("profile attach Enter")
rshri948f7de2024-12-02 03:42:35 +0000739
garciadeblas995cbf32024-12-18 12:54:00 +0100740 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000741 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000742 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000743
744 # To initialize the operation states
745 self.initialize_operation(cluster_id, op_id)
746
garciadeblas995cbf32024-12-18 12:54:00 +0100747 # To get the cluster
748 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
749
750 # To get the operation params details
751 op_params = self.get_operation_params(db_cluster, op_id)
752
753 # To copy the cluster content and decrypting fields to use in workflows
754 workflow_content = {
755 "cluster": self.decrypted_copy(db_cluster),
756 }
rshri948f7de2024-12-02 03:42:35 +0000757
758 # To get the profile details
759 profile_id = params["profile_id"]
760 profile_type = params["profile_type"]
761 profile_collection = self.profile_collection_mapping[profile_type]
762 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
763 db_profile["profile_type"] = profile_type
764 # content["profile"] = db_profile
garciadeblas995cbf32024-12-18 12:54:00 +0100765 workflow_content["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000766
garciadeblasdc805482025-02-04 16:08:51 +0100767 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100768 "attach_profile_to_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200769 )
garciadeblasdc805482025-02-04 16:08:51 +0100770 if not workflow_res:
771 self.logger.error(f"Failed to launch workflow: {workflow_name}")
772 db_cluster["resourceState"] = "ERROR"
773 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
774 db_cluster = self.update_operation_history(
775 db_cluster, op_id, workflow_status=False, resource_status=None
776 )
777 return
rshri932105f2024-07-05 15:11:55 +0000778
garciadeblas891f2002025-02-03 16:12:43 +0100779 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +0200780 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +0100781 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +0200782 )
rshri932105f2024-07-05 15:11:55 +0000783 self.logger.info(
garciadeblas891f2002025-02-03 16:12:43 +0100784 "workflow_status is: {} and workflow_msg is: {}".format(
rshri932105f2024-07-05 15:11:55 +0000785 workflow_status, workflow_msg
786 )
787 )
788 if workflow_status:
789 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
790 else:
791 db_cluster["resourceState"] = "ERROR"
792 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000793 db_cluster = self.update_operation_history(
794 db_cluster, op_id, workflow_status, None
795 )
rshri932105f2024-07-05 15:11:55 +0000796 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
797
798 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100799 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100800 "attach_profile_to_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000801 )
802 self.logger.info(
803 "resource_status is :{} and resource_msg is :{}".format(
804 resource_status, resource_msg
805 )
806 )
807 if resource_status:
808 db_cluster["resourceState"] = "READY"
809 else:
810 db_cluster["resourceState"] = "ERROR"
811
812 db_cluster["operatingState"] = "IDLE"
813 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000814 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000815 )
rshri932105f2024-07-05 15:11:55 +0000816 profile_list = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000817 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000818 profile_list.append(profile_id)
rshri932105f2024-07-05 15:11:55 +0000819 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000820 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000821 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
822
823 return
824
rshri948f7de2024-12-02 03:42:35 +0000825 async def detach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000826 self.logger.info("profile dettach Enter")
rshri948f7de2024-12-02 03:42:35 +0000827
garciadeblas995cbf32024-12-18 12:54:00 +0100828 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000829 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000830 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000831
832 # To initialize the operation states
833 self.initialize_operation(cluster_id, op_id)
834
garciadeblas995cbf32024-12-18 12:54:00 +0100835 # To get the cluster
836 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
837
838 # To get the operation params details
839 op_params = self.get_operation_params(db_cluster, op_id)
840
841 # To copy the cluster content and decrypting fields to use in workflows
842 workflow_content = {
843 "cluster": self.decrypted_copy(db_cluster),
844 }
rshri948f7de2024-12-02 03:42:35 +0000845
846 # To get the profile details
847 profile_id = params["profile_id"]
848 profile_type = params["profile_type"]
849 profile_collection = self.profile_collection_mapping[profile_type]
850 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
851 db_profile["profile_type"] = profile_type
garciadeblas995cbf32024-12-18 12:54:00 +0100852 workflow_content["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000853
garciadeblasdc805482025-02-04 16:08:51 +0100854 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100855 "detach_profile_from_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200856 )
garciadeblasdc805482025-02-04 16:08:51 +0100857 if not workflow_res:
858 self.logger.error(f"Failed to launch workflow: {workflow_name}")
859 db_cluster["resourceState"] = "ERROR"
860 db_cluster = self.update_operation_history(
861 db_cluster, op_id, workflow_status=False, resource_status=None
862 )
863 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
864 return
rshri932105f2024-07-05 15:11:55 +0000865
garciadeblas891f2002025-02-03 16:12:43 +0100866 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +0200867 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +0100868 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +0200869 )
rshri932105f2024-07-05 15:11:55 +0000870 self.logger.info(
garciadeblas891f2002025-02-03 16:12:43 +0100871 "workflow_status is: {} and workflow_msg is: {}".format(
rshri932105f2024-07-05 15:11:55 +0000872 workflow_status, workflow_msg
873 )
874 )
875 if workflow_status:
876 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
877 else:
878 db_cluster["resourceState"] = "ERROR"
879 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000880 db_cluster = self.update_operation_history(
881 db_cluster, op_id, workflow_status, None
882 )
rshri932105f2024-07-05 15:11:55 +0000883 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
884
885 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100886 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100887 "detach_profile_from_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000888 )
889 self.logger.info(
890 "resource_status is :{} and resource_msg is :{}".format(
891 resource_status, resource_msg
892 )
893 )
894 if resource_status:
895 db_cluster["resourceState"] = "READY"
896 else:
897 db_cluster["resourceState"] = "ERROR"
898
899 db_cluster["operatingState"] = "IDLE"
900 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000901 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000902 )
rshri932105f2024-07-05 15:11:55 +0000903 profile_list = db_cluster[profile_type]
904 self.logger.info("profile list is : {}".format(profile_list))
905 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000906 profile_list.remove(profile_id)
rshri932105f2024-07-05 15:11:55 +0000907 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000908 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000909 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
910
911 return
912
rshri948f7de2024-12-02 03:42:35 +0000913 async def register(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000914 self.logger.info("cluster register enter")
915
garciadeblas995cbf32024-12-18 12:54:00 +0100916 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000917 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000918 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000919
920 # To initialize the operation states
921 self.initialize_operation(cluster_id, op_id)
922
garciadeblas995cbf32024-12-18 12:54:00 +0100923 # To get the cluster
924 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
925
926 # To get the operation params details
927 op_params = self.get_operation_params(db_cluster, op_id)
928
929 # To copy the cluster content and decrypting fields to use in workflows
930 workflow_content = {
931 "cluster": self.decrypted_copy(db_cluster),
932 }
rshric3564942024-11-12 18:12:38 +0000933
garciadeblasdc805482025-02-04 16:08:51 +0100934 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100935 "register_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200936 )
garciadeblasdc805482025-02-04 16:08:51 +0100937 if not workflow_res:
938 self.logger.error(f"Failed to launch workflow: {workflow_name}")
939 db_cluster["state"] = "FAILED_CREATION"
940 db_cluster["resourceState"] = "ERROR"
941 db_cluster = self.update_operation_history(
942 db_cluster, op_id, workflow_status=False, resource_status=None
943 )
944 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
945 # Clean items used in the workflow, no matter if the workflow succeeded
946 clean_status, clean_msg = await self.odu.clean_items_workflow(
947 "register_cluster", op_id, op_params, workflow_content
948 )
949 self.logger.info(
950 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
951 )
952 return
rshri932105f2024-07-05 15:11:55 +0000953
garciadeblas891f2002025-02-03 16:12:43 +0100954 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +0200955 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +0100956 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +0200957 )
rshri932105f2024-07-05 15:11:55 +0000958 self.logger.info(
garciadeblas891f2002025-02-03 16:12:43 +0100959 "workflow_status is: {} and workflow_msg is: {}".format(
rshri932105f2024-07-05 15:11:55 +0000960 workflow_status, workflow_msg
961 )
962 )
963 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200964 db_cluster["state"] = "CREATED"
965 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000966 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200967 db_cluster["state"] = "FAILED_CREATION"
968 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000969 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000970 db_cluster = self.update_operation_history(
971 db_cluster, op_id, workflow_status, None
972 )
garciadeblas96b94f52024-07-08 16:18:21 +0200973 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000974
garciadeblasdde3a312024-09-17 13:25:06 +0200975 # Clean items used in the workflow, no matter if the workflow succeeded
976 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100977 "register_cluster", op_id, op_params, workflow_content
garciadeblasdde3a312024-09-17 13:25:06 +0200978 )
979 self.logger.info(
980 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
981 )
982
rshri932105f2024-07-05 15:11:55 +0000983 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100984 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100985 "register_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000986 )
987 self.logger.info(
988 "resource_status is :{} and resource_msg is :{}".format(
989 resource_status, resource_msg
990 )
991 )
992 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200993 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000994 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200995 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000996
garciadeblas96b94f52024-07-08 16:18:21 +0200997 db_cluster["operatingState"] = "IDLE"
998 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000999 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +00001000 )
shahithya70a3fc92024-11-12 11:01:05 +00001001 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001002 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001003
1004 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
1005 db_register["credentials"] = db_cluster["credentials"]
1006 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
1007
1008 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
1009 # To call the lcm.py for registering the cluster in k8scluster lcm.
1010 register = await self.regist.create(db_register, order_id)
1011 self.logger.debug(f"Register is : {register}")
1012 else:
1013 db_register["_admin"]["operationalState"] = "ERROR"
1014 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
1015
rshri932105f2024-07-05 15:11:55 +00001016 return
1017
garciadeblasbc96f382025-01-22 16:02:18 +01001018 async def check_register_cluster(self, op_id, op_params, content):
1019 self.logger.info(
1020 f"check_register_cluster Operation {op_id}. Params: {op_params}."
1021 )
1022 # self.logger.debug(f"Content: {content}")
1023 db_cluster = content["cluster"]
1024 cluster_name = db_cluster["git_name"].lower()
1025 cluster_kustomization_name = cluster_name
1026 bootstrap = op_params.get("bootstrap", True)
1027 checkings_list = [
1028 {
1029 "item": "kustomization",
1030 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
1031 "namespace": "managed-resources",
garciadeblas6c82c352025-01-27 16:53:45 +01001032 "condition": {
1033 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
1034 "value": "True",
1035 },
garciadeblasbc96f382025-01-22 16:02:18 +01001036 "timeout": self._checkloop_kustomization_timeout,
1037 "enable": bootstrap,
1038 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
1039 },
1040 ]
1041 return await self.common_check_list(
1042 op_id, checkings_list, "clusters", db_cluster
1043 )
1044
rshri948f7de2024-12-02 03:42:35 +00001045 async def deregister(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001046 self.logger.info("cluster deregister enter")
1047
garciadeblas995cbf32024-12-18 12:54:00 +01001048 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +00001049 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +00001050 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +00001051
1052 # To initialize the operation states
1053 self.initialize_operation(cluster_id, op_id)
1054
garciadeblas995cbf32024-12-18 12:54:00 +01001055 # To get the cluster
1056 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1057
1058 # To get the operation params details
1059 op_params = self.get_operation_params(db_cluster, op_id)
1060
1061 # To copy the cluster content and decrypting fields to use in workflows
1062 workflow_content = {
1063 "cluster": self.decrypted_copy(db_cluster),
1064 }
rshri932105f2024-07-05 15:11:55 +00001065
garciadeblasdc805482025-02-04 16:08:51 +01001066 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001067 "deregister_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001068 )
garciadeblasdc805482025-02-04 16:08:51 +01001069 if not workflow_res:
1070 self.logger.error(f"Failed to launch workflow: {workflow_name}")
1071 db_cluster["state"] = "FAILED_DELETION"
1072 db_cluster["resourceState"] = "ERROR"
1073 db_cluster = self.update_operation_history(
1074 db_cluster, op_id, workflow_status=False, resource_status=None
1075 )
1076 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
garciadeblasdc805482025-02-04 16:08:51 +01001077 return
rshri932105f2024-07-05 15:11:55 +00001078
garciadeblas891f2002025-02-03 16:12:43 +01001079 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +02001080 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +01001081 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +02001082 )
rshri932105f2024-07-05 15:11:55 +00001083 self.logger.info(
garciadeblas891f2002025-02-03 16:12:43 +01001084 "workflow_status is: {} and workflow_msg is: {}".format(
rshri932105f2024-07-05 15:11:55 +00001085 workflow_status, workflow_msg
1086 )
1087 )
1088 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001089 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +00001090 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001091 db_cluster["state"] = "FAILED_DELETION"
1092 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +00001093 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +00001094 db_cluster = self.update_operation_history(
1095 db_cluster, op_id, workflow_status, None
1096 )
garciadeblas96b94f52024-07-08 16:18:21 +02001097 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +00001098
1099 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001100 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +01001101 "deregister_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +00001102 )
1103 self.logger.info(
1104 "resource_status is :{} and resource_msg is :{}".format(
1105 resource_status, resource_msg
1106 )
1107 )
1108 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001109 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +00001110 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001111 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +00001112
garciadeblas96b94f52024-07-08 16:18:21 +02001113 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001114 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +02001115 )
1116 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +00001117
garciadeblas14984462025-02-05 09:32:52 +01001118 await self.delete(params, order_id)
1119 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
1120 clean_status, clean_msg = await self.odu.clean_items_workflow(
1121 "deregister_cluster", op_id, op_params, workflow_content
1122 )
1123 self.logger.info(
1124 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1125 )
1126 return
rshri932105f2024-07-05 15:11:55 +00001127
rshri948f7de2024-12-02 03:42:35 +00001128 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001129 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001130 cluster_id = params["cluster_id"]
1131 op_id = params["operation_id"]
1132 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001133 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1134 if result:
1135 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001136 op_len = 0
1137 for operations in db_cluster["operationHistory"]:
1138 if operations["op_id"] == op_id:
1139 db_cluster["operationHistory"][op_len]["result"] = result
1140 db_cluster["operationHistory"][op_len]["endDate"] = time()
1141 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001142 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001143 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001144 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001145 return
1146
rshri948f7de2024-12-02 03:42:35 +00001147 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001148 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001149 # To get the cluster details
1150 cluster_id = params["cluster_id"]
1151 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1152
1153 # To get the operation params details
1154 op_id = params["operation_id"]
1155 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001156
garciadeblas995cbf32024-12-18 12:54:00 +01001157 # To copy the cluster content and decrypting fields to use in workflows
1158 workflow_content = {
1159 "cluster": self.decrypted_copy(db_cluster),
1160 }
rshric3564942024-11-12 18:12:38 +00001161
1162 # vim account details
1163 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +01001164 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +00001165
garciadeblasdc805482025-02-04 16:08:51 +01001166 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001167 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001168 )
garciadeblasdc805482025-02-04 16:08:51 +01001169 if not workflow_res:
1170 self.logger.error(f"Failed to launch workflow: {workflow_name}")
1171 db_cluster["resourceState"] = "ERROR"
1172 db_cluster = self.update_operation_history(
1173 db_cluster, op_id, workflow_status=False, resource_status=None
1174 )
1175 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1176 # Clean items used in the workflow, no matter if the workflow succeeded
1177 clean_status, clean_msg = await self.odu.clean_items_workflow(
1178 "update_cluster", op_id, op_params, workflow_content
1179 )
1180 self.logger.info(
1181 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1182 )
1183 return
garciadeblas891f2002025-02-03 16:12:43 +01001184 self.logger.info("workflow_name is: {}".format(workflow_name))
garciadeblas96b94f52024-07-08 16:18:21 +02001185 workflow_status, workflow_msg = await self.odu.check_workflow_status(
garciadeblas36fe58b2025-02-05 16:36:17 +01001186 op_id, workflow_name
garciadeblas96b94f52024-07-08 16:18:21 +02001187 )
1188 self.logger.info(
1189 "Workflow Status: {} Workflow Message: {}".format(
1190 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001191 )
garciadeblas96b94f52024-07-08 16:18:21 +02001192 )
1193
1194 if workflow_status:
1195 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1196 else:
1197 db_cluster["resourceState"] = "ERROR"
1198
yshahcb9075f2024-11-22 12:08:57 +00001199 db_cluster = self.update_operation_history(
1200 db_cluster, op_id, workflow_status, None
1201 )
garciadeblas96b94f52024-07-08 16:18:21 +02001202 # self.logger.info("Db content: {}".format(db_content))
1203 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1204 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1205
garciadeblas28bff0f2024-09-16 12:53:07 +02001206 # Clean items used in the workflow, no matter if the workflow succeeded
1207 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001208 "update_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001209 )
1210 self.logger.info(
1211 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1212 )
garciadeblas96b94f52024-07-08 16:18:21 +02001213 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001214 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +01001215 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001216 )
1217 self.logger.info(
1218 "Resource Status: {} Resource Message: {}".format(
1219 resource_status, resource_msg
1220 )
1221 )
yshah771dea82024-07-05 15:11:49 +00001222
1223 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001224 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001225 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001226 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001227
yshah0defcd52024-11-18 07:41:35 +00001228 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001229 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001230 )
1231
garciadeblas96b94f52024-07-08 16:18:21 +02001232 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001233 # self.logger.info("db_cluster: {}".format(db_cluster))
garciadeblas6c82c352025-01-27 16:53:45 +01001234 # TODO: verify condition
garciadeblas96b94f52024-07-08 16:18:21 +02001235 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1236 if workflow_status:
1237 if "k8s_version" in op_params:
1238 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001239 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001240 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001241 if "node_size" in op_params:
1242 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001243 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001244 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001245 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001246 return
1247
garciadeblasbc96f382025-01-22 16:02:18 +01001248 async def check_update_cluster(self, op_id, op_params, content):
1249 self.logger.info(
1250 f"check_update_cluster Operation {op_id}. Params: {op_params}."
1251 )
1252 self.logger.debug(f"Content: {content}")
garciadeblas39eb5092025-01-27 18:31:06 +01001253 # return await self.check_dummy_operation(op_id, op_params, content)
1254 db_cluster = content["cluster"]
1255 cluster_name = db_cluster["git_name"].lower()
1256 cluster_kustomization_name = cluster_name
1257 db_vim_account = content["vim_account"]
1258 cloud_type = db_vim_account["vim_type"]
1259 if cloud_type == "aws":
1260 cluster_name = f"{cluster_name}-cluster"
1261 if cloud_type in ("azure", "gcp", "aws"):
1262 checkings_list = [
1263 {
1264 "item": "kustomization",
1265 "name": cluster_kustomization_name,
1266 "namespace": "managed-resources",
1267 "condition": {
1268 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
1269 "value": "True",
1270 },
1271 "timeout": self._checkloop_kustomization_timeout,
1272 "enable": True,
1273 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
1274 },
1275 ]
1276 else:
1277 return False, "Not suitable VIM account to check cluster status"
1278 # Scale operation
1279 if "node_count" in op_params:
1280 checkings_list.append(
1281 {
1282 "item": f"cluster_{cloud_type}",
1283 "name": cluster_name,
1284 "namespace": "",
1285 "condition": {
1286 "jsonpath_filter": "status.atProvider.defaultNodePool[0].nodeCount",
1287 "value": f"{op_params['node_count']}",
1288 },
1289 "timeout": self._checkloop_resource_timeout * 2,
1290 "enable": True,
1291 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODE_COUNT.CLUSTER",
1292 }
1293 )
1294 # Upgrade operation
1295 if "k8s_version" in op_params:
1296 checkings_list.append(
1297 {
1298 "item": f"cluster_{cloud_type}",
1299 "name": cluster_name,
1300 "namespace": "",
1301 "condition": {
1302 "jsonpath_filter": "status.atProvider.defaultNodePool[0].orchestratorVersion",
1303 "value": op_params["k8s_version"],
1304 },
1305 "timeout": self._checkloop_resource_timeout * 2,
1306 "enable": True,
1307 "resourceState": "IN_PROGRESS.RESOURCE_READY.K8S_VERSION.CLUSTER",
1308 }
1309 )
1310 return await self.common_check_list(
1311 op_id, checkings_list, "clusters", db_cluster
1312 )
garciadeblasbc96f382025-01-22 16:02:18 +01001313
yshah771dea82024-07-05 15:11:49 +00001314
class CloudCredentialsLcm(GitOpsLcm):
    """LCM handler for cloud credentials stored in "vim_accounts".

    Each operation launches the matching ODU workflow and, when the workflow
    succeeds, verifies the resulting resources before touching the database.
    """

    db_collection = "vim_accounts"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    def _decrypt_credentials(self, db_content, vim_id):
        # Decrypt password/secret fields in place so the workflow can use them.
        vim_config = db_content.get("config", {})
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

    async def add(self, params, order_id):
        """Create cloud credentials via the "create_cloud_credentials" workflow."""
        self.logger.info("Cloud Credentials create")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
        self._decrypt_credentials(db_content, vim_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            op_id, workflow_name
        )

        self.logger.info(
            f"Workflow Status: {workflow_status} Workflow Msg: {workflow_msg}"
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                f"Resource Status: {resource_status} Resource Message: {resource_msg}"
            )

        # NOTE(review): the account is marked ENABLED regardless of the
        # resource check outcome above — confirm this is intended.
        db_content["_admin"]["operationalState"] = "ENABLED"
        for operation in db_content["_admin"]["operations"]:
            if operation["lcmOperationType"] == "create":
                operation["operationState"] = "ENABLED"
        self.logger.info(f"Content : {db_content}")
        self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
        return

    async def edit(self, params, order_id):
        """Update cloud credentials via the "update_cloud_credentials" workflow."""
        self.logger.info("Cloud Credentials Update")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
        self._decrypt_credentials(db_content, vim_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            op_id, workflow_name
        )
        self.logger.info(
            f"Workflow Status: {workflow_status} Workflow Msg: {workflow_msg}"
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "update_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                f"Resource Status: {resource_status} Resource Message: {resource_msg}"
            )
        return

    async def remove(self, params, order_id):
        """Delete cloud credentials via the "delete_cloud_credentials" workflow."""
        self.logger.info("Cloud Credentials remove")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "delete_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            op_id, workflow_name
        )
        self.logger.info(
            f"Workflow Status: {workflow_status} Workflow Msg: {workflow_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                f"Resource Status: {resource_status} Resource Message: {resource_msg}"
            )
        # NOTE(review): the DB record is removed even when the workflow
        # reported failure — confirm this best-effort delete is intended.
        self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        return
1451
rshri932105f2024-07-05 15:11:55 +00001452
class K8sAppLcm(GitOpsLcm):
    """LCM handler for Kubernetes application profiles ("k8sapp" collection).

    Create/delete are delegated to ODU workflows; this class tracks workflow
    and resource status and keeps the DB record in sync.
    """

    db_collection = "k8sapp"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an app profile by launching the "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("App Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        content["profile_type"] = "applications"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sapp", {"_id": content["_id"]}, content)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: initialize so the exit log below cannot raise UnboundLocalError
        # when the workflow fails (the resource check is skipped then).
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(f"App Create Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an app profile by launching the "delete_profile" workflow.

        The DB record is removed only when the resource check succeeds.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("App delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: avoid UnboundLocalError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(f"App Delete Exit with resource status: {resource_status}")
        return
1527
1528
class K8sResourceLcm(GitOpsLcm):
    """LCM handler for managed-resource profiles ("k8sresource" collection).

    Create/delete are delegated to ODU workflows; this class tracks workflow
    and resource status and keeps the DB record in sync.
    """

    db_collection = "k8sresource"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create a resource profile by launching the "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("Resource Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sresource", {"_id": profile_id})
        content["profile_type"] = "managed-resources"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sresource", {"_id": content["_id"]}, content)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: initialize so the exit log below cannot raise UnboundLocalError
        # when the workflow fails (the resource check is skipped then).
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Resource Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete a resource profile by launching the "delete_profile" workflow.

        The DB record is removed only when the resource check succeeds.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("Resource delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sresource", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: avoid UnboundLocalError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(
            f"Resource Delete Exit with resource status: {resource_status}"
        )
        return
1607
1608
class K8sInfraControllerLcm(GitOpsLcm):
    """LCM handler for infra-controller profiles ("k8sinfra_controller").

    Create/delete are delegated to ODU workflows; this class tracks workflow
    and resource status and keeps the DB record in sync.
    """

    db_collection = "k8sinfra_controller"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an infra-controller profile via the "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("Infra controller Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
        content["profile_type"] = "infra-controllers"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sinfra_controller", {"_id": content["_id"]}, content)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: initialize so the exit log below cannot raise UnboundLocalError
        # when the workflow fails (the resource check is skipped then).
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Infra Controller Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete an infra-controller profile via the "delete_profile" workflow.

        The DB record is removed only when the resource check succeeds.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("Infra controller delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: avoid UnboundLocalError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(
            f"Infra Controller Delete Exit with resource status: {resource_status}"
        )
        return
1687
1688
class K8sInfraConfigLcm(GitOpsLcm):
    """LCM handler for infra-config profiles ("k8sinfra_config").

    Create/delete are delegated to ODU workflows; this class tracks workflow
    and resource status and keeps the DB record in sync.
    """

    db_collection = "k8sinfra_config"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an infra-config profile via the "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("Infra config Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
        content["profile_type"] = "infra-configs"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sinfra_config", {"_id": content["_id"]}, content)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: initialize so the exit log below cannot raise UnboundLocalError
        # when the workflow fails (the resource check is skipped then).
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Infra Config Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete an infra-config profile via the "delete_profile" workflow.

        The DB record is removed only when the resource check succeeds.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("Infra config delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is: {}".format(workflow_name))

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        # Fix: avoid UnboundLocalError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource_and_update_db(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(
            f"Infra Config Delete Exit with resource status: {resource_status}"
        )

        return
yshah771dea82024-07-05 15:11:49 +00001768
1769
class OkaLcm(GitOpsLcm):
    """LCM handler for OSM Kubernetes Applications (OKA, "okas" collection).

    Create/edit/delete are delegated to ODU workflows; this class tracks
    workflow and resource status and keeps the DB record in sync.
    """

    db_collection = "okas"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an OKA by launching the "create_oka" workflow.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("OKA Create Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "create_oka", op_id, op_params, db_content
        )

        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, db_content
        )

        # Fix: initialize so the exit log below cannot raise UnboundLocalError
        # when the workflow fails (the resource check is skipped then).
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource_and_update_db(
                "create_oka", op_id, op_params, db_content
            )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
        return

    async def edit(self, params, order_id):
        """Update an OKA by launching the "update_oka" workflow.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("OKA Edit Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "update_oka", op_id, op_params, db_content
        )
        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, db_content
        )

        # Fix: avoid UnboundLocalError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource_and_update_db(
                "update_oka", op_id, op_params, db_content
            )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an OKA by launching the "delete_oka" workflow.

        The DB record is removed only when the resource check succeeds.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: message order identifier (kept for the LCM task API)
        """
        self.logger.info("OKA delete Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        workflow_res, workflow_name = await self.odu.launch_workflow(
            "delete_oka", op_id, op_params, db_content
        )
        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, db_content
        )

        # Fix: avoid UnboundLocalError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource_and_update_db(
                "delete_oka", op_id, op_params, db_content
            )

        if resource_status:
            # Fix: was a no-op equality comparison (==) instead of assignment,
            # so the record was never marked DELETED before removal.
            db_content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
            self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
        return
1854
1855
garciadeblas72412282024-11-07 12:41:54 +01001856class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001857 db_collection = "ksus"
1858
1859 def __init__(self, msg, lcm_tasks, config):
1860 """
1861 Init, Connect to database, filesystem storage, and messaging
1862 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1863 :return: None
1864 """
garciadeblas72412282024-11-07 12:41:54 +01001865 super().__init__(msg, lcm_tasks, config)
garciadeblasbc96f382025-01-22 16:02:18 +01001866 self._workflows = {
1867 "create_ksus": {
1868 "check_resource_function": self.check_create_ksus,
1869 },
1870 "delete_ksus": {
1871 "check_resource_function": self.check_delete_ksus,
1872 },
1873 }
1874
1875 def get_dbclusters_from_profile(self, profile_id, profile_type):
1876 cluster_list = []
1877 db_clusters = self.db.get_list("clusters")
1878 self.logger.info(f"Getting list of clusters for {profile_type} {profile_id}")
1879 for db_cluster in db_clusters:
1880 if profile_id in db_cluster.get(profile_type, []):
1881 self.logger.info(
1882 f"Profile {profile_id} found in cluster {db_cluster['name']}"
1883 )
1884 cluster_list.append(db_cluster)
1885 return cluster_list
yshah771dea82024-07-05 15:11:49 +00001886
yshah564ec9c2024-11-29 07:33:32 +00001887 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001888 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001889 db_content = []
1890 op_params = []
1891 op_id = params["operation_id"]
1892 for ksu_id in params["ksus_list"]:
1893 self.logger.info("Ksu ID: {}".format(ksu_id))
1894 self.initialize_operation(ksu_id, op_id)
1895 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1896 self.logger.info("Db KSU: {}".format(db_ksu))
1897 db_content.append(db_ksu)
1898 ksu_params = {}
1899 ksu_params = self.get_operation_params(db_ksu, op_id)
1900 self.logger.info("Operation Params: {}".format(ksu_params))
1901 # Update ksu_params["profile"] with profile name and age-pubkey
1902 profile_type = ksu_params["profile"]["profile_type"]
1903 profile_id = ksu_params["profile"]["_id"]
1904 profile_collection = self.profile_collection_mapping[profile_type]
1905 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1906 ksu_params["profile"]["name"] = db_profile["name"]
1907 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1908 # Update ksu_params["oka"] with sw_catalog_path (when missing)
garciadeblas8c9c5442025-01-17 01:06:05 +01001909 # TODO: remove this in favor of doing it in ODU workflow
yshah564ec9c2024-11-29 07:33:32 +00001910 for oka in ksu_params["oka"]:
1911 if "sw_catalog_path" not in oka:
1912 oka_id = oka["_id"]
1913 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001914 oka_type = MAP_PROFILE[
1915 db_oka.get("profile_type", "infra_controller_profiles")
1916 ]
garciadeblas8c9c5442025-01-17 01:06:05 +01001917 oka[
1918 "sw_catalog_path"
garciadeblas1ad4e882025-01-24 14:24:41 +01001919 ] = f"{oka_type}/{db_oka['git_name'].lower()}/templates"
yshah564ec9c2024-11-29 07:33:32 +00001920 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001921
garciadeblasbc96f382025-01-22 16:02:18 +01001922 # A single workflow is launched for all KSUs
garciadeblasdc805482025-02-04 16:08:51 +01001923 workflow_res, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001924 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001925 )
garciadeblasbc96f382025-01-22 16:02:18 +01001926 # Update workflow status in all KSUs
1927 wf_status_list = []
yshah564ec9c2024-11-29 07:33:32 +00001928 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001929 workflow_status = await self.check_workflow_and_update_db(
1930 op_id, workflow_name, db_ksu
1931 )
garciadeblasbc96f382025-01-22 16:02:18 +01001932 wf_status_list.append(workflow_status)
1933 # Update resource status in all KSUs
1934 # TODO: Is an operation correct if n KSUs are right and 1 is not OK?
1935 res_status_list = []
1936 for db_ksu, ksu_params, wf_status in zip(db_content, op_params, wf_status_list):
1937 if wf_status:
garciadeblas713e1962025-01-17 12:49:19 +01001938 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001939 "create_ksus", op_id, ksu_params, db_ksu
1940 )
garciadeblasbc96f382025-01-22 16:02:18 +01001941 else:
1942 resource_status = False
1943 res_status_list.append(resource_status)
garciadeblas96b94f52024-07-08 16:18:21 +02001944 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1945
garciadeblasd8429852024-10-17 15:30:30 +02001946 # Clean items used in the workflow, no matter if the workflow succeeded
1947 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001948 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001949 )
1950 self.logger.info(
1951 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1952 )
garciadeblasbc96f382025-01-22 16:02:18 +01001953 self.logger.info(f"KSU Create EXIT with Resource Status {res_status_list}")
yshah771dea82024-07-05 15:11:49 +00001954 return
1955
yshah564ec9c2024-11-29 07:33:32 +00001956 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001957 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001958 db_content = []
1959 op_params = []
1960 op_id = params["operation_id"]
1961 for ksu_id in params["ksus_list"]:
1962 self.initialize_operation(ksu_id, op_id)
1963 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1964 db_content.append(db_ksu)
1965 ksu_params = {}
1966 ksu_params = self.get_operation_params(db_ksu, op_id)
1967 # Update ksu_params["profile"] with profile name and age-pubkey
1968 profile_type = ksu_params["profile"]["profile_type"]
1969 profile_id = ksu_params["profile"]["_id"]
1970 profile_collection = self.profile_collection_mapping[profile_type]
1971 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1972 ksu_params["profile"]["name"] = db_profile["name"]
1973 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1974 # Update ksu_params["oka"] with sw_catalog_path (when missing)
garciadeblas8c9c5442025-01-17 01:06:05 +01001975 # TODO: remove this in favor of doing it in ODU workflow
yshah564ec9c2024-11-29 07:33:32 +00001976 for oka in ksu_params["oka"]:
1977 if "sw_catalog_path" not in oka:
1978 oka_id = oka["_id"]
1979 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001980 oka_type = MAP_PROFILE[
1981 db_oka.get("profile_type", "infra_controller_profiles")
1982 ]
garciadeblas8c9c5442025-01-17 01:06:05 +01001983 oka[
1984 "sw_catalog_path"
1985 ] = f"{oka_type}/{db_oka['git_name']}/templates"
yshah564ec9c2024-11-29 07:33:32 +00001986 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001987
garciadeblasdc805482025-02-04 16:08:51 +01001988 workflow_res, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001989 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001990 )
yshah771dea82024-07-05 15:11:49 +00001991
yshah564ec9c2024-11-29 07:33:32 +00001992 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001993 workflow_status = await self.check_workflow_and_update_db(
1994 op_id, workflow_name, db_ksu
1995 )
yshah564ec9c2024-11-29 07:33:32 +00001996
garciadeblas96b94f52024-07-08 16:18:21 +02001997 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001998 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001999 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02002000 )
garciadeblas96b94f52024-07-08 16:18:21 +02002001 db_ksu["name"] = ksu_params["name"]
2002 db_ksu["description"] = ksu_params["description"]
2003 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
2004 "profile_type"
2005 ]
2006 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
2007 db_ksu["oka"] = ksu_params["oka"]
2008 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
2009
yshah564ec9c2024-11-29 07:33:32 +00002010 # Clean items used in the workflow, no matter if the workflow succeeded
2011 clean_status, clean_msg = await self.odu.clean_items_workflow(
2012 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02002013 )
2014 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00002015 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02002016 )
yshah564ec9c2024-11-29 07:33:32 +00002017 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00002018 return
2019
yshah564ec9c2024-11-29 07:33:32 +00002020 async def delete(self, params, order_id):
2021 self.logger.info("ksu delete Enter")
2022 db_content = []
2023 op_params = []
2024 op_id = params["operation_id"]
2025 for ksu_id in params["ksus_list"]:
2026 self.initialize_operation(ksu_id, op_id)
2027 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
2028 db_content.append(db_ksu)
2029 ksu_params = {}
2030 ksu_params["profile"] = {}
2031 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
2032 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
2033 # Update ksu_params["profile"] with profile name and age-pubkey
2034 profile_type = ksu_params["profile"]["profile_type"]
2035 profile_id = ksu_params["profile"]["_id"]
2036 profile_collection = self.profile_collection_mapping[profile_type]
2037 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
2038 ksu_params["profile"]["name"] = db_profile["name"]
2039 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
2040 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00002041
garciadeblasdc805482025-02-04 16:08:51 +01002042 workflow_res, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00002043 "delete_ksus", op_id, op_params, db_content
2044 )
2045
2046 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01002047 workflow_status = await self.check_workflow_and_update_db(
2048 op_id, workflow_name, db_ksu
2049 )
yshah564ec9c2024-11-29 07:33:32 +00002050
2051 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01002052 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00002053 "delete_ksus", op_id, ksu_params, db_ksu
2054 )
2055
2056 if resource_status:
2057 db_ksu["state"] == "DELETED"
2058 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
2059 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
2060
2061 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
2062 return
2063
2064 async def clone(self, params, order_id):
2065 self.logger.info("ksu clone Enter")
2066 op_id = params["operation_id"]
2067 ksus_id = params["ksus_list"][0]
2068 self.initialize_operation(ksus_id, op_id)
2069 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
2070 op_params = self.get_operation_params(db_content, op_id)
garciadeblasdc805482025-02-04 16:08:51 +01002071 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02002072 "clone_ksus", op_id, op_params, db_content
2073 )
yshah564ec9c2024-11-29 07:33:32 +00002074
garciadeblas713e1962025-01-17 12:49:19 +01002075 workflow_status = await self.check_workflow_and_update_db(
2076 op_id, workflow_name, db_content
2077 )
yshah771dea82024-07-05 15:11:49 +00002078
2079 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01002080 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02002081 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00002082 )
garciadeblas96b94f52024-07-08 16:18:21 +02002083 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00002084
2085 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00002086 return
2087
yshah564ec9c2024-11-29 07:33:32 +00002088 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02002089 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00002090 op_id = params["operation_id"]
2091 ksus_id = params["ksus_list"][0]
2092 self.initialize_operation(ksus_id, op_id)
2093 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
2094 op_params = self.get_operation_params(db_content, op_id)
garciadeblasdc805482025-02-04 16:08:51 +01002095 workflow_res, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02002096 "move_ksus", op_id, op_params, db_content
2097 )
yshah564ec9c2024-11-29 07:33:32 +00002098
garciadeblas713e1962025-01-17 12:49:19 +01002099 workflow_status = await self.check_workflow_and_update_db(
2100 op_id, workflow_name, db_content
2101 )
yshah771dea82024-07-05 15:11:49 +00002102
2103 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01002104 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02002105 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00002106 )
garciadeblas96b94f52024-07-08 16:18:21 +02002107 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00002108
2109 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00002110 return
garciadeblasbc96f382025-01-22 16:02:18 +01002111
2112 async def check_create_ksus(self, op_id, op_params, content):
2113 self.logger.info(f"check_create_ksus Operation {op_id}. Params: {op_params}.")
2114 self.logger.debug(f"Content: {content}")
2115 db_ksu = content
2116 kustomization_name = db_ksu["git_name"].lower()
2117 oka_list = op_params["oka"]
2118 oka_item = oka_list[0]
2119 oka_params = oka_item.get("transformation", {})
2120 target_ns = oka_params.get("namespace", "default")
2121 profile_id = op_params.get("profile", {}).get("_id")
2122 profile_type = op_params.get("profile", {}).get("profile_type")
2123 self.logger.info(
2124 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2125 )
2126 dbcluster_list = self.get_dbclusters_from_profile(profile_id, profile_type)
2127 if not dbcluster_list:
2128 self.logger.info(f"No clusters found for profile {profile_id}.")
2129 for db_cluster in dbcluster_list:
2130 try:
2131 self.logger.info(
garciadeblase3462922025-02-03 08:44:19 +01002132 f"Checking status of KSU {db_ksu['name']} in cluster {db_cluster['name']}."
garciadeblasbc96f382025-01-22 16:02:18 +01002133 )
2134 cluster_kubectl = self.cluster_kubectl(db_cluster)
2135 checkings_list = [
2136 {
2137 "item": "kustomization",
2138 "name": kustomization_name,
2139 "namespace": target_ns,
garciadeblas6c82c352025-01-27 16:53:45 +01002140 "condition": {
2141 "jsonpath_filter": "status.conditions[?(@.type=='Ready')].status",
2142 "value": "True",
2143 },
garciadeblasbc96f382025-01-22 16:02:18 +01002144 "timeout": self._checkloop_kustomization_timeout,
2145 "enable": True,
2146 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
2147 },
2148 ]
2149 self.logger.info(
2150 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2151 )
2152 result, message = await self.common_check_list(
2153 op_id, checkings_list, "ksus", db_ksu, kubectl=cluster_kubectl
2154 )
2155 if not result:
2156 return False, message
2157 except Exception as e:
2158 self.logger.error(
2159 f"Error checking KSU in cluster {db_cluster['name']}."
2160 )
2161 self.logger.error(e)
2162 return False, f"Error checking KSU in cluster {db_cluster['name']}."
2163 return True, "OK"
2164
2165 async def check_delete_ksus(self, op_id, op_params, content):
2166 self.logger.info(f"check_delete_ksus Operation {op_id}. Params: {op_params}.")
2167 self.logger.debug(f"Content: {content}")
2168 db_ksu = content
2169 kustomization_name = db_ksu["git_name"].lower()
2170 oka_list = db_ksu["oka"]
2171 oka_item = oka_list[0]
2172 oka_params = oka_item.get("transformation", {})
2173 target_ns = oka_params.get("namespace", "default")
2174 profile_id = op_params.get("profile", {}).get("_id")
2175 profile_type = op_params.get("profile", {}).get("profile_type")
2176 self.logger.info(
2177 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2178 )
2179 dbcluster_list = self.get_dbclusters_from_profile(profile_id, profile_type)
2180 if not dbcluster_list:
2181 self.logger.info(f"No clusters found for profile {profile_id}.")
2182 for db_cluster in dbcluster_list:
2183 try:
2184 self.logger.info(
2185 f"Checking status of KSU in cluster {db_cluster['name']}."
2186 )
2187 cluster_kubectl = self.cluster_kubectl(db_cluster)
2188 checkings_list = [
2189 {
2190 "item": "kustomization",
2191 "name": kustomization_name,
2192 "namespace": target_ns,
2193 "deleted": True,
2194 "timeout": self._checkloop_kustomization_timeout,
2195 "enable": True,
2196 "resourceState": "IN_PROGRESS.KUSTOMIZATION_DELETED",
2197 },
2198 ]
2199 self.logger.info(
2200 f"Checking status of KSU {db_ksu['name']} for profile {profile_id}."
2201 )
2202 result, message = await self.common_check_list(
2203 op_id, checkings_list, "ksus", db_ksu, kubectl=cluster_kubectl
2204 )
2205 if not result:
2206 return False, message
2207 except Exception as e:
2208 self.logger.error(
2209 f"Error checking KSU in cluster {db_cluster['name']}."
2210 )
2211 self.logger.error(e)
2212 return False, f"Error checking KSU in cluster {db_cluster['name']}."
2213 return True, "OK"