blob: 78cbdfda5e948f731463187b080d74c85037ad8e [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
# Module authors, as "Name <email>" strings.
__author__ = (
    "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
    "Shahithya Y <shahithya.y@tataelxsi.co.in>",
)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
rshri932105f2024-07-05 15:11:55 +000030
yshah2f39b8a2024-12-19 11:06:24 +000031MAP_PROFILE = {
32 "infra_controller_profiles": "infra-controllers",
33 "infra_config_profiles": "infra-configs",
34 "resource_profiles": "managed_resources",
35 "app_profiles": "apps",
36}
37
rshri932105f2024-07-05 15:11:55 +000038
garciadeblas72412282024-11-07 12:41:54 +010039class GitOpsLcm(LcmBase):
garciadeblasea865ff2024-11-20 12:42:49 +010040 db_collection = "gitops"
yshah564ec9c2024-11-29 07:33:32 +000041 workflow_status = None
42 resource_status = None
43
44 profile_collection_mapping = {
45 "infra_controller_profiles": "k8sinfra_controller",
46 "infra_config_profiles": "k8sinfra_config",
47 "resource_profiles": "k8sresource",
48 "app_profiles": "k8sapp",
49 }
garciadeblasea865ff2024-11-20 12:42:49 +010050
garciadeblas72412282024-11-07 12:41:54 +010051 def __init__(self, msg, lcm_tasks, config):
52 self.logger = logging.getLogger("lcm.gitops")
53 self.lcm_tasks = lcm_tasks
54 self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
55 self._checkloop_kustomization_timeout = 900
56 self._checkloop_resource_timeout = 900
57 self._workflows = {}
58 super().__init__(msg, self.logger)
59
60 async def check_dummy_operation(self, op_id, op_params, content):
61 self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
62 return True, "OK"
63
garciadeblasea865ff2024-11-20 12:42:49 +010064 def initialize_operation(self, item_id, op_id):
65 db_item = self.db.get_one(self.db_collection, {"_id": item_id})
66 operation = next(
67 (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
68 None,
69 )
70 operation["workflowState"] = "PROCESSING"
71 operation["resourceState"] = "NOT_READY"
72 operation["operationState"] = "IN_PROGRESS"
73 operation["gitOperationInfo"] = None
74 db_item["current_operation"] = operation["op_id"]
75 self.db.set_one(self.db_collection, {"_id": item_id}, db_item)
76
yshah564ec9c2024-11-29 07:33:32 +000077 def get_operation_params(self, item, operation_id):
78 operation_history = item.get("operationHistory", [])
79 operation = find_in_list(
80 operation_history, lambda op: op["op_id"] == operation_id
81 )
82 return operation.get("operationParams", {})
83
84 def get_operation_type(self, item, operation_id):
85 operation_history = item.get("operationHistory", [])
86 operation = find_in_list(
87 operation_history, lambda op: op["op_id"] == operation_id
88 )
89 return operation.get("operationType", {})
90
garciadeblasbe890702024-12-20 11:39:13 +010091 def update_state_operation_history(
92 self, content, op_id, workflow_state=None, resource_state=None
93 ):
94 self.logger.info(
95 f"Update state of operation {op_id} in Operation History in DB"
96 )
97 self.logger.info(
98 f"Workflow state: {workflow_state}. Resource state: {resource_state}"
99 )
100 self.logger.debug(f"Content: {content}")
101
102 op_num = 0
103 for operation in content["operationHistory"]:
104 self.logger.debug("Operations: {}".format(operation))
105 if operation["op_id"] == op_id:
106 self.logger.debug("Found operation number: {}".format(op_num))
107 if workflow_state is not None:
108 operation["workflowState"] = workflow_state
109
110 if resource_state is not None:
111 operation["resourceState"] = resource_state
112 break
113 op_num += 1
114 self.logger.debug("content: {}".format(content))
115
116 return content
117
garciadeblas7eae6f42024-11-08 10:41:38 +0100118 def update_operation_history(
garciadeblasf9092892024-12-12 11:07:08 +0100119 self, content, op_id, workflow_status=None, resource_status=None, op_end=True
garciadeblas7eae6f42024-11-08 10:41:38 +0100120 ):
121 self.logger.info(
122 f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
123 )
124 self.logger.debug(f"Content: {content}")
125
garciadeblas7eae6f42024-11-08 10:41:38 +0100126 op_num = 0
127 for operation in content["operationHistory"]:
128 self.logger.debug("Operations: {}".format(operation))
129 if operation["op_id"] == op_id:
130 self.logger.debug("Found operation number: {}".format(op_num))
garciadeblas8bde3f42024-12-20 10:37:12 +0100131 if workflow_status is not None:
132 if workflow_status:
133 operation["workflowState"] = "COMPLETED"
134 operation["result"] = True
135 else:
136 operation["workflowState"] = "ERROR"
137 operation["operationState"] = "FAILED"
138 operation["result"] = False
garciadeblas7eae6f42024-11-08 10:41:38 +0100139
garciadeblas8bde3f42024-12-20 10:37:12 +0100140 if resource_status is not None:
141 if resource_status:
142 operation["resourceState"] = "READY"
143 operation["operationState"] = "COMPLETED"
144 operation["result"] = True
145 else:
146 operation["resourceState"] = "NOT_READY"
147 operation["operationState"] = "FAILED"
148 operation["result"] = False
garciadeblas7eae6f42024-11-08 10:41:38 +0100149
garciadeblasf9092892024-12-12 11:07:08 +0100150 if op_end:
151 now = time()
152 operation["endDate"] = now
garciadeblas7eae6f42024-11-08 10:41:38 +0100153 break
154 op_num += 1
155 self.logger.debug("content: {}".format(content))
156
157 return content
158
garciadeblas713e1962025-01-17 12:49:19 +0100159 async def check_workflow_and_update_db(self, op_id, workflow_name, db_content):
yshah564ec9c2024-11-29 07:33:32 +0000160 workflow_status, workflow_msg = await self.odu.check_workflow_status(
161 workflow_name
162 )
163 self.logger.info(
164 "Workflow Status: {} Workflow Message: {}".format(
165 workflow_status, workflow_msg
166 )
167 )
168 operation_type = self.get_operation_type(db_content, op_id)
169 if operation_type == "create" and workflow_status:
170 db_content["state"] = "CREATED"
171 elif operation_type == "create" and not workflow_status:
172 db_content["state"] = "FAILED_CREATION"
173 elif operation_type == "delete" and workflow_status:
174 db_content["state"] = "DELETED"
175 elif operation_type == "delete" and not workflow_status:
176 db_content["state"] = "FAILED_DELETION"
177
178 if workflow_status:
179 db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
180 else:
181 db_content["resourceState"] = "ERROR"
182
183 db_content = self.update_operation_history(
184 db_content, op_id, workflow_status, None
185 )
186 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
187 return workflow_status
188
garciadeblas713e1962025-01-17 12:49:19 +0100189 async def check_resource_and_update_db(
190 self, resource_name, op_id, op_params, db_content
191 ):
yshah564ec9c2024-11-29 07:33:32 +0000192 workflow_status = True
193
194 resource_status, resource_msg = await self.check_resource_status(
195 resource_name, op_id, op_params, db_content
196 )
197 self.logger.info(
198 "Resource Status: {} Resource Message: {}".format(
199 resource_status, resource_msg
200 )
201 )
202
203 if resource_status:
204 db_content["resourceState"] = "READY"
205 else:
206 db_content["resourceState"] = "ERROR"
207
208 db_content = self.update_operation_history(
209 db_content, op_id, workflow_status, resource_status
210 )
211 db_content["operatingState"] = "IDLE"
212 db_content["current_operation"] = None
213 return resource_status, db_content
214
yshahcb9075f2024-11-22 12:08:57 +0000215 async def common_check_list(self, op_id, checkings_list, db_collection, db_item):
garciadeblas72412282024-11-07 12:41:54 +0100216 try:
217 for checking in checkings_list:
218 if checking["enable"]:
219 status, message = await self.odu.readiness_loop(
220 item=checking["item"],
221 name=checking["name"],
222 namespace=checking["namespace"],
223 flag=checking["flag"],
224 timeout=checking["timeout"],
225 )
226 if not status:
garciadeblasc5e9d572025-01-21 18:48:58 +0100227 error_message = "Resources not ready: "
228 error_message += checking.get("error_message", "")
229 return status, f"{error_message}: {message}"
garciadeblas7eae6f42024-11-08 10:41:38 +0100230 else:
231 db_item["resourceState"] = checking["resourceState"]
garciadeblasbe890702024-12-20 11:39:13 +0100232 db_item = self.update_state_operation_history(
233 db_item, op_id, None, checking["resourceState"]
garciadeblas7eae6f42024-11-08 10:41:38 +0100234 )
235 self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
garciadeblas72412282024-11-07 12:41:54 +0100236 except Exception as e:
237 self.logger.debug(traceback.format_exc())
238 self.logger.debug(f"Exception: {e}", exc_info=True)
239 return False, f"Unexpected exception: {e}"
240 return True, "OK"
241
242 async def check_resource_status(self, key, op_id, op_params, content):
243 self.logger.info(
244 f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}. Content: {content}"
245 )
246 check_resource_function = self._workflows.get(key, {}).get(
247 "check_resource_function"
248 )
249 self.logger.info("check_resource function : {}".format(check_resource_function))
250 if check_resource_function:
251 return await check_resource_function(op_id, op_params, content)
252 else:
253 return await self.check_dummy_operation(op_id, op_params, content)
254
garciadeblas995cbf32024-12-18 12:54:00 +0100255 def decrypted_copy(self, content, fields=["age_pubkey", "age_privkey"]):
256 # This deep copy is intended to be passed to ODU workflows.
257 content_copy = copy.deepcopy(content)
rshric3564942024-11-12 18:12:38 +0000258
259 # decrypting the key
260 self.db.encrypt_decrypt_fields(
garciadeblas995cbf32024-12-18 12:54:00 +0100261 content_copy,
rshric3564942024-11-12 18:12:38 +0000262 "decrypt",
garciadeblas995cbf32024-12-18 12:54:00 +0100263 fields,
rshric3564942024-11-12 18:12:38 +0000264 schema_version="1.11",
garciadeblas995cbf32024-12-18 12:54:00 +0100265 salt=content_copy["_id"],
rshric3564942024-11-12 18:12:38 +0000266 )
garciadeblas995cbf32024-12-18 12:54:00 +0100267 return content_copy
rshric3564942024-11-12 18:12:38 +0000268
garciadeblas72412282024-11-07 12:41:54 +0100269
270class ClusterLcm(GitOpsLcm):
garciadeblas96b94f52024-07-08 16:18:21 +0200271 db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000272
273 def __init__(self, msg, lcm_tasks, config):
274 """
275 Init, Connect to database, filesystem storage, and messaging
276 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
277 :return: None
278 """
garciadeblas72412282024-11-07 12:41:54 +0100279 super().__init__(msg, lcm_tasks, config)
280 self._workflows = {
281 "create_cluster": {
282 "check_resource_function": self.check_create_cluster,
283 },
garciadeblasb0a42c22024-11-13 16:00:10 +0100284 "register_cluster": {
285 "check_resource_function": self.check_register_cluster,
286 },
287 "update_cluster": {
288 "check_resource_function": self.check_update_cluster,
289 },
garciadeblas72412282024-11-07 12:41:54 +0100290 }
rshri932105f2024-07-05 15:11:55 +0000291 self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
292
rshri948f7de2024-12-02 03:42:35 +0000293 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000294 self.logger.info("cluster Create Enter")
295
garciadeblas995cbf32024-12-18 12:54:00 +0100296 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000297 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000298 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000299
300 # To initialize the operation states
301 self.initialize_operation(cluster_id, op_id)
302
garciadeblas995cbf32024-12-18 12:54:00 +0100303 # To get the cluster
304 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
305
306 # To get the operation params details
307 op_params = self.get_operation_params(db_cluster, op_id)
308
309 # To copy the cluster content and decrypting fields to use in workflows
310 workflow_content = {
311 "cluster": self.decrypted_copy(db_cluster),
312 }
rshric3564942024-11-12 18:12:38 +0000313
rshri948f7de2024-12-02 03:42:35 +0000314 # To get the vim account details
rshric3564942024-11-12 18:12:38 +0000315 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +0100316 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +0000317
garciadeblasadb81e82024-11-08 01:11:46 +0100318 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100319 "create_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200320 )
rshri932105f2024-07-05 15:11:55 +0000321 self.logger.info("workflow_name is :{}".format(workflow_name))
322
garciadeblas96b94f52024-07-08 16:18:21 +0200323 workflow_status, workflow_msg = await self.odu.check_workflow_status(
324 workflow_name
325 )
rshri932105f2024-07-05 15:11:55 +0000326 self.logger.info(
327 "workflow_status is :{} and workflow_msg is :{}".format(
328 workflow_status, workflow_msg
329 )
330 )
331 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200332 db_cluster["state"] = "CREATED"
333 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000334 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200335 db_cluster["state"] = "FAILED_CREATION"
336 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000337 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000338 db_cluster = self.update_operation_history(
339 db_cluster, op_id, workflow_status, None
340 )
garciadeblas96b94f52024-07-08 16:18:21 +0200341 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000342
garciadeblas28bff0f2024-09-16 12:53:07 +0200343 # Clean items used in the workflow, no matter if the workflow succeeded
344 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100345 "create_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +0200346 )
347 self.logger.info(
348 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
349 )
350
rshri932105f2024-07-05 15:11:55 +0000351 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100352 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100353 "create_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000354 )
355 self.logger.info(
356 "resource_status is :{} and resource_msg is :{}".format(
357 resource_status, resource_msg
358 )
359 )
360 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200361 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000362 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200363 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000364
garciadeblas96b94f52024-07-08 16:18:21 +0200365 db_cluster["operatingState"] = "IDLE"
366 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000367 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000368 )
shahithya70a3fc92024-11-12 11:01:05 +0000369 db_cluster["current_operation"] = None
garciadeblas3e5eeec2025-01-21 11:49:38 +0100370
371 # Retrieve credentials
372 cluster_creds = None
373 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
374 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
375 # TODO: manage the case where the credentials are not available
376 if result:
377 db_cluster["credentials"] = cluster_creds
378
379 # Update db_cluster
garciadeblas96b94f52024-07-08 16:18:21 +0200380 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
381 self.update_profile_state(db_cluster, workflow_status, resource_status)
rshri948f7de2024-12-02 03:42:35 +0000382
garciadeblas3e5eeec2025-01-21 11:49:38 +0100383 # Register the cluster in k8sclusters collection
rshri948f7de2024-12-02 03:42:35 +0000384 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
garciadeblas3e5eeec2025-01-21 11:49:38 +0100385 if cluster_creds:
rshri948f7de2024-12-02 03:42:35 +0000386 db_register["credentials"] = cluster_creds
garciadeblas3e5eeec2025-01-21 11:49:38 +0100387 # To call the lcm.py for registering the cluster in k8scluster lcm.
rshri948f7de2024-12-02 03:42:35 +0000388 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
389 register = await self.regist.create(db_register, order_id)
390 self.logger.debug(f"Register is : {register}")
391 else:
392 db_register["_admin"]["operationalState"] = "ERROR"
393 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
394 # To call the lcm.py for registering the cluster in k8scluster lcm.
395 db_register["credentials"] = cluster_creds
396 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
397
rshri932105f2024-07-05 15:11:55 +0000398 return
399
garciadeblas72412282024-11-07 12:41:54 +0100400 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100401 self.logger.info(
402 f"check_create_cluster Operation {op_id}. Params: {op_params}."
403 )
404 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100405 db_cluster = content["cluster"]
406 cluster_name = db_cluster["git_name"].lower()
407 cluster_kustomization_name = cluster_name
408 db_vim_account = content["vim_account"]
409 cloud_type = db_vim_account["vim_type"]
410 nodepool_name = ""
411 if cloud_type == "aws":
412 nodepool_name = f"{cluster_name}-nodegroup"
413 cluster_name = f"{cluster_name}-cluster"
414 elif cloud_type == "gcp":
415 nodepool_name = f"nodepool-{cluster_name}"
416 bootstrap = op_params.get("bootstrap", True)
417 if cloud_type in ("azure", "gcp", "aws"):
418 checkings_list = [
419 {
420 "item": "kustomization",
421 "name": cluster_kustomization_name,
422 "namespace": "managed-resources",
423 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000424 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100425 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100426 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100427 },
428 {
429 "item": f"cluster_{cloud_type}",
430 "name": cluster_name,
431 "namespace": "",
432 "flag": "Synced",
433 "timeout": self._checkloop_resource_timeout,
434 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100435 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100436 },
437 {
438 "item": f"cluster_{cloud_type}",
439 "name": cluster_name,
440 "namespace": "",
441 "flag": "Ready",
442 "timeout": self._checkloop_resource_timeout,
443 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100444 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100445 },
446 {
447 "item": "kustomization",
448 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
449 "namespace": "managed-resources",
450 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000451 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100452 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100453 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100454 },
455 ]
456 else:
457 return False, "Not suitable VIM account to check cluster status"
458 if nodepool_name:
459 nodepool_check = {
460 "item": f"nodepool_{cloud_type}",
461 "name": nodepool_name,
462 "namespace": "",
463 "flag": "Ready",
464 "timeout": self._checkloop_resource_timeout,
465 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100466 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100467 }
468 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000469 return await self.common_check_list(
470 op_id, checkings_list, "clusters", db_cluster
471 )
garciadeblas72412282024-11-07 12:41:54 +0100472
garciadeblasb0a42c22024-11-13 16:00:10 +0100473 async def check_register_cluster(self, op_id, op_params, content):
474 self.logger.info(
475 f"check_register_cluster Operation {op_id}. Params: {op_params}."
476 )
477 # self.logger.debug(f"Content: {content}")
478 db_cluster = content["cluster"]
479 cluster_name = db_cluster["git_name"].lower()
480 cluster_kustomization_name = cluster_name
481 bootstrap = op_params.get("bootstrap", True)
482 checkings_list = [
483 {
484 "item": "kustomization",
485 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
486 "namespace": "managed-resources",
487 "flag": "Ready",
488 "timeout": self._checkloop_kustomization_timeout,
489 "enable": bootstrap,
490 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
491 },
492 ]
yshahcb9075f2024-11-22 12:08:57 +0000493 return await self.common_check_list(
494 op_id, checkings_list, "clusters", db_cluster
495 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100496
497 async def check_update_cluster(self, op_id, op_params, content):
498 self.logger.info(
499 f"check_create_cluster Operation {op_id}. Params: {op_params}."
500 )
501 # self.logger.debug(f"Content: {content}")
502 db_cluster = content["cluster"]
503 cluster_name = db_cluster["git_name"].lower()
504 cluster_kustomization_name = cluster_name
505 db_vim_account = content["vim_account"]
506 cloud_type = db_vim_account["vim_type"]
507 nodepool_name = ""
508 if cloud_type == "aws":
509 nodepool_name = f"{cluster_name}-nodegroup"
510 cluster_name = f"{cluster_name}-cluster"
511 elif cloud_type == "gcp":
512 nodepool_name = f"nodepool-{cluster_name}"
513 if cloud_type in ("azure", "gcp", "aws"):
514 checkings_list = [
515 {
516 "item": "kustomization",
517 "name": cluster_kustomization_name,
518 "namespace": "managed-resources",
519 "flag": "Ready",
520 "timeout": self._checkloop_kustomization_timeout,
521 "enable": True,
522 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
523 },
524 {
525 "item": f"cluster_{cloud_type}",
526 "name": cluster_name,
527 "namespace": "",
528 "flag": "Synced",
529 "timeout": self._checkloop_resource_timeout,
530 "enable": True,
531 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
532 },
533 {
534 "item": f"cluster_{cloud_type}",
535 "name": cluster_name,
536 "namespace": "",
537 "flag": "Ready",
538 "timeout": self._checkloop_resource_timeout,
539 "enable": True,
540 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
541 },
542 ]
543 else:
544 return False, "Not suitable VIM account to check cluster status"
545 if nodepool_name:
546 nodepool_check = {
547 "item": f"nodepool_{cloud_type}",
548 "name": nodepool_name,
549 "namespace": "",
550 "flag": "Ready",
551 "timeout": self._checkloop_resource_timeout,
552 "enable": True,
553 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
554 }
555 checkings_list.append(nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000556 return await self.common_check_list(
557 op_id, checkings_list, "clusters", db_cluster
558 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100559
garciadeblas96b94f52024-07-08 16:18:21 +0200560 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000561 profiles = [
562 "infra_controller_profiles",
563 "infra_config_profiles",
564 "app_profiles",
565 "resource_profiles",
566 ]
rshri948f7de2024-12-02 03:42:35 +0000567 """
rshri932105f2024-07-05 15:11:55 +0000568 profiles_collection = {
569 "infra_controller_profiles": "k8sinfra_controller",
570 "infra_config_profiles": "k8sinfra_config",
571 "app_profiles": "k8sapp",
572 "resource_profiles": "k8sresource",
573 }
rshri948f7de2024-12-02 03:42:35 +0000574 """
Your Name86149632024-11-14 16:17:16 +0000575 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000576 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200577 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000578 # db_collection = profiles_collection[profile_type]
579 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000580 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000581 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200582 db_profile["state"] = db_cluster["state"]
583 db_profile["resourceState"] = db_cluster["resourceState"]
584 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000585 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000586 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000587 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000588 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000589 )
rshri932105f2024-07-05 15:11:55 +0000590 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
591
rshri948f7de2024-12-02 03:42:35 +0000592 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000593 self.logger.info("cluster delete Enter")
rshri948f7de2024-12-02 03:42:35 +0000594
garciadeblas995cbf32024-12-18 12:54:00 +0100595 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000596 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000597 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000598
599 # To initialize the operation states
600 self.initialize_operation(cluster_id, op_id)
601
garciadeblas995cbf32024-12-18 12:54:00 +0100602 # To get the cluster
603 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
604
605 # To get the operation params details
606 op_params = self.get_operation_params(db_cluster, op_id)
607
608 # To copy the cluster content and decrypting fields to use in workflows
609 workflow_content = {
610 "cluster": self.decrypted_copy(db_cluster),
611 }
rshri948f7de2024-12-02 03:42:35 +0000612
garciadeblas6b2112c2024-12-20 10:35:13 +0100613 # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
614 # This if clause will be removed
garciadeblas12470812024-11-18 10:33:12 +0100615 if db_cluster["created"] == "false":
rshri948f7de2024-12-02 03:42:35 +0000616 return await self.deregister(params, order_id)
rshri932105f2024-07-05 15:11:55 +0000617
garciadeblasadb81e82024-11-08 01:11:46 +0100618 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100619 "delete_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200620 )
rshri932105f2024-07-05 15:11:55 +0000621 self.logger.info("workflow_name is :{}".format(workflow_name))
622
garciadeblas96b94f52024-07-08 16:18:21 +0200623 workflow_status, workflow_msg = await self.odu.check_workflow_status(
624 workflow_name
625 )
rshri932105f2024-07-05 15:11:55 +0000626 self.logger.info(
627 "workflow_status is :{} and workflow_msg is :{}".format(
628 workflow_status, workflow_msg
629 )
630 )
631 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200632 db_cluster["state"] = "DELETED"
633 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000634 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200635 db_cluster["state"] = "FAILED_DELETION"
636 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000637 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000638 db_cluster = self.update_operation_history(
639 db_cluster, op_id, workflow_status, None
640 )
garciadeblas96b94f52024-07-08 16:18:21 +0200641 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000642
garciadeblas98f9a3d2024-12-10 13:42:47 +0100643 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
644 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100645 "delete_cluster", op_id, op_params, workflow_content
garciadeblas98f9a3d2024-12-10 13:42:47 +0100646 )
647 self.logger.info(
648 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
649 )
650
rshri932105f2024-07-05 15:11:55 +0000651 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100652 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100653 "delete_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000654 )
655 self.logger.info(
656 "resource_status is :{} and resource_msg is :{}".format(
657 resource_status, resource_msg
658 )
659 )
660 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200661 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000662 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200663 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000664
garciadeblas96b94f52024-07-08 16:18:21 +0200665 db_cluster["operatingState"] = "IDLE"
666 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000667 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +0200668 )
shahithya70a3fc92024-11-12 11:01:05 +0000669 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200670 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000671
garciadeblas96b94f52024-07-08 16:18:21 +0200672 # To delete it from DB
673 if db_cluster["state"] == "DELETED":
674 self.delete_cluster(db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000675
676 # To delete it from k8scluster collection
677 self.db.del_one("k8sclusters", {"name": db_cluster["name"]})
678
rshri932105f2024-07-05 15:11:55 +0000679 return
680
garciadeblas96b94f52024-07-08 16:18:21 +0200681 def delete_cluster(self, db_cluster):
682 # Actually, item_content is equal to db_cluster
683 # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
684 # self.logger.debug("item_content is : {}".format(item_content))
rshri932105f2024-07-05 15:11:55 +0000685
rshri932105f2024-07-05 15:11:55 +0000686 # detach profiles
687 update_dict = None
688 profiles_to_detach = [
689 "infra_controller_profiles",
690 "infra_config_profiles",
691 "app_profiles",
692 "resource_profiles",
693 ]
rshri948f7de2024-12-02 03:42:35 +0000694 """
rshri932105f2024-07-05 15:11:55 +0000695 profiles_collection = {
696 "infra_controller_profiles": "k8sinfra_controller",
697 "infra_config_profiles": "k8sinfra_config",
698 "app_profiles": "k8sapp",
699 "resource_profiles": "k8sresource",
700 }
rshri948f7de2024-12-02 03:42:35 +0000701 """
rshri932105f2024-07-05 15:11:55 +0000702 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200703 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200704 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000705 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000706 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000707 # db_collection = profiles_collection[profile_type]
708 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000709 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200710 self.logger.debug("the db_profile is :{}".format(db_profile))
711 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200712 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000713 )
garciadeblasc2552852024-10-22 12:39:32 +0200714 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000715 "the db_profile name is :{}".format(db_profile["name"])
716 )
garciadeblas96b94f52024-07-08 16:18:21 +0200717 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000718 self.db.del_one(db_collection, {"_id": profile_id})
719 else:
rshri932105f2024-07-05 15:11:55 +0000720 profile_ids.remove(profile_id)
721 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000722 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200723 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000724 )
garciadeblas96b94f52024-07-08 16:18:21 +0200725 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000726
    async def attach_profile(self, params, order_id):
        """Attach an existing profile to a cluster via the ODU workflow engine.

        Launches the "attach_profile_to_cluster" workflow, tracks its status
        and the resulting resource status in the cluster document
        (resourceState / operationHistory), and on success appends the
        profile id to the cluster's profile list.

        :param params: dict with at least "cluster_id", "operation_id",
            "profile_id" and "profile_type"
        :param order_id: kafka order identifier (unused here, kept for the
            common task signature)
        :return: None
        """
        self.logger.info("profile attach Enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        # To get the profile details; profile_type selects the Mongo collection
        # through self.profile_collection_mapping.
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        workflow_content["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "attach_profile_to_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; resource status not known yet, hence None.
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        if workflow_status:
            # Workflow succeeded: poll the actual resource status in the cluster.
            resource_status, resource_msg = await self.check_resource_status(
                "attach_profile_to_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            # Only link the profile to the cluster if the resource became READY.
            profile_list = db_cluster[profile_type]
            if resource_status:
                profile_list.append(profile_id)
            db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
            # NOTE(review): on workflow failure operatingState/current_operation
            # are apparently not reset here — confirm intended behavior.

        return
806
    async def detach_profile(self, params, order_id):
        """Detach a profile from a cluster via the ODU workflow engine.

        Mirror of attach_profile: launches the "detach_profile_from_cluster"
        workflow, records workflow/resource status in the cluster document,
        and on success removes the profile id from the cluster's profile list.

        :param params: dict with at least "cluster_id", "operation_id",
            "profile_id" and "profile_type"
        :param order_id: kafka order identifier (unused here, kept for the
            common task signature)
        :return: None
        """
        self.logger.info("profile dettach Enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        # To get the profile details; profile_type selects the Mongo collection
        # through self.profile_collection_mapping.
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        workflow_content["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "detach_profile_from_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; resource status not known yet, hence None.
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        if workflow_status:
            # Workflow succeeded: poll the actual resource status in the cluster.
            resource_status, resource_msg = await self.check_resource_status(
                "detach_profile_from_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            # Only unlink the profile if the detach actually completed.
            profile_list = db_cluster[profile_type]
            self.logger.info("profile list is : {}".format(profile_list))
            if resource_status:
                profile_list.remove(profile_id)
            db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        return
887
    async def register(self, params, order_id):
        """Register an existing K8s cluster via the "register_cluster" workflow.

        Updates the cluster document state/resourceState according to the
        workflow and resource outcome, syncs the obtained credentials into the
        matching "k8sclusters" document, and on full success delegates the
        legacy registration to the k8scluster LCM (self.regist.create).

        :param params: dict with at least "cluster_id" and "operation_id"
        :param order_id: kafka order identifier, forwarded to self.regist.create
        :return: None
        """
        self.logger.info("cluster register enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        _, workflow_name = await self.odu.launch_workflow(
            "register_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; resource status not known yet, hence None.
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "register_cluster", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            # Workflow succeeded: poll the actual resource status.
            resource_status, resource_msg = await self.check_resource_status(
                "register_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Propagate the credentials to the legacy k8sclusters record.
        # NOTE(review): db_cluster["credentials"] may be absent if the workflow
        # failed before credentials were fetched — confirm upstream guarantees.
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
        db_register["credentials"] = db_cluster["credentials"]
        self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            db_register["_admin"]["operationalState"] = "ERROR"
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
976
    async def deregister(self, params, order_id):
        """Deregister a cluster via the "deregister_cluster" workflow, then delete it.

        Runs the deregistration workflow, records workflow/resource status in
        the cluster document, cleans up workflow items, and finally delegates
        to self.delete() to remove the cluster itself.

        :param params: dict with at least "cluster_id" and "operation_id"
        :param order_id: kafka order identifier, forwarded to self.delete
        :return: whatever self.delete returns
        """
        self.logger.info("cluster deregister enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        _, workflow_name = await self.odu.launch_workflow(
            "deregister_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; resource status not known yet, hence None.
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "deregister_cluster", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            # Workflow succeeded: poll the actual resource status.
            resource_status, resource_msg = await self.check_resource_status(
                "deregister_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
            # Setting created flag to true avoids infinite loops when deregistering a cluster
            db_cluster["created"] = "true"
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Deregistration always ends with the cluster deletion flow.
        return await self.delete(params, order_id)
rshri932105f2024-07-05 15:11:55 +00001053
rshri948f7de2024-12-02 03:42:35 +00001054 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001055 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001056 cluster_id = params["cluster_id"]
1057 op_id = params["operation_id"]
1058 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001059 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1060 if result:
1061 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001062 op_len = 0
1063 for operations in db_cluster["operationHistory"]:
1064 if operations["op_id"] == op_id:
1065 db_cluster["operationHistory"][op_len]["result"] = result
1066 db_cluster["operationHistory"][op_len]["endDate"] = time()
1067 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001068 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001069 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001070 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001071 return
1072
rshri948f7de2024-12-02 03:42:35 +00001073 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001074 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001075 # To get the cluster details
1076 cluster_id = params["cluster_id"]
1077 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1078
1079 # To get the operation params details
1080 op_id = params["operation_id"]
1081 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001082
garciadeblas995cbf32024-12-18 12:54:00 +01001083 # To copy the cluster content and decrypting fields to use in workflows
1084 workflow_content = {
1085 "cluster": self.decrypted_copy(db_cluster),
1086 }
rshric3564942024-11-12 18:12:38 +00001087
1088 # vim account details
1089 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +01001090 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +00001091
garciadeblasadb81e82024-11-08 01:11:46 +01001092 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001093 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001094 )
1095 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1096 workflow_name
1097 )
1098 self.logger.info(
1099 "Workflow Status: {} Workflow Message: {}".format(
1100 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001101 )
garciadeblas96b94f52024-07-08 16:18:21 +02001102 )
1103
1104 if workflow_status:
1105 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1106 else:
1107 db_cluster["resourceState"] = "ERROR"
1108
yshahcb9075f2024-11-22 12:08:57 +00001109 db_cluster = self.update_operation_history(
1110 db_cluster, op_id, workflow_status, None
1111 )
garciadeblas96b94f52024-07-08 16:18:21 +02001112 # self.logger.info("Db content: {}".format(db_content))
1113 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1114 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1115
garciadeblas28bff0f2024-09-16 12:53:07 +02001116 # Clean items used in the workflow, no matter if the workflow succeeded
1117 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001118 "update_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001119 )
1120 self.logger.info(
1121 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1122 )
garciadeblas96b94f52024-07-08 16:18:21 +02001123 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001124 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +01001125 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001126 )
1127 self.logger.info(
1128 "Resource Status: {} Resource Message: {}".format(
1129 resource_status, resource_msg
1130 )
1131 )
yshah771dea82024-07-05 15:11:49 +00001132
1133 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001134 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001135 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001136 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001137
yshah0defcd52024-11-18 07:41:35 +00001138 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001139 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001140 )
1141
garciadeblas96b94f52024-07-08 16:18:21 +02001142 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001143 # self.logger.info("db_cluster: {}".format(db_cluster))
1144 # TODO: verify enxtcondition
1145 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1146 if workflow_status:
1147 if "k8s_version" in op_params:
1148 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001149 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001150 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001151 if "node_size" in op_params:
1152 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001153 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001154 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001155 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001156 return
1157
1158
class CloudCredentialsLcm(GitOpsLcm):
    """LCM handler for cloud credentials stored in the "vim_accounts" collection.

    Each operation (add/edit/remove) launches the corresponding ODU workflow
    ("create/update/delete_cloud_credentials") and, when the workflow succeeds,
    checks the resulting resource status.
    """

    # Mongo collection backing this LCM handler.
    db_collection = "vim_accounts"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging

        :param msg: messaging (kafka) instance
        :param lcm_tasks: task registry shared with the main LCM
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def add(self, params, order_id):
        """Create cloud credentials through the "create_cloud_credentials" workflow.

        :param params: the vim_accounts document content (must contain "_id")
        :param order_id: kafka order identifier (unused)
        :return: None
        """
        self.logger.info("Cloud Credentials create")
        vim_id = params["_id"]
        # The vim id doubles as the operation id for credential workflows.
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # Decrypt credential secrets in place so the workflow receives clear text.
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )

        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )

        # NOTE(review): operationalState is set to ENABLED regardless of the
        # workflow/resource outcome — confirm this is intended.
        db_content["_admin"]["operationalState"] = "ENABLED"
        for operation in db_content["_admin"]["operations"]:
            if operation["lcmOperationType"] == "create":
                operation["operationState"] = "ENABLED"
        self.logger.info("Content : {}".format(db_content))
        self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
        return

    async def edit(self, params, order_id):
        """Update cloud credentials through the "update_cloud_credentials" workflow.

        :param params: the vim_accounts document content (must contain "_id")
        :param order_id: kafka order identifier (unused)
        :return: None
        """
        self.logger.info("Cloud Credentials Update")
        vim_id = params["_id"]
        # The vim id doubles as the operation id for credential workflows.
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # Decrypt credential secrets in place so the workflow receives clear text.
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "update_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        return

    async def remove(self, params, order_id):
        """Delete cloud credentials through the "delete_cloud_credentials" workflow.

        The vim_accounts document is deleted from the DB at the end,
        regardless of the workflow outcome.

        :param params: the vim_accounts document content (must contain "_id")
        :param order_id: kafka order identifier (unused)
        :return: None
        """
        self.logger.info("Cloud Credentials remove")
        vim_id = params["_id"]
        # The vim id doubles as the operation id for credential workflows.
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        return
1295
rshri932105f2024-07-05 15:11:55 +00001296
garciadeblas72412282024-11-07 12:41:54 +01001297class K8sAppLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001298 db_collection = "k8sapp"
1299
    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging

        :param msg: messaging (kafka) instance
        :param lcm_tasks: task registry shared with the main LCM
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001307
    async def create(self, params, order_id):
        """Create a K8s app profile through the "create_profile" ODU workflow.

        Initializes the operation state, tags the profile document with
        profile_type "applications", launches the workflow and then checks
        workflow and resource status, persisting results to the "k8sapp"
        collection via the shared helper methods.

        :param params: dict with at least "operation_id" and "profile_id"
        :param order_id: kafka order identifier (unused here, kept for the
            common task signature)
        :return: None
        """
        self.logger.info("App Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        content["profile_type"] = "applications"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sapp", {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        # Helper records the workflow result in the profile document.
        workflow_status = await self.check_workflow_and_update_db(
            op_id, workflow_name, content
        )

        if workflow_status:
            # Helper returns the refreshed profile document.
            resource_status, content = await self.check_resource_and_update_db(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.logger.info(f"App Create Exit with resource status: {resource_status}")
        return
1338
rshri948f7de2024-12-02 03:42:35 +00001339 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001340 self.logger.info("App delete Enter")
rshri932105f2024-07-05 15:11:55 +00001341
rshri948f7de2024-12-02 03:42:35 +00001342 op_id = params["operation_id"]
1343 profile_id = params["profile_id"]
1344
1345 # To initialize the operation states
1346 self.initialize_operation(profile_id, op_id)
1347
1348 content = self.db.get_one("k8sapp", {"_id": profile_id})
1349 op_params = self.get_operation_params(content, op_id)
1350
garciadeblasadb81e82024-11-08 01:11:46 +01001351 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001352 "delete_profile", op_id, op_params, content
1353 )
rshri932105f2024-07-05 15:11:55 +00001354 self.logger.info("workflow_name is :{}".format(workflow_name))
1355
garciadeblas713e1962025-01-17 12:49:19 +01001356 workflow_status = await self.check_workflow_and_update_db(
1357 op_id, workflow_name, content
1358 )
rshri932105f2024-07-05 15:11:55 +00001359
1360 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001361 resource_status, content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001362 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001363 )
rshri932105f2024-07-05 15:11:55 +00001364
yshah564ec9c2024-11-29 07:33:32 +00001365 if resource_status:
1366 content["state"] = "DELETED"
1367 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1368 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1369 self.logger.info(f"App Delete Exit with resource status: {resource_status}")
rshri932105f2024-07-05 15:11:55 +00001370 return
1371
1372
garciadeblas72412282024-11-07 12:41:54 +01001373class K8sResourceLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001374 db_collection = "k8sresource"
1375
rshri932105f2024-07-05 15:11:55 +00001376 def __init__(self, msg, lcm_tasks, config):
1377 """
1378 Init, Connect to database, filesystem storage, and messaging
1379 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1380 :return: None
1381 """
garciadeblas72412282024-11-07 12:41:54 +01001382 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001383
rshri948f7de2024-12-02 03:42:35 +00001384 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001385 self.logger.info("Resource Create Enter")
1386
rshri948f7de2024-12-02 03:42:35 +00001387 op_id = params["operation_id"]
1388 profile_id = params["profile_id"]
1389
1390 # To initialize the operation states
1391 self.initialize_operation(profile_id, op_id)
1392
1393 content = self.db.get_one("k8sresource", {"_id": profile_id})
1394 content["profile_type"] = "managed-resources"
1395 op_params = self.get_operation_params(content, op_id)
1396 self.db.set_one("k8sresource", {"_id": content["_id"]}, content)
1397
garciadeblasadb81e82024-11-08 01:11:46 +01001398 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001399 "create_profile", op_id, op_params, content
1400 )
rshri932105f2024-07-05 15:11:55 +00001401 self.logger.info("workflow_name is :{}".format(workflow_name))
1402
garciadeblas713e1962025-01-17 12:49:19 +01001403 workflow_status = await self.check_workflow_and_update_db(
1404 op_id, workflow_name, content
1405 )
rshri932105f2024-07-05 15:11:55 +00001406
1407 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001408 resource_status, content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001409 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001410 )
yshah564ec9c2024-11-29 07:33:32 +00001411 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1412 self.logger.info(
1413 f"Resource Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001414 )
rshri932105f2024-07-05 15:11:55 +00001415 return
1416
rshri948f7de2024-12-02 03:42:35 +00001417 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001418 self.logger.info("Resource delete Enter")
rshri948f7de2024-12-02 03:42:35 +00001419
1420 op_id = params["operation_id"]
1421 profile_id = params["profile_id"]
1422
1423 # To initialize the operation states
1424 self.initialize_operation(profile_id, op_id)
1425
1426 content = self.db.get_one("k8sresource", {"_id": profile_id})
1427 op_params = self.get_operation_params(content, op_id)
rshri932105f2024-07-05 15:11:55 +00001428
garciadeblasadb81e82024-11-08 01:11:46 +01001429 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001430 "delete_profile", op_id, op_params, content
1431 )
rshri932105f2024-07-05 15:11:55 +00001432 self.logger.info("workflow_name is :{}".format(workflow_name))
1433
garciadeblas713e1962025-01-17 12:49:19 +01001434 workflow_status = await self.check_workflow_and_update_db(
1435 op_id, workflow_name, content
1436 )
rshri932105f2024-07-05 15:11:55 +00001437
1438 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001439 resource_status, content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001440 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001441 )
rshri932105f2024-07-05 15:11:55 +00001442
yshah564ec9c2024-11-29 07:33:32 +00001443 if resource_status:
1444 content["state"] = "DELETED"
1445 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1446 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1447 self.logger.info(
1448 f"Resource Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001449 )
rshri932105f2024-07-05 15:11:55 +00001450 return
1451
1452
garciadeblas72412282024-11-07 12:41:54 +01001453class K8sInfraControllerLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001454 db_collection = "k8sinfra_controller"
1455
rshri932105f2024-07-05 15:11:55 +00001456 def __init__(self, msg, lcm_tasks, config):
1457 """
1458 Init, Connect to database, filesystem storage, and messaging
1459 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1460 :return: None
1461 """
garciadeblas72412282024-11-07 12:41:54 +01001462 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001463
rshri948f7de2024-12-02 03:42:35 +00001464 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001465 self.logger.info("Infra controller Create Enter")
1466
rshri948f7de2024-12-02 03:42:35 +00001467 op_id = params["operation_id"]
1468 profile_id = params["profile_id"]
1469
1470 # To initialize the operation states
1471 self.initialize_operation(profile_id, op_id)
1472
1473 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1474 content["profile_type"] = "infra-controllers"
1475 op_params = self.get_operation_params(content, op_id)
1476 self.db.set_one("k8sinfra_controller", {"_id": content["_id"]}, content)
1477
garciadeblasadb81e82024-11-08 01:11:46 +01001478 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001479 "create_profile", op_id, op_params, content
1480 )
rshri932105f2024-07-05 15:11:55 +00001481 self.logger.info("workflow_name is :{}".format(workflow_name))
1482
garciadeblas713e1962025-01-17 12:49:19 +01001483 workflow_status = await self.check_workflow_and_update_db(
1484 op_id, workflow_name, content
1485 )
rshri932105f2024-07-05 15:11:55 +00001486
1487 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001488 resource_status, content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001489 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001490 )
yshah564ec9c2024-11-29 07:33:32 +00001491 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1492 self.logger.info(
1493 f"Infra Controller Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001494 )
rshri932105f2024-07-05 15:11:55 +00001495 return
1496
rshri948f7de2024-12-02 03:42:35 +00001497 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001498 self.logger.info("Infra controller delete Enter")
rshri932105f2024-07-05 15:11:55 +00001499
rshri948f7de2024-12-02 03:42:35 +00001500 op_id = params["operation_id"]
1501 profile_id = params["profile_id"]
1502
1503 # To initialize the operation states
1504 self.initialize_operation(profile_id, op_id)
1505
1506 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1507 op_params = self.get_operation_params(content, op_id)
1508
garciadeblasadb81e82024-11-08 01:11:46 +01001509 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001510 "delete_profile", op_id, op_params, content
1511 )
rshri932105f2024-07-05 15:11:55 +00001512 self.logger.info("workflow_name is :{}".format(workflow_name))
1513
garciadeblas713e1962025-01-17 12:49:19 +01001514 workflow_status = await self.check_workflow_and_update_db(
1515 op_id, workflow_name, content
1516 )
rshri932105f2024-07-05 15:11:55 +00001517
1518 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001519 resource_status, content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001520 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001521 )
rshri932105f2024-07-05 15:11:55 +00001522
yshah564ec9c2024-11-29 07:33:32 +00001523 if resource_status:
1524 content["state"] = "DELETED"
1525 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1526 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1527 self.logger.info(
1528 f"Infra Controller Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001529 )
rshri932105f2024-07-05 15:11:55 +00001530 return
1531
1532
garciadeblas72412282024-11-07 12:41:54 +01001533class K8sInfraConfigLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001534 db_collection = "k8sinfra_config"
1535
rshri932105f2024-07-05 15:11:55 +00001536 def __init__(self, msg, lcm_tasks, config):
1537 """
1538 Init, Connect to database, filesystem storage, and messaging
1539 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1540 :return: None
1541 """
garciadeblas72412282024-11-07 12:41:54 +01001542 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001543
rshri948f7de2024-12-02 03:42:35 +00001544 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001545 self.logger.info("Infra config Create Enter")
1546
rshri948f7de2024-12-02 03:42:35 +00001547 op_id = params["operation_id"]
1548 profile_id = params["profile_id"]
1549
1550 # To initialize the operation states
1551 self.initialize_operation(profile_id, op_id)
1552
1553 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1554 content["profile_type"] = "infra-configs"
1555 op_params = self.get_operation_params(content, op_id)
1556 self.db.set_one("k8sinfra_config", {"_id": content["_id"]}, content)
1557
garciadeblasadb81e82024-11-08 01:11:46 +01001558 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001559 "create_profile", op_id, op_params, content
1560 )
rshri932105f2024-07-05 15:11:55 +00001561 self.logger.info("workflow_name is :{}".format(workflow_name))
1562
garciadeblas713e1962025-01-17 12:49:19 +01001563 workflow_status = await self.check_workflow_and_update_db(
1564 op_id, workflow_name, content
1565 )
rshri932105f2024-07-05 15:11:55 +00001566
1567 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001568 resource_status, content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001569 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001570 )
yshah564ec9c2024-11-29 07:33:32 +00001571 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1572 self.logger.info(
1573 f"Infra Config Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001574 )
rshri932105f2024-07-05 15:11:55 +00001575 return
1576
rshri948f7de2024-12-02 03:42:35 +00001577 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001578 self.logger.info("Infra config delete Enter")
1579
rshri948f7de2024-12-02 03:42:35 +00001580 op_id = params["operation_id"]
1581 profile_id = params["profile_id"]
1582
1583 # To initialize the operation states
1584 self.initialize_operation(profile_id, op_id)
1585
1586 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1587 op_params = self.get_operation_params(content, op_id)
1588
garciadeblasadb81e82024-11-08 01:11:46 +01001589 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001590 "delete_profile", op_id, op_params, content
1591 )
rshri932105f2024-07-05 15:11:55 +00001592 self.logger.info("workflow_name is :{}".format(workflow_name))
rshri932105f2024-07-05 15:11:55 +00001593
garciadeblas713e1962025-01-17 12:49:19 +01001594 workflow_status = await self.check_workflow_and_update_db(
1595 op_id, workflow_name, content
1596 )
yshah564ec9c2024-11-29 07:33:32 +00001597
rshri932105f2024-07-05 15:11:55 +00001598 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001599 resource_status, content = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001600 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001601 )
yshah564ec9c2024-11-29 07:33:32 +00001602
rshri932105f2024-07-05 15:11:55 +00001603 if resource_status:
yshah564ec9c2024-11-29 07:33:32 +00001604 content["state"] = "DELETED"
1605 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1606 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1607 self.logger.info(
1608 f"Infra Config Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001609 )
rshri932105f2024-07-05 15:11:55 +00001610
rshri932105f2024-07-05 15:11:55 +00001611 return
yshah771dea82024-07-05 15:11:49 +00001612
1613
garciadeblas72412282024-11-07 12:41:54 +01001614class OkaLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001615 db_collection = "okas"
1616
1617 def __init__(self, msg, lcm_tasks, config):
1618 """
1619 Init, Connect to database, filesystem storage, and messaging
1620 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1621 :return: None
1622 """
garciadeblas72412282024-11-07 12:41:54 +01001623 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001624
yshah564ec9c2024-11-29 07:33:32 +00001625 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001626 self.logger.info("OKA Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001627 op_id = params["operation_id"]
1628 oka_id = params["oka_id"]
1629 self.initialize_operation(oka_id, op_id)
1630 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1631 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001632
garciadeblasadb81e82024-11-08 01:11:46 +01001633 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001634 "create_oka", op_id, op_params, db_content
1635 )
yshah564ec9c2024-11-29 07:33:32 +00001636
garciadeblas713e1962025-01-17 12:49:19 +01001637 workflow_status = await self.check_workflow_and_update_db(
1638 op_id, workflow_name, db_content
1639 )
yshah771dea82024-07-05 15:11:49 +00001640
1641 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001642 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001643 "create_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001644 )
garciadeblas96b94f52024-07-08 16:18:21 +02001645 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001646 self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001647 return
1648
yshah564ec9c2024-11-29 07:33:32 +00001649 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001650 self.logger.info("OKA Edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001651 op_id = params["operation_id"]
1652 oka_id = params["oka_id"]
1653 self.initialize_operation(oka_id, op_id)
1654 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1655 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001656
garciadeblasadb81e82024-11-08 01:11:46 +01001657 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001658 "update_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001659 )
garciadeblas713e1962025-01-17 12:49:19 +01001660 workflow_status = await self.check_workflow_and_update_db(
1661 op_id, workflow_name, db_content
1662 )
yshah771dea82024-07-05 15:11:49 +00001663
1664 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001665 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001666 "update_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001667 )
garciadeblas96b94f52024-07-08 16:18:21 +02001668 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001669 self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001670 return
1671
yshah564ec9c2024-11-29 07:33:32 +00001672 async def delete(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001673 self.logger.info("OKA delete Enter")
yshah564ec9c2024-11-29 07:33:32 +00001674 op_id = params["operation_id"]
1675 oka_id = params["oka_id"]
1676 self.initialize_operation(oka_id, op_id)
1677 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1678 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001679
garciadeblasadb81e82024-11-08 01:11:46 +01001680 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001681 "delete_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001682 )
garciadeblas713e1962025-01-17 12:49:19 +01001683 workflow_status = await self.check_workflow_and_update_db(
1684 op_id, workflow_name, db_content
1685 )
yshah771dea82024-07-05 15:11:49 +00001686
1687 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001688 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001689 "delete_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001690 )
yshah771dea82024-07-05 15:11:49 +00001691
yshah564ec9c2024-11-29 07:33:32 +00001692 if resource_status:
1693 db_content["state"] == "DELETED"
1694 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
garciadeblas96b94f52024-07-08 16:18:21 +02001695 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah564ec9c2024-11-29 07:33:32 +00001696 self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001697 return
1698
1699
garciadeblas72412282024-11-07 12:41:54 +01001700class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001701 db_collection = "ksus"
1702
1703 def __init__(self, msg, lcm_tasks, config):
1704 """
1705 Init, Connect to database, filesystem storage, and messaging
1706 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1707 :return: None
1708 """
garciadeblas72412282024-11-07 12:41:54 +01001709 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001710
yshah564ec9c2024-11-29 07:33:32 +00001711 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001712 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001713 db_content = []
1714 op_params = []
1715 op_id = params["operation_id"]
1716 for ksu_id in params["ksus_list"]:
1717 self.logger.info("Ksu ID: {}".format(ksu_id))
1718 self.initialize_operation(ksu_id, op_id)
1719 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1720 self.logger.info("Db KSU: {}".format(db_ksu))
1721 db_content.append(db_ksu)
1722 ksu_params = {}
1723 ksu_params = self.get_operation_params(db_ksu, op_id)
1724 self.logger.info("Operation Params: {}".format(ksu_params))
1725 # Update ksu_params["profile"] with profile name and age-pubkey
1726 profile_type = ksu_params["profile"]["profile_type"]
1727 profile_id = ksu_params["profile"]["_id"]
1728 profile_collection = self.profile_collection_mapping[profile_type]
1729 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1730 ksu_params["profile"]["name"] = db_profile["name"]
1731 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1732 # Update ksu_params["oka"] with sw_catalog_path (when missing)
garciadeblas8c9c5442025-01-17 01:06:05 +01001733 # TODO: remove this in favor of doing it in ODU workflow
yshah564ec9c2024-11-29 07:33:32 +00001734 for oka in ksu_params["oka"]:
1735 if "sw_catalog_path" not in oka:
1736 oka_id = oka["_id"]
1737 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001738 oka_type = MAP_PROFILE[
1739 db_oka.get("profile_type", "infra_controller_profiles")
1740 ]
garciadeblas8c9c5442025-01-17 01:06:05 +01001741 oka[
1742 "sw_catalog_path"
1743 ] = f"{oka_type}/{db_oka['git_name']}/templates"
yshah564ec9c2024-11-29 07:33:32 +00001744 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001745
garciadeblasadb81e82024-11-08 01:11:46 +01001746 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001747 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001748 )
yshah564ec9c2024-11-29 07:33:32 +00001749 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001750 workflow_status = await self.check_workflow_and_update_db(
1751 op_id, workflow_name, db_ksu
1752 )
yshah771dea82024-07-05 15:11:49 +00001753
garciadeblas96b94f52024-07-08 16:18:21 +02001754 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001755 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001756 "create_ksus", op_id, ksu_params, db_ksu
1757 )
yshah771dea82024-07-05 15:11:49 +00001758
garciadeblas96b94f52024-07-08 16:18:21 +02001759 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1760
garciadeblasd8429852024-10-17 15:30:30 +02001761 # Clean items used in the workflow, no matter if the workflow succeeded
1762 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001763 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001764 )
1765 self.logger.info(
1766 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1767 )
yshah564ec9c2024-11-29 07:33:32 +00001768 self.logger.info(f"KSU Create EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001769 return
1770
yshah564ec9c2024-11-29 07:33:32 +00001771 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001772 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001773 db_content = []
1774 op_params = []
1775 op_id = params["operation_id"]
1776 for ksu_id in params["ksus_list"]:
1777 self.initialize_operation(ksu_id, op_id)
1778 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1779 db_content.append(db_ksu)
1780 ksu_params = {}
1781 ksu_params = self.get_operation_params(db_ksu, op_id)
1782 # Update ksu_params["profile"] with profile name and age-pubkey
1783 profile_type = ksu_params["profile"]["profile_type"]
1784 profile_id = ksu_params["profile"]["_id"]
1785 profile_collection = self.profile_collection_mapping[profile_type]
1786 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1787 ksu_params["profile"]["name"] = db_profile["name"]
1788 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1789 # Update ksu_params["oka"] with sw_catalog_path (when missing)
garciadeblas8c9c5442025-01-17 01:06:05 +01001790 # TODO: remove this in favor of doing it in ODU workflow
yshah564ec9c2024-11-29 07:33:32 +00001791 for oka in ksu_params["oka"]:
1792 if "sw_catalog_path" not in oka:
1793 oka_id = oka["_id"]
1794 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001795 oka_type = MAP_PROFILE[
1796 db_oka.get("profile_type", "infra_controller_profiles")
1797 ]
garciadeblas8c9c5442025-01-17 01:06:05 +01001798 oka[
1799 "sw_catalog_path"
1800 ] = f"{oka_type}/{db_oka['git_name']}/templates"
yshah564ec9c2024-11-29 07:33:32 +00001801 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001802
garciadeblasadb81e82024-11-08 01:11:46 +01001803 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001804 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001805 )
yshah771dea82024-07-05 15:11:49 +00001806
yshah564ec9c2024-11-29 07:33:32 +00001807 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001808 workflow_status = await self.check_workflow_and_update_db(
1809 op_id, workflow_name, db_ksu
1810 )
yshah564ec9c2024-11-29 07:33:32 +00001811
garciadeblas96b94f52024-07-08 16:18:21 +02001812 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001813 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001814 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02001815 )
garciadeblas96b94f52024-07-08 16:18:21 +02001816 db_ksu["name"] = ksu_params["name"]
1817 db_ksu["description"] = ksu_params["description"]
1818 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
1819 "profile_type"
1820 ]
1821 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
1822 db_ksu["oka"] = ksu_params["oka"]
1823 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1824
yshah564ec9c2024-11-29 07:33:32 +00001825 # Clean items used in the workflow, no matter if the workflow succeeded
1826 clean_status, clean_msg = await self.odu.clean_items_workflow(
1827 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001828 )
1829 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00001830 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02001831 )
yshah564ec9c2024-11-29 07:33:32 +00001832 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001833 return
1834
yshah564ec9c2024-11-29 07:33:32 +00001835 async def delete(self, params, order_id):
1836 self.logger.info("ksu delete Enter")
1837 db_content = []
1838 op_params = []
1839 op_id = params["operation_id"]
1840 for ksu_id in params["ksus_list"]:
1841 self.initialize_operation(ksu_id, op_id)
1842 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1843 db_content.append(db_ksu)
1844 ksu_params = {}
1845 ksu_params["profile"] = {}
1846 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
1847 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
1848 # Update ksu_params["profile"] with profile name and age-pubkey
1849 profile_type = ksu_params["profile"]["profile_type"]
1850 profile_id = ksu_params["profile"]["_id"]
1851 profile_collection = self.profile_collection_mapping[profile_type]
1852 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1853 ksu_params["profile"]["name"] = db_profile["name"]
1854 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1855 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001856
garciadeblasadb81e82024-11-08 01:11:46 +01001857 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001858 "delete_ksus", op_id, op_params, db_content
1859 )
1860
1861 for db_ksu, ksu_params in zip(db_content, op_params):
garciadeblas713e1962025-01-17 12:49:19 +01001862 workflow_status = await self.check_workflow_and_update_db(
1863 op_id, workflow_name, db_ksu
1864 )
yshah564ec9c2024-11-29 07:33:32 +00001865
1866 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001867 resource_status, db_ksu = await self.check_resource_and_update_db(
yshah564ec9c2024-11-29 07:33:32 +00001868 "delete_ksus", op_id, ksu_params, db_ksu
1869 )
1870
1871 if resource_status:
1872 db_ksu["state"] == "DELETED"
1873 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1874 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
1875
1876 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
1877 return
1878
1879 async def clone(self, params, order_id):
1880 self.logger.info("ksu clone Enter")
1881 op_id = params["operation_id"]
1882 ksus_id = params["ksus_list"][0]
1883 self.initialize_operation(ksus_id, op_id)
1884 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1885 op_params = self.get_operation_params(db_content, op_id)
1886 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001887 "clone_ksus", op_id, op_params, db_content
1888 )
yshah564ec9c2024-11-29 07:33:32 +00001889
garciadeblas713e1962025-01-17 12:49:19 +01001890 workflow_status = await self.check_workflow_and_update_db(
1891 op_id, workflow_name, db_content
1892 )
yshah771dea82024-07-05 15:11:49 +00001893
1894 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001895 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001896 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001897 )
garciadeblas96b94f52024-07-08 16:18:21 +02001898 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001899
1900 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001901 return
1902
yshah564ec9c2024-11-29 07:33:32 +00001903 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001904 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00001905 op_id = params["operation_id"]
1906 ksus_id = params["ksus_list"][0]
1907 self.initialize_operation(ksus_id, op_id)
1908 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1909 op_params = self.get_operation_params(db_content, op_id)
garciadeblasadb81e82024-11-08 01:11:46 +01001910 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001911 "move_ksus", op_id, op_params, db_content
1912 )
yshah564ec9c2024-11-29 07:33:32 +00001913
garciadeblas713e1962025-01-17 12:49:19 +01001914 workflow_status = await self.check_workflow_and_update_db(
1915 op_id, workflow_name, db_content
1916 )
yshah771dea82024-07-05 15:11:49 +00001917
1918 if workflow_status:
garciadeblas713e1962025-01-17 12:49:19 +01001919 resource_status, db_content = await self.check_resource_and_update_db(
garciadeblas96b94f52024-07-08 16:18:21 +02001920 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001921 )
garciadeblas96b94f52024-07-08 16:18:21 +02001922 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001923
1924 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001925 return