blob: 3d974fcebaae7b4a318db986b6e546ebc7a92074 [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
rshri932105f2024-07-05 15:11:55 +000030
# Maps the profile-type field names stored on cluster documents to short
# names — presumably the directory/section names used on the Git repo side.
# NOTE(review): not referenced anywhere in this chunk; verify its consumer
# before renaming or removing.
MAP_PROFILE = {
    "infra_controller_profiles": "infra-controllers",
    "infra_config_profiles": "infra-configs",
    "resource_profiles": "managed_resources",
    "app_profiles": "apps",
}
37
rshri932105f2024-07-05 15:11:55 +000038
class GitOpsLcm(LcmBase):
    """Base LCM class for GitOps-managed items.

    Implements the shared operation lifecycle used by subclasses (e.g.
    ``ClusterLcm``): initialize an entry of the item's ``operationHistory``,
    track the associated ODU workflow, check resource readiness, and persist
    the resulting states to the DB (``self.db`` comes from ``LcmBase``).
    """

    # Default DB collection; subclasses override it (e.g. "clusters").
    db_collection = "gitops"
    # NOTE(review): these two class attributes appear unused — every method
    # below shadows them with local variables of the same name. Candidates
    # for removal; confirm no external reader depends on them.
    workflow_status = None
    resource_status = None

    # Profile-type field name (on a cluster document) -> DB collection that
    # stores documents of that profile type.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """Wire up logger, task registry and the ODU workflow engine.

        :param msg: message-bus client, forwarded to OduWorkflow and LcmBase
        :param lcm_tasks: shared LCM task registry
        :param config: LCM configuration
        """
        self.logger = logging.getLogger("lcm.gitops")
        self.lcm_tasks = lcm_tasks
        self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
        # Polling timeouts (seconds) used by subclasses' readiness checks.
        self._checkloop_kustomization_timeout = 900
        self._checkloop_resource_timeout = 900
        # Per-workflow hooks; subclasses register "check_resource_function"
        # entries keyed by workflow name (see check_resource_status).
        self._workflows = {}
        super().__init__(msg, self.logger)

    async def check_dummy_operation(self, op_id, op_params, content):
        """Fallback resource check: log the inputs and report success."""
        self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
        return True, "OK"

    def initialize_operation(self, item_id, op_id):
        """Mark operation ``op_id`` of item ``item_id`` as started in the DB.

        Resets the operation's workflow/resource/operation states and records
        it as the item's ``current_operation``.
        """
        db_item = self.db.get_one(self.db_collection, {"_id": item_id})
        operation = next(
            (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
            None,
        )
        # NOTE(review): if op_id is absent from operationHistory, ``operation``
        # is None and the next line raises TypeError — confirm callers always
        # pass an op_id already present in the history.
        operation["workflowState"] = "PROCESSING"
        operation["resourceState"] = "NOT_READY"
        operation["operationState"] = "IN_PROGRESS"
        operation["gitOperationInfo"] = None
        db_item["current_operation"] = operation["op_id"]
        self.db.set_one(self.db_collection, {"_id": item_id}, db_item)

    def get_operation_params(self, item, operation_id):
        """Return the ``operationParams`` of ``operation_id`` in the item's history."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationParams", {})

    def get_operation_type(self, item, operation_id):
        """Return the ``operationType`` of ``operation_id`` in the item's history."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        # NOTE(review): the default is {} although operationType is compared
        # against strings ("create"/"delete") by callers — consider "".
        return operation.get("operationType", {})

    def update_operation_history(
        self, content, op_id, workflow_status=None, resource_status=None, op_end=True
    ):
        """Update the matching ``operationHistory`` entry of ``content``.

        :param content: item document containing ``operationHistory``
        :param op_id: id of the history entry to update
        :param workflow_status: None to skip; truthy -> workflow COMPLETED,
            falsy -> workflow ERROR and operation FAILED
        :param resource_status: None to skip; truthy -> resource READY and
            operation COMPLETED, falsy -> NOT_READY and FAILED
        :param op_end: when True, stamp ``endDate`` with the current time
        :return: the (mutated) ``content`` document
        """
        self.logger.info(
            f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
        )
        self.logger.debug(f"Content: {content}")

        op_num = 0
        for operation in content["operationHistory"]:
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                # Only truthiness is tested below, so callers may pass bools
                # or non-empty strings (see common_check_list).
                if workflow_status is not None:
                    if workflow_status:
                        operation["workflowState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["workflowState"] = "ERROR"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if resource_status is not None:
                    if resource_status:
                        operation["resourceState"] = "READY"
                        operation["operationState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["resourceState"] = "NOT_READY"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if op_end:
                    now = time()
                    operation["endDate"] = now
                break
            op_num += 1
        self.logger.debug("content: {}".format(content))

        return content

    async def check_workflow(self, op_id, workflow_name, db_content):
        """Wait for workflow completion and persist the derived item state.

        Maps (operationType, workflow result) to the item ``state`` and sets
        ``resourceState`` accordingly, then writes the item back to the DB.

        :return: the boolean workflow status
        """
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Message: {}".format(
                workflow_status, workflow_msg
            )
        )
        operation_type = self.get_operation_type(db_content, op_id)
        if operation_type == "create" and workflow_status:
            db_content["state"] = "CREATED"
        elif operation_type == "create" and not workflow_status:
            db_content["state"] = "FAILED_CREATION"
        elif operation_type == "delete" and workflow_status:
            db_content["state"] = "DELETED"
        elif operation_type == "delete" and not workflow_status:
            db_content["state"] = "FAILED_DELETION"

        if workflow_status:
            db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, None
        )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        return workflow_status

    async def check_resource(self, op_id, resource_name, op_params, db_content):
        """Check resource readiness after a successful workflow.

        Assumes the workflow already succeeded (``workflow_status`` is fixed
        to True) and finishes the operation: sets ``resourceState``, updates
        the history entry, and clears ``current_operation``.

        :return: (resource_status, updated db_content) — NOT persisted here.
        """
        workflow_status = True

        resource_status, resource_msg = await self.check_resource_status(
            resource_name, op_id, op_params, db_content
        )
        self.logger.info(
            "Resource Status: {} Resource Message: {}".format(
                resource_status, resource_msg
            )
        )

        if resource_status:
            db_content["resourceState"] = "READY"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, resource_status
        )
        db_content["operatingState"] = "IDLE"
        db_content["current_operation"] = None
        return resource_status, db_content

    async def common_check_list(self, op_id, checkings_list, db_collection, db_item):
        """Run a sequence of readiness checks, persisting progress per step.

        Each ``checking`` dict carries item/name/namespace/flag/timeout plus
        an ``enable`` switch and the ``resourceState`` to record on success.
        Stops at the first failing check.

        :return: (True, "OK") on success, (False, message) on failure/exception
        """
        try:
            for checking in checkings_list:
                if checking["enable"]:
                    status, message = await self.odu.readiness_loop(
                        item=checking["item"],
                        name=checking["name"],
                        namespace=checking["namespace"],
                        flag=checking["flag"],
                        timeout=checking["timeout"],
                    )
                    if not status:
                        return status, message
                    else:
                        db_item["resourceState"] = checking["resourceState"]
                        # NOTE(review): "COMPLETED" and the resourceState string
                        # are passed where update_operation_history tests
                        # truthiness only — both are non-empty, so this marks
                        # the step successful; relies on that truthiness.
                        db_item = self.update_operation_history(
                            db_item, op_id, "COMPLETED", checking["resourceState"]
                        )
                        self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
        except Exception as e:
            self.logger.debug(traceback.format_exc())
            self.logger.debug(f"Exception: {e}", exc_info=True)
            return False, f"Unexpected exception: {e}"
        return True, "OK"

    async def check_resource_status(self, key, op_id, op_params, content):
        """Dispatch to the workflow-specific resource check registered for ``key``.

        Falls back to ``check_dummy_operation`` (always succeeds) when no
        "check_resource_function" is registered in ``self._workflows``.
        """
        self.logger.info(
            f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}. Content: {content}"
        )
        check_resource_function = self._workflows.get(key, {}).get(
            "check_resource_function"
        )
        self.logger.info("check_resource function : {}".format(check_resource_function))
        if check_resource_function:
            return await check_resource_function(op_id, op_params, content)
        else:
            return await self.check_dummy_operation(op_id, op_params, content)

    def decrypting_key(self, content):
        """Return ``{"cluster": <decrypted deep copy of content>}``.

        The age key pair is decrypted on a deep copy so the original document
        (and the DB) keeps the encrypted values; the copy is what gets passed
        to the ODU workflows.
        """
        # This deep copy is for to be passed to ODU workflows.
        cluster_copy = copy.deepcopy(content)

        # decrypting the key
        self.db.encrypt_decrypt_fields(
            cluster_copy,
            "decrypt",
            ["age_pubkey", "age_privkey"],
            schema_version="1.11",
            salt=cluster_copy["_id"],
        )
        db_cluster_copy = {
            "cluster": cluster_copy,
        }
        return db_cluster_copy
240
garciadeblas72412282024-11-07 12:41:54 +0100241
242class ClusterLcm(GitOpsLcm):
garciadeblas96b94f52024-07-08 16:18:21 +0200243 db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000244
    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)
        # Register the cluster workflows whose resource readiness is checked
        # by a dedicated coroutine (see GitOpsLcm.check_resource_status).
        self._workflows = {
            "create_cluster": {
                "check_resource_function": self.check_create_cluster,
            },
            "register_cluster": {
                "check_resource_function": self.check_register_cluster,
            },
            "update_cluster": {
                "check_resource_function": self.check_update_cluster,
            },
        }
        # Legacy K8s-cluster LCM, used to register the new cluster in the
        # "k8sclusters" collection after creation (see create()).
        self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
264
rshri948f7de2024-12-02 03:42:35 +0000265 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000266 self.logger.info("cluster Create Enter")
267
rshri948f7de2024-12-02 03:42:35 +0000268 # To get the cluster details
269 cluster_id = params["cluster_id"]
270 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
271
272 # To get the operation params details
273 op_id = params["operation_id"]
274 op_params = self.get_operation_params(db_cluster, op_id)
275
276 # To initialize the operation states
277 self.initialize_operation(cluster_id, op_id)
278
279 # To copy the cluster content and decrypting the key to use in workflows
rshric3564942024-11-12 18:12:38 +0000280 db_cluster_copy = self.decrypting_key(db_cluster)
281
rshri948f7de2024-12-02 03:42:35 +0000282 # To get the vim account details
rshric3564942024-11-12 18:12:38 +0000283 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
284 db_cluster_copy["vim_account"] = db_vim
285
garciadeblasadb81e82024-11-08 01:11:46 +0100286 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +0000287 "create_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200288 )
rshri932105f2024-07-05 15:11:55 +0000289 self.logger.info("workflow_name is :{}".format(workflow_name))
290
garciadeblas96b94f52024-07-08 16:18:21 +0200291 workflow_status, workflow_msg = await self.odu.check_workflow_status(
292 workflow_name
293 )
rshri932105f2024-07-05 15:11:55 +0000294 self.logger.info(
295 "workflow_status is :{} and workflow_msg is :{}".format(
296 workflow_status, workflow_msg
297 )
298 )
299 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200300 db_cluster["state"] = "CREATED"
301 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000302 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200303 db_cluster["state"] = "FAILED_CREATION"
304 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000305 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000306 db_cluster = self.update_operation_history(
307 db_cluster, op_id, workflow_status, None
308 )
garciadeblas96b94f52024-07-08 16:18:21 +0200309 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000310
garciadeblas28bff0f2024-09-16 12:53:07 +0200311 # Clean items used in the workflow, no matter if the workflow succeeded
312 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +0000313 "create_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +0200314 )
315 self.logger.info(
316 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
317 )
318
rshri932105f2024-07-05 15:11:55 +0000319 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100320 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +0000321 "create_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000322 )
323 self.logger.info(
324 "resource_status is :{} and resource_msg is :{}".format(
325 resource_status, resource_msg
326 )
327 )
328 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200329 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000330 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200331 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000332
garciadeblas96b94f52024-07-08 16:18:21 +0200333 db_cluster["operatingState"] = "IDLE"
334 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000335 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000336 )
shahithya70a3fc92024-11-12 11:01:05 +0000337 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200338 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
339 self.update_profile_state(db_cluster, workflow_status, resource_status)
rshri948f7de2024-12-02 03:42:35 +0000340
341 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
342
343 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
344 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
345 # To call the lcm.py for registering the cluster in k8scluster lcm.
346 db_register["credentials"] = cluster_creds
347 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
348 register = await self.regist.create(db_register, order_id)
349 self.logger.debug(f"Register is : {register}")
350 else:
351 db_register["_admin"]["operationalState"] = "ERROR"
352 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
353 # To call the lcm.py for registering the cluster in k8scluster lcm.
354 db_register["credentials"] = cluster_creds
355 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
356
rshri932105f2024-07-05 15:11:55 +0000357 return
358
garciadeblas72412282024-11-07 12:41:54 +0100359 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100360 self.logger.info(
361 f"check_create_cluster Operation {op_id}. Params: {op_params}."
362 )
363 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100364 db_cluster = content["cluster"]
365 cluster_name = db_cluster["git_name"].lower()
366 cluster_kustomization_name = cluster_name
367 db_vim_account = content["vim_account"]
368 cloud_type = db_vim_account["vim_type"]
369 nodepool_name = ""
370 if cloud_type == "aws":
371 nodepool_name = f"{cluster_name}-nodegroup"
372 cluster_name = f"{cluster_name}-cluster"
373 elif cloud_type == "gcp":
374 nodepool_name = f"nodepool-{cluster_name}"
375 bootstrap = op_params.get("bootstrap", True)
376 if cloud_type in ("azure", "gcp", "aws"):
377 checkings_list = [
378 {
379 "item": "kustomization",
380 "name": cluster_kustomization_name,
381 "namespace": "managed-resources",
382 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000383 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100384 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100385 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100386 },
387 {
388 "item": f"cluster_{cloud_type}",
389 "name": cluster_name,
390 "namespace": "",
391 "flag": "Synced",
392 "timeout": self._checkloop_resource_timeout,
393 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100394 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100395 },
396 {
397 "item": f"cluster_{cloud_type}",
398 "name": cluster_name,
399 "namespace": "",
400 "flag": "Ready",
401 "timeout": self._checkloop_resource_timeout,
402 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100403 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100404 },
405 {
406 "item": "kustomization",
407 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
408 "namespace": "managed-resources",
409 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000410 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100411 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100412 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100413 },
414 ]
415 else:
416 return False, "Not suitable VIM account to check cluster status"
417 if nodepool_name:
418 nodepool_check = {
419 "item": f"nodepool_{cloud_type}",
420 "name": nodepool_name,
421 "namespace": "",
422 "flag": "Ready",
423 "timeout": self._checkloop_resource_timeout,
424 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100425 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100426 }
427 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000428 return await self.common_check_list(
429 op_id, checkings_list, "clusters", db_cluster
430 )
garciadeblas72412282024-11-07 12:41:54 +0100431
garciadeblasb0a42c22024-11-13 16:00:10 +0100432 async def check_register_cluster(self, op_id, op_params, content):
433 self.logger.info(
434 f"check_register_cluster Operation {op_id}. Params: {op_params}."
435 )
436 # self.logger.debug(f"Content: {content}")
437 db_cluster = content["cluster"]
438 cluster_name = db_cluster["git_name"].lower()
439 cluster_kustomization_name = cluster_name
440 bootstrap = op_params.get("bootstrap", True)
441 checkings_list = [
442 {
443 "item": "kustomization",
444 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
445 "namespace": "managed-resources",
446 "flag": "Ready",
447 "timeout": self._checkloop_kustomization_timeout,
448 "enable": bootstrap,
449 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
450 },
451 ]
yshahcb9075f2024-11-22 12:08:57 +0000452 return await self.common_check_list(
453 op_id, checkings_list, "clusters", db_cluster
454 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100455
456 async def check_update_cluster(self, op_id, op_params, content):
457 self.logger.info(
458 f"check_create_cluster Operation {op_id}. Params: {op_params}."
459 )
460 # self.logger.debug(f"Content: {content}")
461 db_cluster = content["cluster"]
462 cluster_name = db_cluster["git_name"].lower()
463 cluster_kustomization_name = cluster_name
464 db_vim_account = content["vim_account"]
465 cloud_type = db_vim_account["vim_type"]
466 nodepool_name = ""
467 if cloud_type == "aws":
468 nodepool_name = f"{cluster_name}-nodegroup"
469 cluster_name = f"{cluster_name}-cluster"
470 elif cloud_type == "gcp":
471 nodepool_name = f"nodepool-{cluster_name}"
472 if cloud_type in ("azure", "gcp", "aws"):
473 checkings_list = [
474 {
475 "item": "kustomization",
476 "name": cluster_kustomization_name,
477 "namespace": "managed-resources",
478 "flag": "Ready",
479 "timeout": self._checkloop_kustomization_timeout,
480 "enable": True,
481 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
482 },
483 {
484 "item": f"cluster_{cloud_type}",
485 "name": cluster_name,
486 "namespace": "",
487 "flag": "Synced",
488 "timeout": self._checkloop_resource_timeout,
489 "enable": True,
490 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
491 },
492 {
493 "item": f"cluster_{cloud_type}",
494 "name": cluster_name,
495 "namespace": "",
496 "flag": "Ready",
497 "timeout": self._checkloop_resource_timeout,
498 "enable": True,
499 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
500 },
501 ]
502 else:
503 return False, "Not suitable VIM account to check cluster status"
504 if nodepool_name:
505 nodepool_check = {
506 "item": f"nodepool_{cloud_type}",
507 "name": nodepool_name,
508 "namespace": "",
509 "flag": "Ready",
510 "timeout": self._checkloop_resource_timeout,
511 "enable": True,
512 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
513 }
514 checkings_list.append(nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000515 return await self.common_check_list(
516 op_id, checkings_list, "clusters", db_cluster
517 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100518
garciadeblas96b94f52024-07-08 16:18:21 +0200519 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000520 profiles = [
521 "infra_controller_profiles",
522 "infra_config_profiles",
523 "app_profiles",
524 "resource_profiles",
525 ]
rshri948f7de2024-12-02 03:42:35 +0000526 """
rshri932105f2024-07-05 15:11:55 +0000527 profiles_collection = {
528 "infra_controller_profiles": "k8sinfra_controller",
529 "infra_config_profiles": "k8sinfra_config",
530 "app_profiles": "k8sapp",
531 "resource_profiles": "k8sresource",
532 }
rshri948f7de2024-12-02 03:42:35 +0000533 """
Your Name86149632024-11-14 16:17:16 +0000534 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000535 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200536 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000537 # db_collection = profiles_collection[profile_type]
538 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000539 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000540 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200541 db_profile["state"] = db_cluster["state"]
542 db_profile["resourceState"] = db_cluster["resourceState"]
543 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000544 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000545 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000546 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000547 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000548 )
rshri932105f2024-07-05 15:11:55 +0000549 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
550
rshri948f7de2024-12-02 03:42:35 +0000551 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000552 self.logger.info("cluster delete Enter")
rshri948f7de2024-12-02 03:42:35 +0000553
554 # To get the cluster details
555 cluster_id = params["cluster_id"]
556 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
557
558 # To get the operation params details
559 op_id = params["operation_id"]
560 op_params = self.get_operation_params(db_cluster, op_id)
561
562 # To initialize the operation states
563 self.initialize_operation(cluster_id, op_id)
564
565 # To copy the cluster content and decrypting the key to use in workflows
566 db_cluster_copy = self.decrypting_key(db_cluster)
567
garciadeblas6b2112c2024-12-20 10:35:13 +0100568 # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
569 # This if clause will be removed
garciadeblas12470812024-11-18 10:33:12 +0100570 if db_cluster["created"] == "false":
rshri948f7de2024-12-02 03:42:35 +0000571 return await self.deregister(params, order_id)
rshri932105f2024-07-05 15:11:55 +0000572
garciadeblasadb81e82024-11-08 01:11:46 +0100573 _, workflow_name = await self.odu.launch_workflow(
rshri948f7de2024-12-02 03:42:35 +0000574 "delete_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200575 )
rshri932105f2024-07-05 15:11:55 +0000576 self.logger.info("workflow_name is :{}".format(workflow_name))
577
garciadeblas96b94f52024-07-08 16:18:21 +0200578 workflow_status, workflow_msg = await self.odu.check_workflow_status(
579 workflow_name
580 )
rshri932105f2024-07-05 15:11:55 +0000581 self.logger.info(
582 "workflow_status is :{} and workflow_msg is :{}".format(
583 workflow_status, workflow_msg
584 )
585 )
586 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200587 db_cluster["state"] = "DELETED"
588 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000589 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200590 db_cluster["state"] = "FAILED_DELETION"
591 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000592 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000593 db_cluster = self.update_operation_history(
594 db_cluster, op_id, workflow_status, None
595 )
garciadeblas96b94f52024-07-08 16:18:21 +0200596 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000597
garciadeblas98f9a3d2024-12-10 13:42:47 +0100598 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
599 clean_status, clean_msg = await self.odu.clean_items_workflow(
600 "delete_cluster", op_id, op_params, db_cluster_copy
601 )
602 self.logger.info(
603 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
604 )
605
rshri932105f2024-07-05 15:11:55 +0000606 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100607 resource_status, resource_msg = await self.check_resource_status(
rshri948f7de2024-12-02 03:42:35 +0000608 "delete_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000609 )
610 self.logger.info(
611 "resource_status is :{} and resource_msg is :{}".format(
612 resource_status, resource_msg
613 )
614 )
615 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200616 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000617 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200618 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000619
garciadeblas96b94f52024-07-08 16:18:21 +0200620 db_cluster["operatingState"] = "IDLE"
621 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000622 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +0200623 )
shahithya70a3fc92024-11-12 11:01:05 +0000624 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200625 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000626
garciadeblas96b94f52024-07-08 16:18:21 +0200627 # To delete it from DB
628 if db_cluster["state"] == "DELETED":
629 self.delete_cluster(db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000630
631 # To delete it from k8scluster collection
632 self.db.del_one("k8sclusters", {"name": db_cluster["name"]})
633
rshri932105f2024-07-05 15:11:55 +0000634 return
635
garciadeblas96b94f52024-07-08 16:18:21 +0200636 def delete_cluster(self, db_cluster):
637 # Actually, item_content is equal to db_cluster
638 # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
639 # self.logger.debug("item_content is : {}".format(item_content))
rshri932105f2024-07-05 15:11:55 +0000640
rshri932105f2024-07-05 15:11:55 +0000641 # detach profiles
642 update_dict = None
643 profiles_to_detach = [
644 "infra_controller_profiles",
645 "infra_config_profiles",
646 "app_profiles",
647 "resource_profiles",
648 ]
rshri948f7de2024-12-02 03:42:35 +0000649 """
rshri932105f2024-07-05 15:11:55 +0000650 profiles_collection = {
651 "infra_controller_profiles": "k8sinfra_controller",
652 "infra_config_profiles": "k8sinfra_config",
653 "app_profiles": "k8sapp",
654 "resource_profiles": "k8sresource",
655 }
rshri948f7de2024-12-02 03:42:35 +0000656 """
rshri932105f2024-07-05 15:11:55 +0000657 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200658 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200659 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000660 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000661 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000662 # db_collection = profiles_collection[profile_type]
663 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000664 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200665 self.logger.debug("the db_profile is :{}".format(db_profile))
666 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200667 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000668 )
garciadeblasc2552852024-10-22 12:39:32 +0200669 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000670 "the db_profile name is :{}".format(db_profile["name"])
671 )
garciadeblas96b94f52024-07-08 16:18:21 +0200672 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000673 self.db.del_one(db_collection, {"_id": profile_id})
674 else:
rshri932105f2024-07-05 15:11:55 +0000675 profile_ids.remove(profile_id)
676 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000677 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200678 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000679 )
garciadeblas96b94f52024-07-08 16:18:21 +0200680 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000681
    async def attach_profile(self, params, order_id):
        """Attach a profile to a cluster through the ODU GitOps workflow.

        :param params: operation payload; expects "cluster_id", "operation_id",
            "profile_id" and "profile_type" keys
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; progress and results are persisted in the "clusters"
            collection
        """
        self.logger.info("profile attach Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # To get the profile details
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        # Map the external profile type to its DB collection name.
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        # content["profile"] = db_profile
        db_cluster_copy["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # GIT_SYNCED: the workflow pushed the change to the git repo; the
        # resource itself may still be reconciling.
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            profile_list = db_cluster[profile_type]
            # Only record the profile on the cluster when the resource
            # actually reached READY.
            if resource_status:
                profile_list.append(profile_id)
            db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # NOTE(review): when the workflow itself fails, operatingState and
        # current_operation are not reset on this path — confirm intended.
        return
760
    async def detach_profile(self, params, order_id):
        """Detach a profile from a cluster through the ODU GitOps workflow.

        :param params: operation payload; expects "cluster_id", "operation_id",
            "profile_id" and "profile_type" keys
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; progress and results are persisted in the "clusters"
            collection
        """
        self.logger.info("profile dettach Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # To get the profile details
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        # Map the external profile type to its DB collection name.
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        # content["profile"] = db_profile
        db_cluster_copy["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # GIT_SYNCED: the workflow pushed the change to the git repo; the
        # resource itself may still be reconciling.
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            profile_list = db_cluster[profile_type]
            self.logger.info("profile list is : {}".format(profile_list))
            # Only drop the profile from the cluster record when the resource
            # change actually succeeded.
            if resource_status:
                profile_list.remove(profile_id)
            db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # NOTE(review): when the workflow itself fails, operatingState and
        # current_operation are not reset on this path — confirm intended.
        return
840
    async def register(self, params, order_id):
        """Register an existing cluster through the ODU GitOps workflow.

        On success the cluster is also registered with the legacy k8scluster
        LCM (``self.regist``) via its mirror record in "k8sclusters".

        :param params: operation payload; expects "cluster_id" and "operation_id"
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; progress and results are persisted in the DB
        """
        self.logger.info("cluster register enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        _, workflow_name = await self.odu.launch_workflow(
            "register_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "register_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "register_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Copy the cluster credentials into the legacy "k8sclusters" record
        # that shares the cluster name.
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
        db_register["credentials"] = db_cluster["credentials"]
        self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            db_register["_admin"]["operationalState"] = "ERROR"
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
928
    async def deregister(self, params, order_id):
        """Deregister a cluster through the ODU GitOps workflow, then delete it.

        :param params: operation payload; expects "cluster_id" and "operation_id"
        :param order_id: identifier of the kafka order that triggered the task
        :return: result of ``self.delete``, which removes the cluster record
        """
        self.logger.info("cluster deregister enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        _, workflow_name = await self.odu.launch_workflow(
            "deregister_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "deregister_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "deregister_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
        # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
        # Setting created flag to true avoids infinite loops when deregistering a cluster
        db_cluster["created"] = "true"
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Finish by removing the cluster record itself.
        return await self.delete(params, order_id)
rshri932105f2024-07-05 15:11:55 +00001004
rshri948f7de2024-12-02 03:42:35 +00001005 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001006 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001007 cluster_id = params["cluster_id"]
1008 op_id = params["operation_id"]
1009 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001010 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1011 if result:
1012 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001013 op_len = 0
1014 for operations in db_cluster["operationHistory"]:
1015 if operations["op_id"] == op_id:
1016 db_cluster["operationHistory"][op_len]["result"] = result
1017 db_cluster["operationHistory"][op_len]["endDate"] = time()
1018 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001019 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001020 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001021 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001022 return
1023
rshri948f7de2024-12-02 03:42:35 +00001024 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001025 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001026 # To get the cluster details
1027 cluster_id = params["cluster_id"]
1028 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1029
1030 # To get the operation params details
1031 op_id = params["operation_id"]
1032 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001033
rshric3564942024-11-12 18:12:38 +00001034 db_cluster_copy = self.decrypting_key(db_cluster)
1035
1036 # vim account details
1037 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
1038 db_cluster_copy["vim_account"] = db_vim
1039
garciadeblasadb81e82024-11-08 01:11:46 +01001040 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +00001041 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001042 )
1043 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1044 workflow_name
1045 )
1046 self.logger.info(
1047 "Workflow Status: {} Workflow Message: {}".format(
1048 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001049 )
garciadeblas96b94f52024-07-08 16:18:21 +02001050 )
1051
1052 if workflow_status:
1053 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1054 else:
1055 db_cluster["resourceState"] = "ERROR"
1056
yshahcb9075f2024-11-22 12:08:57 +00001057 db_cluster = self.update_operation_history(
1058 db_cluster, op_id, workflow_status, None
1059 )
garciadeblas96b94f52024-07-08 16:18:21 +02001060 # self.logger.info("Db content: {}".format(db_content))
1061 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1062 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1063
garciadeblas28bff0f2024-09-16 12:53:07 +02001064 # Clean items used in the workflow, no matter if the workflow succeeded
1065 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +00001066 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +02001067 )
1068 self.logger.info(
1069 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1070 )
garciadeblas96b94f52024-07-08 16:18:21 +02001071 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001072 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +00001073 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001074 )
1075 self.logger.info(
1076 "Resource Status: {} Resource Message: {}".format(
1077 resource_status, resource_msg
1078 )
1079 )
yshah771dea82024-07-05 15:11:49 +00001080
1081 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001082 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001083 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001084 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001085
yshah0defcd52024-11-18 07:41:35 +00001086 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001087 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001088 )
1089
garciadeblas96b94f52024-07-08 16:18:21 +02001090 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001091 # self.logger.info("db_cluster: {}".format(db_cluster))
1092 # TODO: verify enxtcondition
1093 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1094 if workflow_status:
1095 if "k8s_version" in op_params:
1096 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001097 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001098 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001099 if "node_size" in op_params:
1100 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001101 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001102 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001103 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001104 return
1105
1106
class CloudCredentialsLcm(GitOpsLcm):
    """LCM handler for cloud (VIM) credentials managed through ODU workflows."""

    # Credentials are stored in the standard "vim_accounts" collection.
    db_collection = "vim_accounts"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def add(self, params, order_id):
        """Create cloud credentials via the "create_cloud_credentials" workflow.

        :param params: the vim_account document payload; "_id" doubles as the
            operation id for the workflow
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; final state is persisted in "vim_accounts"
        """
        self.logger.info("Cloud Credentials create")
        vim_id = params["_id"]
        # The vim id is reused as the workflow operation id.
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # Decrypt the stored secrets in-place so the workflow receives
        # usable credentials (salted with the vim id).
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )

        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )

        # NOTE(review): the record is marked ENABLED regardless of the
        # workflow/resource outcome — confirm whether failures should be
        # reflected in operationalState instead.
        db_content["_admin"]["operationalState"] = "ENABLED"
        for operation in db_content["_admin"]["operations"]:
            if operation["lcmOperationType"] == "create":
                operation["operationState"] = "ENABLED"
        self.logger.info("Content : {}".format(db_content))
        self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
        return

    async def edit(self, params, order_id):
        """Update cloud credentials via the "update_cloud_credentials" workflow.

        :param params: the vim_account document payload; "_id" doubles as the
            operation id for the workflow
        :param order_id: identifier of the kafka order that triggered the task
        :return: None
        """
        self.logger.info("Cloud Credentials Update")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # Decrypt the stored secrets in-place so the workflow receives
        # usable credentials (salted with the vim id).
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "update_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        return

    async def remove(self, params, order_id):
        """Delete cloud credentials via the "delete_cloud_credentials" workflow.

        :param params: the vim_account document payload; "_id" doubles as the
            operation id for the workflow
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; the vim_account record is removed from the DB
        """
        self.logger.info("Cloud Credentials remove")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        # NOTE(review): the DB record is removed even if the workflow failed —
        # confirm this is intended.
        self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        return
1243
rshri932105f2024-07-05 15:11:55 +00001244
class K8sAppLcm(GitOpsLcm):
    """LCM handler for Kubernetes app profiles stored in "k8sapp"."""

    db_collection = "k8sapp"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an app profile via the "create_profile" ODU workflow.

        :param params: operation payload; expects "operation_id" and "profile_id"
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; the profile state is persisted in "k8sapp"
        """
        self.logger.info("App Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        # Tag the record so the generic profile workflow knows its kind.
        content["profile_type"] = "applications"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sapp", {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        # check_workflow updates the operation history with the workflow result.
        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        if workflow_status:
            # check_resource returns the refreshed profile record.
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.logger.info(f"App Create Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an app profile via the "delete_profile" ODU workflow.

        :param params: operation payload; expects "operation_id" and "profile_id"
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; the profile record is removed from "k8sapp" on success
        """
        self.logger.info("App delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        # check_workflow updates the operation history with the workflow result.
        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        if workflow_status:
            resource_status, content = await self.check_resource(
                "delete_profile", op_id, op_params, content
            )

            if resource_status:
                # The DELETED state is written before removing the record —
                # presumably so change listeners observe the final state;
                # confirm against the DB change-notification mechanism.
                content["state"] = "DELETED"
                self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
                self.db.del_one(self.db_collection, {"_id": content["_id"]})
            self.logger.info(f"App Delete Exit with resource status: {resource_status}")
        return
1315
1316
garciadeblas72412282024-11-07 12:41:54 +01001317class K8sResourceLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001318 db_collection = "k8sresource"
1319
    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        # All connection setup is delegated to the GitOpsLcm/LcmBase parents.
        super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001327
    async def create(self, params, order_id):
        """Create a managed-resource profile via the "create_profile" workflow.

        :param params: operation payload; expects "operation_id" and "profile_id"
        :param order_id: identifier of the kafka order that triggered the task
        :return: None; the profile state is persisted in "k8sresource"
        """
        self.logger.info("Resource Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sresource", {"_id": profile_id})
        # Tag the record so the generic profile workflow knows its kind.
        content["profile_type"] = "managed-resources"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sresource", {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        # check_workflow updates the operation history with the workflow result.
        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        if workflow_status:
            # check_resource returns the refreshed profile record.
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.logger.info(
                f"Resource Create Exit with resource status: {resource_status}"
            )
        return
1358
rshri948f7de2024-12-02 03:42:35 +00001359 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001360 self.logger.info("Resource delete Enter")
rshri948f7de2024-12-02 03:42:35 +00001361
1362 op_id = params["operation_id"]
1363 profile_id = params["profile_id"]
1364
1365 # To initialize the operation states
1366 self.initialize_operation(profile_id, op_id)
1367
1368 content = self.db.get_one("k8sresource", {"_id": profile_id})
1369 op_params = self.get_operation_params(content, op_id)
rshri932105f2024-07-05 15:11:55 +00001370
garciadeblasadb81e82024-11-08 01:11:46 +01001371 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001372 "delete_profile", op_id, op_params, content
1373 )
rshri932105f2024-07-05 15:11:55 +00001374 self.logger.info("workflow_name is :{}".format(workflow_name))
1375
yshah564ec9c2024-11-29 07:33:32 +00001376 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001377
1378 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001379 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001380 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001381 )
rshri932105f2024-07-05 15:11:55 +00001382
yshah564ec9c2024-11-29 07:33:32 +00001383 if resource_status:
1384 content["state"] = "DELETED"
1385 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1386 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1387 self.logger.info(
1388 f"Resource Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001389 )
rshri932105f2024-07-05 15:11:55 +00001390 return
1391
1392
garciadeblas72412282024-11-07 12:41:54 +01001393class K8sInfraControllerLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001394 db_collection = "k8sinfra_controller"
1395
rshri932105f2024-07-05 15:11:55 +00001396 def __init__(self, msg, lcm_tasks, config):
1397 """
1398 Init, Connect to database, filesystem storage, and messaging
1399 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1400 :return: None
1401 """
garciadeblas72412282024-11-07 12:41:54 +01001402 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001403
rshri948f7de2024-12-02 03:42:35 +00001404 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001405 self.logger.info("Infra controller Create Enter")
1406
rshri948f7de2024-12-02 03:42:35 +00001407 op_id = params["operation_id"]
1408 profile_id = params["profile_id"]
1409
1410 # To initialize the operation states
1411 self.initialize_operation(profile_id, op_id)
1412
1413 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1414 content["profile_type"] = "infra-controllers"
1415 op_params = self.get_operation_params(content, op_id)
1416 self.db.set_one("k8sinfra_controller", {"_id": content["_id"]}, content)
1417
garciadeblasadb81e82024-11-08 01:11:46 +01001418 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001419 "create_profile", op_id, op_params, content
1420 )
rshri932105f2024-07-05 15:11:55 +00001421 self.logger.info("workflow_name is :{}".format(workflow_name))
1422
yshah564ec9c2024-11-29 07:33:32 +00001423 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001424
1425 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001426 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001427 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001428 )
yshah564ec9c2024-11-29 07:33:32 +00001429 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1430 self.logger.info(
1431 f"Infra Controller Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001432 )
rshri932105f2024-07-05 15:11:55 +00001433 return
1434
rshri948f7de2024-12-02 03:42:35 +00001435 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001436 self.logger.info("Infra controller delete Enter")
rshri932105f2024-07-05 15:11:55 +00001437
rshri948f7de2024-12-02 03:42:35 +00001438 op_id = params["operation_id"]
1439 profile_id = params["profile_id"]
1440
1441 # To initialize the operation states
1442 self.initialize_operation(profile_id, op_id)
1443
1444 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1445 op_params = self.get_operation_params(content, op_id)
1446
garciadeblasadb81e82024-11-08 01:11:46 +01001447 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001448 "delete_profile", op_id, op_params, content
1449 )
rshri932105f2024-07-05 15:11:55 +00001450 self.logger.info("workflow_name is :{}".format(workflow_name))
1451
yshah564ec9c2024-11-29 07:33:32 +00001452 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001453
1454 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001455 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001456 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001457 )
rshri932105f2024-07-05 15:11:55 +00001458
yshah564ec9c2024-11-29 07:33:32 +00001459 if resource_status:
1460 content["state"] = "DELETED"
1461 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1462 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1463 self.logger.info(
1464 f"Infra Controller Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001465 )
rshri932105f2024-07-05 15:11:55 +00001466 return
1467
1468
garciadeblas72412282024-11-07 12:41:54 +01001469class K8sInfraConfigLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001470 db_collection = "k8sinfra_config"
1471
rshri932105f2024-07-05 15:11:55 +00001472 def __init__(self, msg, lcm_tasks, config):
1473 """
1474 Init, Connect to database, filesystem storage, and messaging
1475 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1476 :return: None
1477 """
garciadeblas72412282024-11-07 12:41:54 +01001478 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001479
rshri948f7de2024-12-02 03:42:35 +00001480 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001481 self.logger.info("Infra config Create Enter")
1482
rshri948f7de2024-12-02 03:42:35 +00001483 op_id = params["operation_id"]
1484 profile_id = params["profile_id"]
1485
1486 # To initialize the operation states
1487 self.initialize_operation(profile_id, op_id)
1488
1489 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1490 content["profile_type"] = "infra-configs"
1491 op_params = self.get_operation_params(content, op_id)
1492 self.db.set_one("k8sinfra_config", {"_id": content["_id"]}, content)
1493
garciadeblasadb81e82024-11-08 01:11:46 +01001494 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001495 "create_profile", op_id, op_params, content
1496 )
rshri932105f2024-07-05 15:11:55 +00001497 self.logger.info("workflow_name is :{}".format(workflow_name))
1498
yshah564ec9c2024-11-29 07:33:32 +00001499 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001500
1501 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001502 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001503 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001504 )
yshah564ec9c2024-11-29 07:33:32 +00001505 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1506 self.logger.info(
1507 f"Infra Config Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001508 )
rshri932105f2024-07-05 15:11:55 +00001509 return
1510
rshri948f7de2024-12-02 03:42:35 +00001511 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001512 self.logger.info("Infra config delete Enter")
1513
rshri948f7de2024-12-02 03:42:35 +00001514 op_id = params["operation_id"]
1515 profile_id = params["profile_id"]
1516
1517 # To initialize the operation states
1518 self.initialize_operation(profile_id, op_id)
1519
1520 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1521 op_params = self.get_operation_params(content, op_id)
1522
garciadeblasadb81e82024-11-08 01:11:46 +01001523 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001524 "delete_profile", op_id, op_params, content
1525 )
rshri932105f2024-07-05 15:11:55 +00001526 self.logger.info("workflow_name is :{}".format(workflow_name))
rshri932105f2024-07-05 15:11:55 +00001527
yshah564ec9c2024-11-29 07:33:32 +00001528 workflow_status = await self.check_workflow(op_id, workflow_name, content)
1529
rshri932105f2024-07-05 15:11:55 +00001530 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001531 resource_status, content = await self.check_resource(
1532 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001533 )
yshah564ec9c2024-11-29 07:33:32 +00001534
rshri932105f2024-07-05 15:11:55 +00001535 if resource_status:
yshah564ec9c2024-11-29 07:33:32 +00001536 content["state"] = "DELETED"
1537 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1538 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1539 self.logger.info(
1540 f"Infra Config Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001541 )
rshri932105f2024-07-05 15:11:55 +00001542
rshri932105f2024-07-05 15:11:55 +00001543 return
yshah771dea82024-07-05 15:11:49 +00001544
1545
garciadeblas72412282024-11-07 12:41:54 +01001546class OkaLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001547 db_collection = "okas"
1548
1549 def __init__(self, msg, lcm_tasks, config):
1550 """
1551 Init, Connect to database, filesystem storage, and messaging
1552 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1553 :return: None
1554 """
garciadeblas72412282024-11-07 12:41:54 +01001555 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001556
yshah564ec9c2024-11-29 07:33:32 +00001557 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001558 self.logger.info("OKA Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001559 op_id = params["operation_id"]
1560 oka_id = params["oka_id"]
1561 self.initialize_operation(oka_id, op_id)
1562 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1563 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001564
garciadeblasadb81e82024-11-08 01:11:46 +01001565 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001566 "create_oka", op_id, op_params, db_content
1567 )
yshah564ec9c2024-11-29 07:33:32 +00001568
1569 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001570
1571 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001572 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001573 "create_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001574 )
garciadeblas96b94f52024-07-08 16:18:21 +02001575 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001576 self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001577 return
1578
yshah564ec9c2024-11-29 07:33:32 +00001579 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001580 self.logger.info("OKA Edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001581 op_id = params["operation_id"]
1582 oka_id = params["oka_id"]
1583 self.initialize_operation(oka_id, op_id)
1584 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1585 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001586
garciadeblasadb81e82024-11-08 01:11:46 +01001587 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001588 "update_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001589 )
yshah564ec9c2024-11-29 07:33:32 +00001590 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001591
1592 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001593 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001594 "update_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001595 )
garciadeblas96b94f52024-07-08 16:18:21 +02001596 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001597 self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001598 return
1599
yshah564ec9c2024-11-29 07:33:32 +00001600 async def delete(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001601 self.logger.info("OKA delete Enter")
yshah564ec9c2024-11-29 07:33:32 +00001602 op_id = params["operation_id"]
1603 oka_id = params["oka_id"]
1604 self.initialize_operation(oka_id, op_id)
1605 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1606 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001607
garciadeblasadb81e82024-11-08 01:11:46 +01001608 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001609 "delete_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001610 )
yshah564ec9c2024-11-29 07:33:32 +00001611 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001612
1613 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001614 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001615 "delete_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001616 )
yshah771dea82024-07-05 15:11:49 +00001617
yshah564ec9c2024-11-29 07:33:32 +00001618 if resource_status:
1619 db_content["state"] == "DELETED"
1620 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
garciadeblas96b94f52024-07-08 16:18:21 +02001621 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah564ec9c2024-11-29 07:33:32 +00001622 self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001623 return
1624
1625
garciadeblas72412282024-11-07 12:41:54 +01001626class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001627 db_collection = "ksus"
1628
1629 def __init__(self, msg, lcm_tasks, config):
1630 """
1631 Init, Connect to database, filesystem storage, and messaging
1632 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1633 :return: None
1634 """
garciadeblas72412282024-11-07 12:41:54 +01001635 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001636
yshah564ec9c2024-11-29 07:33:32 +00001637 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001638 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001639 db_content = []
1640 op_params = []
1641 op_id = params["operation_id"]
1642 for ksu_id in params["ksus_list"]:
1643 self.logger.info("Ksu ID: {}".format(ksu_id))
1644 self.initialize_operation(ksu_id, op_id)
1645 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1646 self.logger.info("Db KSU: {}".format(db_ksu))
1647 db_content.append(db_ksu)
1648 ksu_params = {}
1649 ksu_params = self.get_operation_params(db_ksu, op_id)
1650 self.logger.info("Operation Params: {}".format(ksu_params))
1651 # Update ksu_params["profile"] with profile name and age-pubkey
1652 profile_type = ksu_params["profile"]["profile_type"]
1653 profile_id = ksu_params["profile"]["_id"]
1654 profile_collection = self.profile_collection_mapping[profile_type]
1655 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1656 ksu_params["profile"]["name"] = db_profile["name"]
1657 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1658 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1659 for oka in ksu_params["oka"]:
1660 if "sw_catalog_path" not in oka:
1661 oka_id = oka["_id"]
1662 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001663 oka_type = MAP_PROFILE[
1664 db_oka.get("profile_type", "infra_controller_profiles")
1665 ]
1666 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001667 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001668
garciadeblasadb81e82024-11-08 01:11:46 +01001669 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001670 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001671 )
yshah564ec9c2024-11-29 07:33:32 +00001672 for db_ksu, ksu_params in zip(db_content, op_params):
1673 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
yshah771dea82024-07-05 15:11:49 +00001674
garciadeblas96b94f52024-07-08 16:18:21 +02001675 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001676 resource_status, db_ksu = await self.check_resource(
1677 "create_ksus", op_id, ksu_params, db_ksu
1678 )
yshah771dea82024-07-05 15:11:49 +00001679
garciadeblas96b94f52024-07-08 16:18:21 +02001680 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1681
garciadeblasd8429852024-10-17 15:30:30 +02001682 # Clean items used in the workflow, no matter if the workflow succeeded
1683 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001684 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001685 )
1686 self.logger.info(
1687 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1688 )
yshah564ec9c2024-11-29 07:33:32 +00001689 self.logger.info(f"KSU Create EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001690 return
1691
yshah564ec9c2024-11-29 07:33:32 +00001692 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001693 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001694 db_content = []
1695 op_params = []
1696 op_id = params["operation_id"]
1697 for ksu_id in params["ksus_list"]:
1698 self.initialize_operation(ksu_id, op_id)
1699 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1700 db_content.append(db_ksu)
1701 ksu_params = {}
1702 ksu_params = self.get_operation_params(db_ksu, op_id)
1703 # Update ksu_params["profile"] with profile name and age-pubkey
1704 profile_type = ksu_params["profile"]["profile_type"]
1705 profile_id = ksu_params["profile"]["_id"]
1706 profile_collection = self.profile_collection_mapping[profile_type]
1707 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1708 ksu_params["profile"]["name"] = db_profile["name"]
1709 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1710 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1711 for oka in ksu_params["oka"]:
1712 if "sw_catalog_path" not in oka:
1713 oka_id = oka["_id"]
1714 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001715 oka_type = MAP_PROFILE[
1716 db_oka.get("profile_type", "infra_controller_profiles")
1717 ]
1718 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001719 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001720
garciadeblasadb81e82024-11-08 01:11:46 +01001721 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001722 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001723 )
yshah771dea82024-07-05 15:11:49 +00001724
yshah564ec9c2024-11-29 07:33:32 +00001725 for db_ksu, ksu_params in zip(db_content, op_params):
1726 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1727
garciadeblas96b94f52024-07-08 16:18:21 +02001728 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001729 resource_status, db_ksu = await self.check_resource(
1730 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02001731 )
garciadeblas96b94f52024-07-08 16:18:21 +02001732 db_ksu["name"] = ksu_params["name"]
1733 db_ksu["description"] = ksu_params["description"]
1734 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
1735 "profile_type"
1736 ]
1737 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
1738 db_ksu["oka"] = ksu_params["oka"]
1739 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1740
yshah564ec9c2024-11-29 07:33:32 +00001741 # Clean items used in the workflow, no matter if the workflow succeeded
1742 clean_status, clean_msg = await self.odu.clean_items_workflow(
1743 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001744 )
1745 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00001746 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02001747 )
yshah564ec9c2024-11-29 07:33:32 +00001748 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001749 return
1750
yshah564ec9c2024-11-29 07:33:32 +00001751 async def delete(self, params, order_id):
1752 self.logger.info("ksu delete Enter")
1753 db_content = []
1754 op_params = []
1755 op_id = params["operation_id"]
1756 for ksu_id in params["ksus_list"]:
1757 self.initialize_operation(ksu_id, op_id)
1758 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1759 db_content.append(db_ksu)
1760 ksu_params = {}
1761 ksu_params["profile"] = {}
1762 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
1763 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
1764 # Update ksu_params["profile"] with profile name and age-pubkey
1765 profile_type = ksu_params["profile"]["profile_type"]
1766 profile_id = ksu_params["profile"]["_id"]
1767 profile_collection = self.profile_collection_mapping[profile_type]
1768 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1769 ksu_params["profile"]["name"] = db_profile["name"]
1770 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1771 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001772
garciadeblasadb81e82024-11-08 01:11:46 +01001773 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001774 "delete_ksus", op_id, op_params, db_content
1775 )
1776
1777 for db_ksu, ksu_params in zip(db_content, op_params):
1778 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1779
1780 if workflow_status:
1781 resource_status, db_ksu = await self.check_resource(
1782 "delete_ksus", op_id, ksu_params, db_ksu
1783 )
1784
1785 if resource_status:
1786 db_ksu["state"] == "DELETED"
1787 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1788 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
1789
1790 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
1791 return
1792
1793 async def clone(self, params, order_id):
1794 self.logger.info("ksu clone Enter")
1795 op_id = params["operation_id"]
1796 ksus_id = params["ksus_list"][0]
1797 self.initialize_operation(ksus_id, op_id)
1798 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1799 op_params = self.get_operation_params(db_content, op_id)
1800 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001801 "clone_ksus", op_id, op_params, db_content
1802 )
yshah564ec9c2024-11-29 07:33:32 +00001803
1804 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001805
1806 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001807 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001808 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001809 )
garciadeblas96b94f52024-07-08 16:18:21 +02001810 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001811
1812 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001813 return
1814
yshah564ec9c2024-11-29 07:33:32 +00001815 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001816 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00001817 op_id = params["operation_id"]
1818 ksus_id = params["ksus_list"][0]
1819 self.initialize_operation(ksus_id, op_id)
1820 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1821 op_params = self.get_operation_params(db_content, op_id)
garciadeblasadb81e82024-11-08 01:11:46 +01001822 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001823 "move_ksus", op_id, op_params, db_content
1824 )
yshah564ec9c2024-11-29 07:33:32 +00001825
1826 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001827
1828 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001829 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001830 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001831 )
garciadeblas96b94f52024-07-08 16:18:21 +02001832 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001833
1834 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001835 return