blob: 70dee64dad6c82c5a3aab9eeebb835103b95836b [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
rshri932105f2024-07-05 15:11:55 +000030
# Profile-type keys (as stored in cluster documents) mapped to what appear to
# be the GitOps-side folder/section names — TODO confirm the consumers.
# NOTE(review): not referenced anywhere in this module's visible code, and
# "resource_profiles" maps to "managed_resources" here but to "k8sresource"
# in GitOpsLcm.profile_collection_mapping — verify which mapping callers use.
MAP_PROFILE = {
    "infra_controller_profiles": "infra-controllers",
    "infra_config_profiles": "infra-configs",
    "resource_profiles": "managed_resources",
    "app_profiles": "apps",
}
37
rshri932105f2024-07-05 15:11:55 +000038
class GitOpsLcm(LcmBase):
    """Common machinery for GitOps-driven LCM items (clusters, profiles...).

    Wraps the ODU workflow engine: subclasses launch workflows and register
    per-workflow resource checks in ``self._workflows``; this class provides
    the shared plumbing to track each operation's workflow/resource status in
    the item's ``operationHistory`` stored in the database.
    """

    # Default DB collection; subclasses override it (e.g. "clusters").
    db_collection = "gitops"
    # Class-level defaults for the last observed statuses.
    workflow_status = None
    resource_status = None

    # Profile-type key (as stored in the item document) -> DB collection name.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """Connect to the ODU workflow engine and the common LCM services.

        :param msg: message bus handler
        :param lcm_tasks: LCM task registry
        :param config: LCM configuration
        """
        self.logger = logging.getLogger("lcm.gitops")
        self.lcm_tasks = lcm_tasks
        self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
        # Timeouts (seconds) for the kustomization/resource readiness loops.
        self._checkloop_kustomization_timeout = 900
        self._checkloop_resource_timeout = 900
        # Workflow key -> {"check_resource_function": coroutine}; filled in
        # by subclasses and consumed by check_resource_status().
        self._workflows = {}
        super().__init__(msg, self.logger)

    async def check_dummy_operation(self, op_id, op_params, content):
        """Fallback resource check: log the operation and report success."""
        self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
        return True, "OK"

    def initialize_operation(self, item_id, op_id):
        """Mark operation ``op_id`` of item ``item_id`` as started in the DB.

        Sets the operation's workflow/resource/operation states to their
        initial values and records it as the item's current operation.
        NOTE(review): if ``op_id`` is not found in the operation history,
        ``operation`` is None and the assignments below raise — confirm
        callers always pass a valid op_id.
        """
        db_item = self.db.get_one(self.db_collection, {"_id": item_id})
        operation = next(
            (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
            None,
        )
        operation["workflowState"] = "PROCESSING"
        operation["resourceState"] = "NOT_READY"
        operation["operationState"] = "IN_PROGRESS"
        operation["gitOperationInfo"] = None
        db_item["current_operation"] = operation["op_id"]
        self.db.set_one(self.db_collection, {"_id": item_id}, db_item)

    def get_operation_params(self, item, operation_id):
        """Return the stored "operationParams" of the given operation.

        NOTE(review): find_in_list raises if the id is absent — confirm.
        """
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationParams", {})

    def get_operation_type(self, item, operation_id):
        """Return the stored "operationType" (e.g. "create"/"delete")."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationType", {})

    def update_operation_history(
        self, content, op_id, workflow_status=None, resource_status=None, op_end=True
    ):
        """Refresh operation ``op_id`` inside ``content["operationHistory"]``.

        Both statuses are evaluated by truthiness (callers pass booleans,
        strings or None). NOTE(review): the resource_status branch always
        runs after the workflow branch, so a falsy resource_status (e.g.
        None when resources were not checked yet) overwrites the operation
        as FAILED/NOT_READY even when the workflow succeeded — confirm this
        is intended for the intermediate "workflow done" updates.

        :param content: full item document (mutated in place)
        :param op_id: operation identifier to update
        :param workflow_status: truthy -> workflow COMPLETED, else ERROR/FAILED
        :param resource_status: truthy -> resource READY/COMPLETED, else FAILED
        :param op_end: when True, stamp "endDate" with the current time
        :return: the (mutated) content document
        """
        self.logger.info(
            f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
        )
        self.logger.debug(f"Content: {content}")

        op_num = 0
        for operation in content["operationHistory"]:
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                if workflow_status:
                    operation["workflowState"] = "COMPLETED"
                    operation["result"] = True
                else:
                    operation["workflowState"] = "ERROR"
                    operation["operationState"] = "FAILED"
                    operation["result"] = False

                if resource_status:
                    operation["resourceState"] = "READY"
                    operation["operationState"] = "COMPLETED"
                    operation["result"] = True
                else:
                    operation["resourceState"] = "NOT_READY"
                    operation["operationState"] = "FAILED"
                    operation["result"] = False

                if op_end:
                    now = time()
                    operation["endDate"] = now
                break
            op_num += 1
        self.logger.debug("content: {}".format(content))

        return content

    async def check_workflow(self, op_id, workflow_name, db_content):
        """Wait for a launched workflow and record its outcome in the DB.

        Derives the item "state" from the operation type (create/delete) and
        the workflow result, updates the operation history and persists the
        item.

        :return: the boolean workflow status
        """
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Message: {}".format(
                workflow_status, workflow_msg
            )
        )
        operation_type = self.get_operation_type(db_content, op_id)
        if operation_type == "create" and workflow_status:
            db_content["state"] = "CREATED"
        elif operation_type == "create" and not workflow_status:
            db_content["state"] = "FAILED_CREATION"
        elif operation_type == "delete" and workflow_status:
            db_content["state"] = "DELETED"
        elif operation_type == "delete" and not workflow_status:
            db_content["state"] = "FAILED_DELETION"

        if workflow_status:
            db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_content["resourceState"] = "ERROR"

        # resource_status is None here: resources have not been checked yet.
        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, None
        )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        return workflow_status

    async def check_resource(self, resource_name, op_id, op_params, db_content):
        """Check resource readiness and close the operation on the item.

        Assumes the workflow already succeeded (workflow_status hardcoded to
        True). Does NOT persist db_content; the caller is expected to.

        :return: (resource_status, updated db_content)
        """
        workflow_status = True

        resource_status, resource_msg = await self.check_resource_status(
            resource_name, op_id, op_params, db_content
        )
        self.logger.info(
            "Resource Status: {} Resource Message: {}".format(
                resource_status, resource_msg
            )
        )

        if resource_status:
            db_content["resourceState"] = "READY"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, resource_status
        )
        db_content["operatingState"] = "IDLE"
        db_content["current_operation"] = None
        return resource_status, db_content

    async def common_check_list(self, op_id, checkings_list, db_collection, db_item):
        """Run a list of readiness probes, persisting progress after each one.

        Each enabled checking delegates to the ODU readiness loop; on success
        the item's resourceState advances to the checking's state and the DB
        is updated. Stops at the first failed probe.
        NOTE(review): the literal "COMPLETED" and the resourceState string
        are passed where update_operation_history expects truthy statuses —
        both are truthy, so the operation is marked successful.

        :return: (True, "OK") on success, or (False, message) on failure
        """
        try:
            for checking in checkings_list:
                if checking["enable"]:
                    status, message = await self.odu.readiness_loop(
                        item=checking["item"],
                        name=checking["name"],
                        namespace=checking["namespace"],
                        flag=checking["flag"],
                        timeout=checking["timeout"],
                    )
                    if not status:
                        return status, message
                    else:
                        db_item["resourceState"] = checking["resourceState"]
                        db_item = self.update_operation_history(
                            db_item, op_id, "COMPLETED", checking["resourceState"]
                        )
                        self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
        except Exception as e:
            # Best-effort: any unexpected error is reported as a failed check.
            self.logger.debug(traceback.format_exc())
            self.logger.debug(f"Exception: {e}", exc_info=True)
            return False, f"Unexpected exception: {e}"
        return True, "OK"

    async def check_resource_status(self, key, op_id, op_params, content):
        """Dispatch to the workflow-specific resource check, if registered.

        Falls back to check_dummy_operation (always succeeds) when no
        "check_resource_function" is registered for ``key``.
        """
        self.logger.info(
            f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}. Content: {content}"
        )
        check_resource_function = self._workflows.get(key, {}).get(
            "check_resource_function"
        )
        self.logger.info("check_resource function : {}".format(check_resource_function))
        if check_resource_function:
            return await check_resource_function(op_id, op_params, content)
        else:
            return await self.check_dummy_operation(op_id, op_params, content)

    def decrypting_key(self, content):
        """Return {"cluster": <copy of content with decrypted age keys>}.

        The original document is left untouched; the decrypted copy is meant
        to be handed to the ODU workflows.
        """
        # Deep copy so decryption does not mutate the DB-backed document.
        cluster_copy = copy.deepcopy(content)

        # Decrypt the age key pair in place on the copy.
        self.db.encrypt_decrypt_fields(
            cluster_copy,
            "decrypt",
            ["age_pubkey", "age_privkey"],
            schema_version="1.11",
            salt=cluster_copy["_id"],
        )
        db_cluster_copy = {
            "cluster": cluster_copy,
        }
        return db_cluster_copy
238
garciadeblas72412282024-11-07 12:41:54 +0100239
240class ClusterLcm(GitOpsLcm):
garciadeblas96b94f52024-07-08 16:18:21 +0200241 db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000242
243 def __init__(self, msg, lcm_tasks, config):
244 """
245 Init, Connect to database, filesystem storage, and messaging
246 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
247 :return: None
248 """
garciadeblas72412282024-11-07 12:41:54 +0100249 super().__init__(msg, lcm_tasks, config)
250 self._workflows = {
251 "create_cluster": {
252 "check_resource_function": self.check_create_cluster,
253 },
garciadeblasb0a42c22024-11-13 16:00:10 +0100254 "register_cluster": {
255 "check_resource_function": self.check_register_cluster,
256 },
257 "update_cluster": {
258 "check_resource_function": self.check_update_cluster,
259 },
garciadeblas72412282024-11-07 12:41:54 +0100260 }
rshri932105f2024-07-05 15:11:55 +0000261 self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
262
rshri948f7de2024-12-02 03:42:35 +0000263 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000264 self.logger.info("cluster Create Enter")
265
rshri948f7de2024-12-02 03:42:35 +0000266 # To get the cluster details
267 cluster_id = params["cluster_id"]
268 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
269
270 # To get the operation params details
271 op_id = params["operation_id"]
272 op_params = self.get_operation_params(db_cluster, op_id)
273
274 # To initialize the operation states
275 self.initialize_operation(cluster_id, op_id)
276
277 # To copy the cluster content and decrypting the key to use in workflows
rshric3564942024-11-12 18:12:38 +0000278 db_cluster_copy = self.decrypting_key(db_cluster)
279
rshri948f7de2024-12-02 03:42:35 +0000280 # To get the vim account details
rshric3564942024-11-12 18:12:38 +0000281 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
282 db_cluster_copy["vim_account"] = db_vim
283
garciadeblasadb81e82024-11-08 01:11:46 +0100284 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +0000285 "create_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200286 )
rshri932105f2024-07-05 15:11:55 +0000287 self.logger.info("workflow_name is :{}".format(workflow_name))
288
garciadeblas96b94f52024-07-08 16:18:21 +0200289 workflow_status, workflow_msg = await self.odu.check_workflow_status(
290 workflow_name
291 )
rshri932105f2024-07-05 15:11:55 +0000292 self.logger.info(
293 "workflow_status is :{} and workflow_msg is :{}".format(
294 workflow_status, workflow_msg
295 )
296 )
297 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200298 db_cluster["state"] = "CREATED"
299 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000300 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200301 db_cluster["state"] = "FAILED_CREATION"
302 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000303 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000304 db_cluster = self.update_operation_history(
305 db_cluster, op_id, workflow_status, None
306 )
garciadeblas96b94f52024-07-08 16:18:21 +0200307 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000308
garciadeblas28bff0f2024-09-16 12:53:07 +0200309 # Clean items used in the workflow, no matter if the workflow succeeded
310 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +0000311 "create_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +0200312 )
313 self.logger.info(
314 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
315 )
316
rshri932105f2024-07-05 15:11:55 +0000317 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100318 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +0000319 "create_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000320 )
321 self.logger.info(
322 "resource_status is :{} and resource_msg is :{}".format(
323 resource_status, resource_msg
324 )
325 )
326 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200327 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000328 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200329 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000330
garciadeblas96b94f52024-07-08 16:18:21 +0200331 db_cluster["operatingState"] = "IDLE"
332 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000333 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000334 )
shahithya70a3fc92024-11-12 11:01:05 +0000335 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200336 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
337 self.update_profile_state(db_cluster, workflow_status, resource_status)
rshri948f7de2024-12-02 03:42:35 +0000338
339 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
340
341 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
342 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
343 # To call the lcm.py for registering the cluster in k8scluster lcm.
344 db_register["credentials"] = cluster_creds
345 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
346 register = await self.regist.create(db_register, order_id)
347 self.logger.debug(f"Register is : {register}")
348 else:
349 db_register["_admin"]["operationalState"] = "ERROR"
350 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
351 # To call the lcm.py for registering the cluster in k8scluster lcm.
352 db_register["credentials"] = cluster_creds
353 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
354
rshri932105f2024-07-05 15:11:55 +0000355 return
356
garciadeblas72412282024-11-07 12:41:54 +0100357 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100358 self.logger.info(
359 f"check_create_cluster Operation {op_id}. Params: {op_params}."
360 )
361 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100362 db_cluster = content["cluster"]
363 cluster_name = db_cluster["git_name"].lower()
364 cluster_kustomization_name = cluster_name
365 db_vim_account = content["vim_account"]
366 cloud_type = db_vim_account["vim_type"]
367 nodepool_name = ""
368 if cloud_type == "aws":
369 nodepool_name = f"{cluster_name}-nodegroup"
370 cluster_name = f"{cluster_name}-cluster"
371 elif cloud_type == "gcp":
372 nodepool_name = f"nodepool-{cluster_name}"
373 bootstrap = op_params.get("bootstrap", True)
374 if cloud_type in ("azure", "gcp", "aws"):
375 checkings_list = [
376 {
377 "item": "kustomization",
378 "name": cluster_kustomization_name,
379 "namespace": "managed-resources",
380 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000381 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100382 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100383 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100384 },
385 {
386 "item": f"cluster_{cloud_type}",
387 "name": cluster_name,
388 "namespace": "",
389 "flag": "Synced",
390 "timeout": self._checkloop_resource_timeout,
391 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100392 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100393 },
394 {
395 "item": f"cluster_{cloud_type}",
396 "name": cluster_name,
397 "namespace": "",
398 "flag": "Ready",
399 "timeout": self._checkloop_resource_timeout,
400 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100401 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100402 },
403 {
404 "item": "kustomization",
405 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
406 "namespace": "managed-resources",
407 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000408 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100409 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100410 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100411 },
412 ]
413 else:
414 return False, "Not suitable VIM account to check cluster status"
415 if nodepool_name:
416 nodepool_check = {
417 "item": f"nodepool_{cloud_type}",
418 "name": nodepool_name,
419 "namespace": "",
420 "flag": "Ready",
421 "timeout": self._checkloop_resource_timeout,
422 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100423 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100424 }
425 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000426 return await self.common_check_list(
427 op_id, checkings_list, "clusters", db_cluster
428 )
garciadeblas72412282024-11-07 12:41:54 +0100429
garciadeblasb0a42c22024-11-13 16:00:10 +0100430 async def check_register_cluster(self, op_id, op_params, content):
431 self.logger.info(
432 f"check_register_cluster Operation {op_id}. Params: {op_params}."
433 )
434 # self.logger.debug(f"Content: {content}")
435 db_cluster = content["cluster"]
436 cluster_name = db_cluster["git_name"].lower()
437 cluster_kustomization_name = cluster_name
438 bootstrap = op_params.get("bootstrap", True)
439 checkings_list = [
440 {
441 "item": "kustomization",
442 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
443 "namespace": "managed-resources",
444 "flag": "Ready",
445 "timeout": self._checkloop_kustomization_timeout,
446 "enable": bootstrap,
447 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
448 },
449 ]
yshahcb9075f2024-11-22 12:08:57 +0000450 return await self.common_check_list(
451 op_id, checkings_list, "clusters", db_cluster
452 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100453
454 async def check_update_cluster(self, op_id, op_params, content):
455 self.logger.info(
456 f"check_create_cluster Operation {op_id}. Params: {op_params}."
457 )
458 # self.logger.debug(f"Content: {content}")
459 db_cluster = content["cluster"]
460 cluster_name = db_cluster["git_name"].lower()
461 cluster_kustomization_name = cluster_name
462 db_vim_account = content["vim_account"]
463 cloud_type = db_vim_account["vim_type"]
464 nodepool_name = ""
465 if cloud_type == "aws":
466 nodepool_name = f"{cluster_name}-nodegroup"
467 cluster_name = f"{cluster_name}-cluster"
468 elif cloud_type == "gcp":
469 nodepool_name = f"nodepool-{cluster_name}"
470 if cloud_type in ("azure", "gcp", "aws"):
471 checkings_list = [
472 {
473 "item": "kustomization",
474 "name": cluster_kustomization_name,
475 "namespace": "managed-resources",
476 "flag": "Ready",
477 "timeout": self._checkloop_kustomization_timeout,
478 "enable": True,
479 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
480 },
481 {
482 "item": f"cluster_{cloud_type}",
483 "name": cluster_name,
484 "namespace": "",
485 "flag": "Synced",
486 "timeout": self._checkloop_resource_timeout,
487 "enable": True,
488 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
489 },
490 {
491 "item": f"cluster_{cloud_type}",
492 "name": cluster_name,
493 "namespace": "",
494 "flag": "Ready",
495 "timeout": self._checkloop_resource_timeout,
496 "enable": True,
497 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
498 },
499 ]
500 else:
501 return False, "Not suitable VIM account to check cluster status"
502 if nodepool_name:
503 nodepool_check = {
504 "item": f"nodepool_{cloud_type}",
505 "name": nodepool_name,
506 "namespace": "",
507 "flag": "Ready",
508 "timeout": self._checkloop_resource_timeout,
509 "enable": True,
510 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
511 }
512 checkings_list.append(nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000513 return await self.common_check_list(
514 op_id, checkings_list, "clusters", db_cluster
515 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100516
garciadeblas96b94f52024-07-08 16:18:21 +0200517 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000518 profiles = [
519 "infra_controller_profiles",
520 "infra_config_profiles",
521 "app_profiles",
522 "resource_profiles",
523 ]
rshri948f7de2024-12-02 03:42:35 +0000524 """
rshri932105f2024-07-05 15:11:55 +0000525 profiles_collection = {
526 "infra_controller_profiles": "k8sinfra_controller",
527 "infra_config_profiles": "k8sinfra_config",
528 "app_profiles": "k8sapp",
529 "resource_profiles": "k8sresource",
530 }
rshri948f7de2024-12-02 03:42:35 +0000531 """
Your Name86149632024-11-14 16:17:16 +0000532 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000533 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200534 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000535 # db_collection = profiles_collection[profile_type]
536 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000537 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000538 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200539 db_profile["state"] = db_cluster["state"]
540 db_profile["resourceState"] = db_cluster["resourceState"]
541 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000542 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000543 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000544 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000545 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000546 )
rshri932105f2024-07-05 15:11:55 +0000547 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
548
    async def delete(self, params, order_id):
        """Delete a cluster through the ODU "delete_cluster" workflow.

        For clusters that were only registered (not created by OSM) the call
        is redirected to deregister(). Otherwise the workflow is launched,
        its outcome recorded, resources checked, and on success the cluster
        and its registration record are removed from the database.

        :param params: dict with "cluster_id" and "operation_id"
        :param order_id: identifier forwarded to deregistration
        :return: None
        """
        self.logger.info("cluster delete Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # NOTE(review): "created" is compared as the string "false" — confirm
        # the DB stores this flag as a string, not a boolean.
        if db_cluster["created"] == "false":
            # Registered-only cluster: undo the registration instead.
            return await self.deregister(params, order_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "DELETED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "delete_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            # Close the operation on the cluster document.
            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # To delete it from DB
        if db_cluster["state"] == "DELETED":
            self.delete_cluster(db_cluster)

            # To delete it from k8scluster collection
            self.db.del_one("k8sclusters", {"name": db_cluster["name"]})

        return
631
    def delete_cluster(self, db_cluster):
        """Remove the cluster document and detach/remove its profiles.

        For each attached profile: if the profile has the same name as the
        cluster it is deleted outright (presumably the default profile that
        was auto-created with the cluster — TODO confirm); otherwise it is
        only detached from the cluster's profile list, and the trimmed list
        is written back. Finally the cluster document itself is deleted.

        :param db_cluster: the cluster document to remove
        """
        # Actually, item_content is equal to db_cluster
        # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
        # self.logger.debug("item_content is : {}".format(item_content))

        # detach profiles
        update_dict = None
        profiles_to_detach = [
            "infra_controller_profiles",
            "infra_config_profiles",
            "app_profiles",
            "resource_profiles",
        ]
        """
        profiles_collection = {
            "infra_controller_profiles": "k8sinfra_controller",
            "infra_config_profiles": "k8sinfra_config",
            "app_profiles": "k8sapp",
            "resource_profiles": "k8sresource",
        }
        """
        for profile_type in profiles_to_detach:
            if db_cluster.get(profile_type):
                profile_ids = db_cluster[profile_type]
                # Iterate over a copy: profile_ids itself is mutated below.
                profile_ids_copy = deepcopy(profile_ids)
                for profile_id in profile_ids_copy:
                    # db_collection = profiles_collection[profile_type]
                    db_collection = self.profile_collection_mapping[profile_type]
                    db_profile = self.db.get_one(db_collection, {"_id": profile_id})
                    self.logger.debug("the db_profile is :{}".format(db_profile))
                    self.logger.debug(
                        "the item_content name is :{}".format(db_cluster["name"])
                    )
                    self.logger.debug(
                        "the db_profile name is :{}".format(db_profile["name"])
                    )
                    if db_cluster["name"] == db_profile["name"]:
                        # Same-named profile belongs to the cluster: drop it.
                        self.db.del_one(db_collection, {"_id": profile_id})
                    else:
                        # Detach only: shrink the list and persist it on the
                        # cluster document (written once per detached id).
                        profile_ids.remove(profile_id)
                        update_dict = {profile_type: profile_ids}
                        self.db.set_one(
                            "clusters", {"_id": db_cluster["_id"]}, update_dict
                        )
        self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000677
    async def attach_profile(self, params, order_id):
        """Attach a profile to a cluster through the ODU "attach_profile_to_cluster" workflow.

        :param params: dict with at least "cluster_id", "operation_id",
            "profile_id" and "profile_type".
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None. All progress (resourceState, operatingState, operation
            history, attached profile ids) is persisted in the "clusters"
            collection.
        """
        self.logger.info("profile attach Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # To get the profile details
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        # content["profile"] = db_profile
        db_cluster_copy["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # Workflow launched OK -> content is synced to git; otherwise error out.
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # NOTE(review): on workflow failure the method ends here with
        # resourceState=ERROR but without clearing current_operation —
        # confirm this is the intended failure-path behavior.
        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            profile_list = db_cluster[profile_type]
            if resource_status:
                # Record the attachment only once the resource reached READY.
                profile_list.append(profile_id)
                db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        return
756
    async def detach_profile(self, params, order_id):
        """Detach a profile from a cluster through the ODU "detach_profile_from_cluster" workflow.

        :param params: dict with at least "cluster_id", "operation_id",
            "profile_id" and "profile_type".
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None. Progress is persisted in the "clusters" collection; on
            success the profile id is removed from the cluster's profile list.
        """
        self.logger.info("profile dettach Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # To get the profile details
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        # content["profile"] = db_profile
        db_cluster_copy["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # Workflow launched OK -> content is synced to git; otherwise error out.
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            profile_list = db_cluster[profile_type]
            self.logger.info("profile list is : {}".format(profile_list))
            if resource_status:
                # Drop the profile id only once the detach actually completed.
                profile_list.remove(profile_id)
                db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        return
836
    async def register(self, params, order_id):
        """Register a cluster via the ODU "register_cluster" workflow.

        On full success (workflow + resource READY + state CREATED) the
        matching "k8sclusters" record is handed to the k8scluster LCM
        (``self.regist.create``); otherwise the record is marked ERROR.

        :param params: dict with at least "cluster_id" and "operation_id".
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None. Progress is persisted in "clusters"/"k8sclusters".
        """
        self.logger.info("cluster register enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        _, workflow_name = await self.odu.launch_workflow(
            "register_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "register_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "register_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Mirror the credentials into the k8sclusters record used by the
        # legacy k8scluster LCM.
        # NOTE(review): assumes db_cluster carries "credentials" even when the
        # workflow failed — verify against get_creds/registration flow.
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
        db_register["credentials"] = db_cluster["credentials"]
        self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            db_register["_admin"]["operationalState"] = "ERROR"
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
924
    async def deregister(self, params, order_id):
        """Deregister a cluster via the ODU "deregister_cluster" workflow.

        Ends by delegating to ``self.delete`` so the cluster record and its
        associated items are removed regardless of the deregistration outcome.

        :param params: dict with at least "cluster_id" and "operation_id".
        :param order_id: identifier of the LCM order that triggered this task.
        :return: whatever ``self.delete`` returns.
        """
        self.logger.info("cluster deregister enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
        # content = {
        #     "cluster": db_cluster,
        # }

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        _, workflow_name = await self.odu.launch_workflow(
            "deregister_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "deregister_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "deregister_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Always fall through to delete so DB cleanup happens in either case.
        return await self.delete(params, order_id)
rshri932105f2024-07-05 15:11:55 +0000997
rshri948f7de2024-12-02 03:42:35 +0000998 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +0200999 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001000 cluster_id = params["cluster_id"]
1001 op_id = params["operation_id"]
1002 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001003 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1004 if result:
1005 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001006 op_len = 0
1007 for operations in db_cluster["operationHistory"]:
1008 if operations["op_id"] == op_id:
1009 db_cluster["operationHistory"][op_len]["result"] = result
1010 db_cluster["operationHistory"][op_len]["endDate"] = time()
1011 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001012 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001013 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001014 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001015 return
1016
rshri948f7de2024-12-02 03:42:35 +00001017 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001018 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001019 # To get the cluster details
1020 cluster_id = params["cluster_id"]
1021 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1022
1023 # To get the operation params details
1024 op_id = params["operation_id"]
1025 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001026
rshric3564942024-11-12 18:12:38 +00001027 db_cluster_copy = self.decrypting_key(db_cluster)
1028
1029 # vim account details
1030 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
1031 db_cluster_copy["vim_account"] = db_vim
1032
garciadeblasadb81e82024-11-08 01:11:46 +01001033 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +00001034 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001035 )
1036 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1037 workflow_name
1038 )
1039 self.logger.info(
1040 "Workflow Status: {} Workflow Message: {}".format(
1041 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001042 )
garciadeblas96b94f52024-07-08 16:18:21 +02001043 )
1044
1045 if workflow_status:
1046 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1047 else:
1048 db_cluster["resourceState"] = "ERROR"
1049
yshahcb9075f2024-11-22 12:08:57 +00001050 db_cluster = self.update_operation_history(
1051 db_cluster, op_id, workflow_status, None
1052 )
garciadeblas96b94f52024-07-08 16:18:21 +02001053 # self.logger.info("Db content: {}".format(db_content))
1054 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1055 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1056
garciadeblas28bff0f2024-09-16 12:53:07 +02001057 # Clean items used in the workflow, no matter if the workflow succeeded
1058 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +00001059 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +02001060 )
1061 self.logger.info(
1062 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1063 )
garciadeblas96b94f52024-07-08 16:18:21 +02001064 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001065 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +00001066 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001067 )
1068 self.logger.info(
1069 "Resource Status: {} Resource Message: {}".format(
1070 resource_status, resource_msg
1071 )
1072 )
yshah771dea82024-07-05 15:11:49 +00001073
1074 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001075 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001076 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001077 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001078
yshah0defcd52024-11-18 07:41:35 +00001079 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001080 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001081 )
1082
garciadeblas96b94f52024-07-08 16:18:21 +02001083 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001084 # self.logger.info("db_cluster: {}".format(db_cluster))
1085 # TODO: verify enxtcondition
1086 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1087 if workflow_status:
1088 if "k8s_version" in op_params:
1089 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001090 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001091 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001092 if "node_size" in op_params:
1093 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001094 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001095 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001096 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001097 return
1098
1099
class CloudCredentialsLcm(GitOpsLcm):
    """LCM handler for cloud (VIM) credentials managed through ODU workflows."""

    db_collection = "vim_accounts"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    def _decrypted_copy(self, db_content, vim_id):
        """Return a deep copy of *db_content* with VIM credentials decrypted.

        Decrypting a copy (instead of in place) keeps the DB record itself
        encrypted: the original code decrypted ``db_content`` in place and
        later wrote that same dict back with ``set_one`` (and logged it),
        persisting plaintext password/secret values.
        """
        workflow_content = copy.deepcopy(db_content)
        vim_config = workflow_content.get("config", {})
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=workflow_content["schema_version"],
            salt=vim_id,
        )
        return workflow_content

    async def add(self, params, order_id):
        """Create cloud credentials via the "create_cloud_credentials" workflow.

        :param params: the vim_accounts document fields; "_id" doubles as the
            operation id.
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None. On success the record is marked ENABLED in DB.
        """
        self.logger.info("Cloud Credentials create")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
        # Fix: decrypt on a copy only — db_content is persisted again below.
        workflow_content = self._decrypted_copy(db_content, vim_id)

        _, workflow_name = await self.odu.launch_workflow(
            "create_cloud_credentials", op_id, op_params, workflow_content
        )

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )

        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cloud_credentials", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cloud_credentials", op_id, op_params, workflow_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )

            # Mark the account and its "create" operation as ENABLED.
            db_content["_admin"]["operationalState"] = "ENABLED"
            for operation in db_content["_admin"]["operations"]:
                if operation["lcmOperationType"] == "create":
                    operation["operationState"] = "ENABLED"
            self.logger.info("Content : {}".format(db_content))
            self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
        return

    async def edit(self, params, order_id):
        """Update cloud credentials via the "update_cloud_credentials" workflow.

        :param params: the vim_accounts document fields; "_id" doubles as the
            operation id.
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None.
        """
        self.logger.info("Cloud Credentials Update")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
        # Decrypt on a copy only, keeping the stored credentials encrypted.
        workflow_content = self._decrypted_copy(db_content, vim_id)

        _, workflow_name = await self.odu.launch_workflow(
            "update_cloud_credentials", op_id, op_params, workflow_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "update_cloud_credentials", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "update_cloud_credentials", op_id, op_params, workflow_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        return

    async def remove(self, params, order_id):
        """Delete cloud credentials via the "delete_cloud_credentials" workflow.

        The vim_accounts record is deleted from the DB once the delete
        workflow has been processed.

        :param params: the vim_accounts document fields; "_id" doubles as the
            operation id.
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None.
        """
        self.logger.info("Cloud Credentials remove")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
            # Remove the DB record only after the workflow was processed.
            self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        return
1236
rshri932105f2024-07-05 15:11:55 +00001237
class K8sAppLcm(GitOpsLcm):
    """LCM handler for application profiles stored in the "k8sapp" collection."""

    db_collection = "k8sapp"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an app profile via the "create_profile" ODU workflow.

        :param params: dict with "operation_id" and "profile_id".
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None. Profile state is persisted in the "k8sapp" collection.
        """
        self.logger.info("App Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        content["profile_type"] = "applications"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sapp", {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        # Fix: pre-bind resource_status so the exit log below cannot raise
        # NameError when the workflow fails and check_resource() is skipped.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(f"App Create Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an app profile via the "delete_profile" ODU workflow.

        :param params: dict with "operation_id" and "profile_id".
        :param order_id: identifier of the LCM order that triggered this task.
        :return: None. On success the record is marked DELETED and removed.
        """
        self.logger.info("App delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        # Fix: pre-bind resource_status so the checks and exit log below
        # cannot raise NameError when the workflow fails.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(f"App Delete Exit with resource status: {resource_status}")
        return
1308
1309
garciadeblas72412282024-11-07 12:41:54 +01001310class K8sResourceLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001311 db_collection = "k8sresource"
1312
    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param msg: message bus handler
        :param lcm_tasks: shared LCM task registry
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        # All infrastructure setup is delegated to the GitOpsLcm base class.
        super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001320
rshri948f7de2024-12-02 03:42:35 +00001321 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001322 self.logger.info("Resource Create Enter")
1323
rshri948f7de2024-12-02 03:42:35 +00001324 op_id = params["operation_id"]
1325 profile_id = params["profile_id"]
1326
1327 # To initialize the operation states
1328 self.initialize_operation(profile_id, op_id)
1329
1330 content = self.db.get_one("k8sresource", {"_id": profile_id})
1331 content["profile_type"] = "managed-resources"
1332 op_params = self.get_operation_params(content, op_id)
1333 self.db.set_one("k8sresource", {"_id": content["_id"]}, content)
1334
garciadeblasadb81e82024-11-08 01:11:46 +01001335 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001336 "create_profile", op_id, op_params, content
1337 )
rshri932105f2024-07-05 15:11:55 +00001338 self.logger.info("workflow_name is :{}".format(workflow_name))
1339
yshah564ec9c2024-11-29 07:33:32 +00001340 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001341
1342 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001343 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001344 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001345 )
yshah564ec9c2024-11-29 07:33:32 +00001346 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1347 self.logger.info(
1348 f"Resource Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001349 )
rshri932105f2024-07-05 15:11:55 +00001350 return
1351
rshri948f7de2024-12-02 03:42:35 +00001352 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001353 self.logger.info("Resource delete Enter")
rshri948f7de2024-12-02 03:42:35 +00001354
1355 op_id = params["operation_id"]
1356 profile_id = params["profile_id"]
1357
1358 # To initialize the operation states
1359 self.initialize_operation(profile_id, op_id)
1360
1361 content = self.db.get_one("k8sresource", {"_id": profile_id})
1362 op_params = self.get_operation_params(content, op_id)
rshri932105f2024-07-05 15:11:55 +00001363
garciadeblasadb81e82024-11-08 01:11:46 +01001364 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001365 "delete_profile", op_id, op_params, content
1366 )
rshri932105f2024-07-05 15:11:55 +00001367 self.logger.info("workflow_name is :{}".format(workflow_name))
1368
yshah564ec9c2024-11-29 07:33:32 +00001369 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001370
1371 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001372 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001373 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001374 )
rshri932105f2024-07-05 15:11:55 +00001375
yshah564ec9c2024-11-29 07:33:32 +00001376 if resource_status:
1377 content["state"] = "DELETED"
1378 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1379 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1380 self.logger.info(
1381 f"Resource Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001382 )
rshri932105f2024-07-05 15:11:55 +00001383 return
1384
1385
garciadeblas72412282024-11-07 12:41:54 +01001386class K8sInfraControllerLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001387 db_collection = "k8sinfra_controller"
1388
rshri932105f2024-07-05 15:11:55 +00001389 def __init__(self, msg, lcm_tasks, config):
1390 """
1391 Init, Connect to database, filesystem storage, and messaging
1392 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1393 :return: None
1394 """
garciadeblas72412282024-11-07 12:41:54 +01001395 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001396
rshri948f7de2024-12-02 03:42:35 +00001397 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001398 self.logger.info("Infra controller Create Enter")
1399
rshri948f7de2024-12-02 03:42:35 +00001400 op_id = params["operation_id"]
1401 profile_id = params["profile_id"]
1402
1403 # To initialize the operation states
1404 self.initialize_operation(profile_id, op_id)
1405
1406 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1407 content["profile_type"] = "infra-controllers"
1408 op_params = self.get_operation_params(content, op_id)
1409 self.db.set_one("k8sinfra_controller", {"_id": content["_id"]}, content)
1410
garciadeblasadb81e82024-11-08 01:11:46 +01001411 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001412 "create_profile", op_id, op_params, content
1413 )
rshri932105f2024-07-05 15:11:55 +00001414 self.logger.info("workflow_name is :{}".format(workflow_name))
1415
yshah564ec9c2024-11-29 07:33:32 +00001416 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001417
1418 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001419 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001420 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001421 )
yshah564ec9c2024-11-29 07:33:32 +00001422 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1423 self.logger.info(
1424 f"Infra Controller Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001425 )
rshri932105f2024-07-05 15:11:55 +00001426 return
1427
rshri948f7de2024-12-02 03:42:35 +00001428 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001429 self.logger.info("Infra controller delete Enter")
rshri932105f2024-07-05 15:11:55 +00001430
rshri948f7de2024-12-02 03:42:35 +00001431 op_id = params["operation_id"]
1432 profile_id = params["profile_id"]
1433
1434 # To initialize the operation states
1435 self.initialize_operation(profile_id, op_id)
1436
1437 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1438 op_params = self.get_operation_params(content, op_id)
1439
garciadeblasadb81e82024-11-08 01:11:46 +01001440 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001441 "delete_profile", op_id, op_params, content
1442 )
rshri932105f2024-07-05 15:11:55 +00001443 self.logger.info("workflow_name is :{}".format(workflow_name))
1444
yshah564ec9c2024-11-29 07:33:32 +00001445 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001446
1447 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001448 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001449 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001450 )
rshri932105f2024-07-05 15:11:55 +00001451
yshah564ec9c2024-11-29 07:33:32 +00001452 if resource_status:
1453 content["state"] = "DELETED"
1454 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1455 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1456 self.logger.info(
1457 f"Infra Controller Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001458 )
rshri932105f2024-07-05 15:11:55 +00001459 return
1460
1461
garciadeblas72412282024-11-07 12:41:54 +01001462class K8sInfraConfigLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001463 db_collection = "k8sinfra_config"
1464
rshri932105f2024-07-05 15:11:55 +00001465 def __init__(self, msg, lcm_tasks, config):
1466 """
1467 Init, Connect to database, filesystem storage, and messaging
1468 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1469 :return: None
1470 """
garciadeblas72412282024-11-07 12:41:54 +01001471 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001472
rshri948f7de2024-12-02 03:42:35 +00001473 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001474 self.logger.info("Infra config Create Enter")
1475
rshri948f7de2024-12-02 03:42:35 +00001476 op_id = params["operation_id"]
1477 profile_id = params["profile_id"]
1478
1479 # To initialize the operation states
1480 self.initialize_operation(profile_id, op_id)
1481
1482 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1483 content["profile_type"] = "infra-configs"
1484 op_params = self.get_operation_params(content, op_id)
1485 self.db.set_one("k8sinfra_config", {"_id": content["_id"]}, content)
1486
garciadeblasadb81e82024-11-08 01:11:46 +01001487 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001488 "create_profile", op_id, op_params, content
1489 )
rshri932105f2024-07-05 15:11:55 +00001490 self.logger.info("workflow_name is :{}".format(workflow_name))
1491
yshah564ec9c2024-11-29 07:33:32 +00001492 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001493
1494 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001495 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001496 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001497 )
yshah564ec9c2024-11-29 07:33:32 +00001498 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1499 self.logger.info(
1500 f"Infra Config Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001501 )
rshri932105f2024-07-05 15:11:55 +00001502 return
1503
rshri948f7de2024-12-02 03:42:35 +00001504 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001505 self.logger.info("Infra config delete Enter")
1506
rshri948f7de2024-12-02 03:42:35 +00001507 op_id = params["operation_id"]
1508 profile_id = params["profile_id"]
1509
1510 # To initialize the operation states
1511 self.initialize_operation(profile_id, op_id)
1512
1513 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1514 op_params = self.get_operation_params(content, op_id)
1515
garciadeblasadb81e82024-11-08 01:11:46 +01001516 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001517 "delete_profile", op_id, op_params, content
1518 )
rshri932105f2024-07-05 15:11:55 +00001519 self.logger.info("workflow_name is :{}".format(workflow_name))
rshri932105f2024-07-05 15:11:55 +00001520
yshah564ec9c2024-11-29 07:33:32 +00001521 workflow_status = await self.check_workflow(op_id, workflow_name, content)
1522
rshri932105f2024-07-05 15:11:55 +00001523 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001524 resource_status, content = await self.check_resource(
1525 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001526 )
yshah564ec9c2024-11-29 07:33:32 +00001527
rshri932105f2024-07-05 15:11:55 +00001528 if resource_status:
yshah564ec9c2024-11-29 07:33:32 +00001529 content["state"] = "DELETED"
1530 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1531 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1532 self.logger.info(
1533 f"Infra Config Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001534 )
rshri932105f2024-07-05 15:11:55 +00001535
rshri932105f2024-07-05 15:11:55 +00001536 return
yshah771dea82024-07-05 15:11:49 +00001537
1538
garciadeblas72412282024-11-07 12:41:54 +01001539class OkaLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001540 db_collection = "okas"
1541
1542 def __init__(self, msg, lcm_tasks, config):
1543 """
1544 Init, Connect to database, filesystem storage, and messaging
1545 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1546 :return: None
1547 """
garciadeblas72412282024-11-07 12:41:54 +01001548 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001549
yshah564ec9c2024-11-29 07:33:32 +00001550 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001551 self.logger.info("OKA Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001552 op_id = params["operation_id"]
1553 oka_id = params["oka_id"]
1554 self.initialize_operation(oka_id, op_id)
1555 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1556 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001557
garciadeblasadb81e82024-11-08 01:11:46 +01001558 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001559 "create_oka", op_id, op_params, db_content
1560 )
yshah564ec9c2024-11-29 07:33:32 +00001561
1562 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001563
1564 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001565 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001566 "create_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001567 )
garciadeblas96b94f52024-07-08 16:18:21 +02001568 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001569 self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001570 return
1571
yshah564ec9c2024-11-29 07:33:32 +00001572 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001573 self.logger.info("OKA Edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001574 op_id = params["operation_id"]
1575 oka_id = params["oka_id"]
1576 self.initialize_operation(oka_id, op_id)
1577 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1578 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001579
garciadeblasadb81e82024-11-08 01:11:46 +01001580 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001581 "update_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001582 )
yshah564ec9c2024-11-29 07:33:32 +00001583 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001584
1585 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001586 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001587 "update_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001588 )
garciadeblas96b94f52024-07-08 16:18:21 +02001589 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001590 self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001591 return
1592
yshah564ec9c2024-11-29 07:33:32 +00001593 async def delete(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001594 self.logger.info("OKA delete Enter")
yshah564ec9c2024-11-29 07:33:32 +00001595 op_id = params["operation_id"]
1596 oka_id = params["oka_id"]
1597 self.initialize_operation(oka_id, op_id)
1598 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1599 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001600
garciadeblasadb81e82024-11-08 01:11:46 +01001601 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001602 "delete_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001603 )
yshah564ec9c2024-11-29 07:33:32 +00001604 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001605
1606 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001607 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001608 "delete_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001609 )
yshah771dea82024-07-05 15:11:49 +00001610
yshah564ec9c2024-11-29 07:33:32 +00001611 if resource_status:
1612 db_content["state"] == "DELETED"
1613 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
garciadeblas96b94f52024-07-08 16:18:21 +02001614 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah564ec9c2024-11-29 07:33:32 +00001615 self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001616 return
1617
1618
garciadeblas72412282024-11-07 12:41:54 +01001619class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001620 db_collection = "ksus"
1621
    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param msg: message bus handler used by the base class
        :param lcm_tasks: registry of running LCM tasks
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001629
yshah564ec9c2024-11-29 07:33:32 +00001630 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001631 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001632 db_content = []
1633 op_params = []
1634 op_id = params["operation_id"]
1635 for ksu_id in params["ksus_list"]:
1636 self.logger.info("Ksu ID: {}".format(ksu_id))
1637 self.initialize_operation(ksu_id, op_id)
1638 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1639 self.logger.info("Db KSU: {}".format(db_ksu))
1640 db_content.append(db_ksu)
1641 ksu_params = {}
1642 ksu_params = self.get_operation_params(db_ksu, op_id)
1643 self.logger.info("Operation Params: {}".format(ksu_params))
1644 # Update ksu_params["profile"] with profile name and age-pubkey
1645 profile_type = ksu_params["profile"]["profile_type"]
1646 profile_id = ksu_params["profile"]["_id"]
1647 profile_collection = self.profile_collection_mapping[profile_type]
1648 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1649 ksu_params["profile"]["name"] = db_profile["name"]
1650 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1651 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1652 for oka in ksu_params["oka"]:
1653 if "sw_catalog_path" not in oka:
1654 oka_id = oka["_id"]
1655 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001656 oka_type = MAP_PROFILE[
1657 db_oka.get("profile_type", "infra_controller_profiles")
1658 ]
1659 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001660 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001661
garciadeblasadb81e82024-11-08 01:11:46 +01001662 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001663 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001664 )
yshah564ec9c2024-11-29 07:33:32 +00001665 for db_ksu, ksu_params in zip(db_content, op_params):
1666 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
yshah771dea82024-07-05 15:11:49 +00001667
garciadeblas96b94f52024-07-08 16:18:21 +02001668 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001669 resource_status, db_ksu = await self.check_resource(
1670 "create_ksus", op_id, ksu_params, db_ksu
1671 )
yshah771dea82024-07-05 15:11:49 +00001672
garciadeblas96b94f52024-07-08 16:18:21 +02001673 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1674
garciadeblasd8429852024-10-17 15:30:30 +02001675 # Clean items used in the workflow, no matter if the workflow succeeded
1676 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001677 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001678 )
1679 self.logger.info(
1680 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1681 )
yshah564ec9c2024-11-29 07:33:32 +00001682 self.logger.info(f"KSU Create EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001683 return
1684
yshah564ec9c2024-11-29 07:33:32 +00001685 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001686 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001687 db_content = []
1688 op_params = []
1689 op_id = params["operation_id"]
1690 for ksu_id in params["ksus_list"]:
1691 self.initialize_operation(ksu_id, op_id)
1692 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1693 db_content.append(db_ksu)
1694 ksu_params = {}
1695 ksu_params = self.get_operation_params(db_ksu, op_id)
1696 # Update ksu_params["profile"] with profile name and age-pubkey
1697 profile_type = ksu_params["profile"]["profile_type"]
1698 profile_id = ksu_params["profile"]["_id"]
1699 profile_collection = self.profile_collection_mapping[profile_type]
1700 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1701 ksu_params["profile"]["name"] = db_profile["name"]
1702 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1703 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1704 for oka in ksu_params["oka"]:
1705 if "sw_catalog_path" not in oka:
1706 oka_id = oka["_id"]
1707 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001708 oka_type = MAP_PROFILE[
1709 db_oka.get("profile_type", "infra_controller_profiles")
1710 ]
1711 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001712 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001713
garciadeblasadb81e82024-11-08 01:11:46 +01001714 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001715 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001716 )
yshah771dea82024-07-05 15:11:49 +00001717
yshah564ec9c2024-11-29 07:33:32 +00001718 for db_ksu, ksu_params in zip(db_content, op_params):
1719 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1720
garciadeblas96b94f52024-07-08 16:18:21 +02001721 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001722 resource_status, db_ksu = await self.check_resource(
1723 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02001724 )
garciadeblas96b94f52024-07-08 16:18:21 +02001725 db_ksu["name"] = ksu_params["name"]
1726 db_ksu["description"] = ksu_params["description"]
1727 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
1728 "profile_type"
1729 ]
1730 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
1731 db_ksu["oka"] = ksu_params["oka"]
1732 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1733
yshah564ec9c2024-11-29 07:33:32 +00001734 # Clean items used in the workflow, no matter if the workflow succeeded
1735 clean_status, clean_msg = await self.odu.clean_items_workflow(
1736 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001737 )
1738 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00001739 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02001740 )
yshah564ec9c2024-11-29 07:33:32 +00001741 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001742 return
1743
yshah564ec9c2024-11-29 07:33:32 +00001744 async def delete(self, params, order_id):
1745 self.logger.info("ksu delete Enter")
1746 db_content = []
1747 op_params = []
1748 op_id = params["operation_id"]
1749 for ksu_id in params["ksus_list"]:
1750 self.initialize_operation(ksu_id, op_id)
1751 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1752 db_content.append(db_ksu)
1753 ksu_params = {}
1754 ksu_params["profile"] = {}
1755 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
1756 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
1757 # Update ksu_params["profile"] with profile name and age-pubkey
1758 profile_type = ksu_params["profile"]["profile_type"]
1759 profile_id = ksu_params["profile"]["_id"]
1760 profile_collection = self.profile_collection_mapping[profile_type]
1761 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1762 ksu_params["profile"]["name"] = db_profile["name"]
1763 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1764 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001765
garciadeblasadb81e82024-11-08 01:11:46 +01001766 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001767 "delete_ksus", op_id, op_params, db_content
1768 )
1769
1770 for db_ksu, ksu_params in zip(db_content, op_params):
1771 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1772
1773 if workflow_status:
1774 resource_status, db_ksu = await self.check_resource(
1775 "delete_ksus", op_id, ksu_params, db_ksu
1776 )
1777
1778 if resource_status:
1779 db_ksu["state"] == "DELETED"
1780 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1781 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
1782
1783 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
1784 return
1785
1786 async def clone(self, params, order_id):
1787 self.logger.info("ksu clone Enter")
1788 op_id = params["operation_id"]
1789 ksus_id = params["ksus_list"][0]
1790 self.initialize_operation(ksus_id, op_id)
1791 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1792 op_params = self.get_operation_params(db_content, op_id)
1793 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001794 "clone_ksus", op_id, op_params, db_content
1795 )
yshah564ec9c2024-11-29 07:33:32 +00001796
1797 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001798
1799 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001800 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001801 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001802 )
garciadeblas96b94f52024-07-08 16:18:21 +02001803 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001804
1805 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001806 return
1807
yshah564ec9c2024-11-29 07:33:32 +00001808 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001809 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00001810 op_id = params["operation_id"]
1811 ksus_id = params["ksus_list"][0]
1812 self.initialize_operation(ksus_id, op_id)
1813 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1814 op_params = self.get_operation_params(db_content, op_id)
garciadeblasadb81e82024-11-08 01:11:46 +01001815 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001816 "move_ksus", op_id, op_params, db_content
1817 )
yshah564ec9c2024-11-29 07:33:32 +00001818
1819 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001820
1821 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001822 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001823 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001824 )
garciadeblas96b94f52024-07-08 16:18:21 +02001825 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001826
1827 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001828 return