blob: ab6001b74f9c65a74c59abb060660622eb489873 [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
rshri932105f2024-07-05 15:11:55 +000030
# Maps a profile-type field name (as stored in cluster documents) to its
# GitOps-side identifier. Not referenced in the visible part of this file —
# presumably consumed further below or by other modules; TODO confirm.
# NOTE(review): "managed_resources" uses an underscore while the other values
# are hyphenated — confirm this asymmetry is intentional.
MAP_PROFILE = {
    "infra_controller_profiles": "infra-controllers",
    "infra_config_profiles": "infra-configs",
    "resource_profiles": "managed_resources",
    "app_profiles": "apps",
}
37
rshri932105f2024-07-05 15:11:55 +000038
class GitOpsLcm(LcmBase):
    """Common LCM machinery for GitOps-managed items.

    Concrete subclasses (e.g. ClusterLcm) override ``db_collection`` and
    populate ``self._workflows`` with per-workflow resource-check functions.
    This base class provides operation bookkeeping in the database,
    workflow/resource status tracking through ODU workflows, and a helper
    to decrypt the age keys before handing content to the workflows.
    """

    # DB collection holding the items this LCM manages; subclasses override it.
    db_collection = "gitops"
    # Class-level placeholders; not read by the methods below — presumably
    # kept for backwards compatibility (TODO confirm they can be removed).
    workflow_status = None
    resource_status = None

    # Maps a profile-type field (as stored in a cluster document) to the DB
    # collection where profiles of that type live.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """Connect to the ODU workflow engine and initialize the base class.

        :param msg: message bus handler, forwarded to OduWorkflow and LcmBase
        :param lcm_tasks: LCM task registry
        :param config: LCM configuration
        """
        self.logger = logging.getLogger("lcm.gitops")
        self.lcm_tasks = lcm_tasks
        self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
        # Timeouts (seconds) for the kustomization/resource readiness loops.
        self._checkloop_kustomization_timeout = 900
        self._checkloop_resource_timeout = 900
        # workflow key -> {"check_resource_function": coroutine}; subclasses fill it.
        self._workflows = {}
        super().__init__(msg, self.logger)

    async def check_dummy_operation(self, op_id, op_params, content):
        """Fallback resource check: log the call and report success."""
        self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
        return True, "OK"

    def initialize_operation(self, item_id, op_id):
        """Mark operation ``op_id`` of item ``item_id`` as started in the DB.

        Sets the operation to PROCESSING/NOT_READY/IN_PROGRESS and records it
        as the item's ``current_operation``.

        :raises ValueError: if ``op_id`` is not present in the item's
            operationHistory (previously this surfaced as an AttributeError).
        """
        db_item = self.db.get_one(self.db_collection, {"_id": item_id})
        operation = next(
            (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
            None,
        )
        if operation is None:
            # Fail fast with a clear message instead of crashing on None below.
            raise ValueError(f"Operation {op_id} not found in item {item_id}")
        operation["workflowState"] = "PROCESSING"
        operation["resourceState"] = "NOT_READY"
        operation["operationState"] = "IN_PROGRESS"
        operation["gitOperationInfo"] = None
        db_item["current_operation"] = operation["op_id"]
        self.db.set_one(self.db_collection, {"_id": item_id}, db_item)

    def get_operation_params(self, item, operation_id):
        """Return the operationParams of ``operation_id`` ({} when unset)."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationParams", {})

    def get_operation_type(self, item, operation_id):
        """Return the operationType of ``operation_id`` ({} when unset)."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationType", {})

    def update_operation_history(
        self, content, op_id, workflow_status=None, resource_status=None, op_end=True
    ):
        """Update the matching operationHistory entry of ``content`` in place.

        ``workflow_status`` and ``resource_status`` now update their state
        fields only when they are not None: a pending check (None) no longer
        marks the operation as FAILED/NOT_READY, which previously happened
        whenever a caller passed ``resource_status=None`` after a successful
        workflow. Truthy values keep meaning success, so existing callers
        passing booleans or non-empty strings behave as before.

        :param content: item document holding an ``operationHistory`` list
        :param op_id: id of the operation entry to update
        :param workflow_status: workflow outcome, or None to leave untouched
        :param resource_status: resource outcome, or None to leave untouched
        :param op_end: when True, stamp the operation's ``endDate``
        :return: the (mutated) ``content`` document
        """
        self.logger.info(
            f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
        )
        self.logger.debug(f"Content: {content}")

        op_num = 0
        for operation in content["operationHistory"]:
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                if workflow_status is not None:
                    if workflow_status:
                        operation["workflowState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["workflowState"] = "ERROR"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if resource_status is not None:
                    if resource_status:
                        operation["resourceState"] = "READY"
                        operation["operationState"] = "COMPLETED"
                        operation["result"] = True
                    else:
                        operation["resourceState"] = "NOT_READY"
                        operation["operationState"] = "FAILED"
                        operation["result"] = False

                if op_end:
                    now = time()
                    operation["endDate"] = now
                break
            op_num += 1
        self.logger.debug("content: {}".format(content))

        return content

    async def check_workflow(self, op_id, workflow_name, db_content):
        """Wait for ``workflow_name`` and record its outcome in the DB.

        Sets the item ``state`` according to the operation type
        (create/delete) and the workflow result, updates ``resourceState``
        and the operation history, then persists the document.

        :return: True when the workflow completed successfully
        """
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Message: {}".format(
                workflow_status, workflow_msg
            )
        )
        operation_type = self.get_operation_type(db_content, op_id)
        if operation_type == "create" and workflow_status:
            db_content["state"] = "CREATED"
        elif operation_type == "create" and not workflow_status:
            db_content["state"] = "FAILED_CREATION"
        elif operation_type == "delete" and workflow_status:
            db_content["state"] = "DELETED"
        elif operation_type == "delete" and not workflow_status:
            db_content["state"] = "FAILED_DELETION"

        if workflow_status:
            db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_content["resourceState"] = "ERROR"

        # Resource status is still unknown at this point, hence None.
        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, None
        )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        return workflow_status

    async def check_resource(self, resource_name, op_id, op_params, db_content):
        """Check resource readiness after a successful workflow.

        The workflow is known to have succeeded when this is called, hence
        ``workflow_status`` is hard-coded to True for the history update.
        The updated document is returned but NOT persisted here.

        :return: (resource_status, updated db_content)
        """
        workflow_status = True

        resource_status, resource_msg = await self.check_resource_status(
            resource_name, op_id, op_params, db_content
        )
        self.logger.info(
            "Resource Status: {} Resource Message: {}".format(
                resource_status, resource_msg
            )
        )

        if resource_status:
            db_content["resourceState"] = "READY"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, resource_status
        )
        db_content["operatingState"] = "IDLE"
        db_content["current_operation"] = None
        return resource_status, db_content

    async def common_check_list(self, op_id, checkings_list, db_collection, db_item):
        """Run a sequence of readiness checks, persisting progress after each.

        Each entry of ``checkings_list`` describes one ``readiness_loop`` call
        (item, name, namespace, flag, timeout) plus an ``enable`` switch and
        the ``resourceState`` to record when that check passes.

        :return: (True, "OK") when all enabled checks pass, otherwise
            (False, message) for the first failing check or unexpected error.
        """
        try:
            for checking in checkings_list:
                if checking["enable"]:
                    status, message = await self.odu.readiness_loop(
                        item=checking["item"],
                        name=checking["name"],
                        namespace=checking["namespace"],
                        flag=checking["flag"],
                        timeout=checking["timeout"],
                    )
                    if not status:
                        return status, message
                    else:
                        db_item["resourceState"] = checking["resourceState"]
                        db_item = self.update_operation_history(
                            db_item, op_id, "COMPLETED", checking["resourceState"]
                        )
                        self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
        except Exception as e:
            self.logger.debug(traceback.format_exc())
            self.logger.debug(f"Exception: {e}", exc_info=True)
            return False, f"Unexpected exception: {e}"
        return True, "OK"

    async def check_resource_status(self, key, op_id, op_params, content):
        """Dispatch to the workflow-specific resource check registered for ``key``.

        Falls back to ``check_dummy_operation`` (always successful) when no
        ``check_resource_function`` is registered in ``self._workflows``.
        """
        self.logger.info(
            f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}. Content: {content}"
        )
        check_resource_function = self._workflows.get(key, {}).get(
            "check_resource_function"
        )
        self.logger.info("check_resource function : {}".format(check_resource_function))
        if check_resource_function:
            return await check_resource_function(op_id, op_params, content)
        else:
            return await self.check_dummy_operation(op_id, op_params, content)

    def decrypting_key(self, content):
        """Return ``{"cluster": <deep copy of content with age keys decrypted>}``.

        A deep copy is used so the decrypted keys are only handed to the ODU
        workflows while the original document keeps its encrypted values.
        """
        # This deep copy is for to be passed to ODU workflows.
        cluster_copy = copy.deepcopy(content)

        # decrypting the key
        self.db.encrypt_decrypt_fields(
            cluster_copy,
            "decrypt",
            ["age_pubkey", "age_privkey"],
            schema_version="1.11",
            salt=cluster_copy["_id"],
        )
        db_cluster_copy = {
            "cluster": cluster_copy,
        }
        return db_cluster_copy
238
garciadeblas72412282024-11-07 12:41:54 +0100239
240class ClusterLcm(GitOpsLcm):
garciadeblas96b94f52024-07-08 16:18:21 +0200241 db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000242
243 def __init__(self, msg, lcm_tasks, config):
244 """
245 Init, Connect to database, filesystem storage, and messaging
246 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
247 :return: None
248 """
garciadeblas72412282024-11-07 12:41:54 +0100249 super().__init__(msg, lcm_tasks, config)
250 self._workflows = {
251 "create_cluster": {
252 "check_resource_function": self.check_create_cluster,
253 },
garciadeblasb0a42c22024-11-13 16:00:10 +0100254 "register_cluster": {
255 "check_resource_function": self.check_register_cluster,
256 },
257 "update_cluster": {
258 "check_resource_function": self.check_update_cluster,
259 },
garciadeblas72412282024-11-07 12:41:54 +0100260 }
rshri932105f2024-07-05 15:11:55 +0000261 self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
262
    async def create(self, params, order_id):
        """Create a cluster through the ODU "create_cluster" workflow.

        Launches the workflow, records workflow/resource status transitions
        in the "clusters" collection, propagates the final state to the
        attached profiles, and finally stores the cluster credentials in the
        matching "k8sclusters" document (registering it via K8sClusterLcm
        when creation succeeded).

        :param params: dict with "cluster_id" and "operation_id"
        :param order_id: identifier forwarded to the k8scluster registration
        :return: None
        """
        self.logger.info("cluster Create Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # To get the vim account details
        db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
        db_cluster_copy["vim_account"] = db_vim

        _, workflow_name = await self.odu.launch_workflow(
            "create_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

        # NOTE(review): if workflow_status is False, resource_status is never
        # bound and the call below would raise NameError — confirm intended flow.
        db_cluster["operatingState"] = "IDLE"
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, resource_status
        )
        db_cluster["current_operation"] = None
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
        self.update_profile_state(db_cluster, workflow_status, resource_status)

        # NOTE(review): assumes a "k8sclusters" document with this name already
        # exists — presumably created by NBI beforehand; verify against caller.
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            db_register["credentials"] = cluster_creds
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            db_register["_admin"]["operationalState"] = "ERROR"
            result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            db_register["credentials"] = cluster_creds
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
356
garciadeblas72412282024-11-07 12:41:54 +0100357 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100358 self.logger.info(
359 f"check_create_cluster Operation {op_id}. Params: {op_params}."
360 )
361 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100362 db_cluster = content["cluster"]
363 cluster_name = db_cluster["git_name"].lower()
364 cluster_kustomization_name = cluster_name
365 db_vim_account = content["vim_account"]
366 cloud_type = db_vim_account["vim_type"]
367 nodepool_name = ""
368 if cloud_type == "aws":
369 nodepool_name = f"{cluster_name}-nodegroup"
370 cluster_name = f"{cluster_name}-cluster"
371 elif cloud_type == "gcp":
372 nodepool_name = f"nodepool-{cluster_name}"
373 bootstrap = op_params.get("bootstrap", True)
374 if cloud_type in ("azure", "gcp", "aws"):
375 checkings_list = [
376 {
377 "item": "kustomization",
378 "name": cluster_kustomization_name,
379 "namespace": "managed-resources",
380 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000381 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100382 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100383 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100384 },
385 {
386 "item": f"cluster_{cloud_type}",
387 "name": cluster_name,
388 "namespace": "",
389 "flag": "Synced",
390 "timeout": self._checkloop_resource_timeout,
391 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100392 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100393 },
394 {
395 "item": f"cluster_{cloud_type}",
396 "name": cluster_name,
397 "namespace": "",
398 "flag": "Ready",
399 "timeout": self._checkloop_resource_timeout,
400 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100401 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100402 },
403 {
404 "item": "kustomization",
405 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
406 "namespace": "managed-resources",
407 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000408 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100409 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100410 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100411 },
412 ]
413 else:
414 return False, "Not suitable VIM account to check cluster status"
415 if nodepool_name:
416 nodepool_check = {
417 "item": f"nodepool_{cloud_type}",
418 "name": nodepool_name,
419 "namespace": "",
420 "flag": "Ready",
421 "timeout": self._checkloop_resource_timeout,
422 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100423 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100424 }
425 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000426 return await self.common_check_list(
427 op_id, checkings_list, "clusters", db_cluster
428 )
garciadeblas72412282024-11-07 12:41:54 +0100429
garciadeblasb0a42c22024-11-13 16:00:10 +0100430 async def check_register_cluster(self, op_id, op_params, content):
431 self.logger.info(
432 f"check_register_cluster Operation {op_id}. Params: {op_params}."
433 )
434 # self.logger.debug(f"Content: {content}")
435 db_cluster = content["cluster"]
436 cluster_name = db_cluster["git_name"].lower()
437 cluster_kustomization_name = cluster_name
438 bootstrap = op_params.get("bootstrap", True)
439 checkings_list = [
440 {
441 "item": "kustomization",
442 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
443 "namespace": "managed-resources",
444 "flag": "Ready",
445 "timeout": self._checkloop_kustomization_timeout,
446 "enable": bootstrap,
447 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
448 },
449 ]
yshahcb9075f2024-11-22 12:08:57 +0000450 return await self.common_check_list(
451 op_id, checkings_list, "clusters", db_cluster
452 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100453
454 async def check_update_cluster(self, op_id, op_params, content):
455 self.logger.info(
456 f"check_create_cluster Operation {op_id}. Params: {op_params}."
457 )
458 # self.logger.debug(f"Content: {content}")
459 db_cluster = content["cluster"]
460 cluster_name = db_cluster["git_name"].lower()
461 cluster_kustomization_name = cluster_name
462 db_vim_account = content["vim_account"]
463 cloud_type = db_vim_account["vim_type"]
464 nodepool_name = ""
465 if cloud_type == "aws":
466 nodepool_name = f"{cluster_name}-nodegroup"
467 cluster_name = f"{cluster_name}-cluster"
468 elif cloud_type == "gcp":
469 nodepool_name = f"nodepool-{cluster_name}"
470 if cloud_type in ("azure", "gcp", "aws"):
471 checkings_list = [
472 {
473 "item": "kustomization",
474 "name": cluster_kustomization_name,
475 "namespace": "managed-resources",
476 "flag": "Ready",
477 "timeout": self._checkloop_kustomization_timeout,
478 "enable": True,
479 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
480 },
481 {
482 "item": f"cluster_{cloud_type}",
483 "name": cluster_name,
484 "namespace": "",
485 "flag": "Synced",
486 "timeout": self._checkloop_resource_timeout,
487 "enable": True,
488 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
489 },
490 {
491 "item": f"cluster_{cloud_type}",
492 "name": cluster_name,
493 "namespace": "",
494 "flag": "Ready",
495 "timeout": self._checkloop_resource_timeout,
496 "enable": True,
497 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
498 },
499 ]
500 else:
501 return False, "Not suitable VIM account to check cluster status"
502 if nodepool_name:
503 nodepool_check = {
504 "item": f"nodepool_{cloud_type}",
505 "name": nodepool_name,
506 "namespace": "",
507 "flag": "Ready",
508 "timeout": self._checkloop_resource_timeout,
509 "enable": True,
510 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
511 }
512 checkings_list.append(nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000513 return await self.common_check_list(
514 op_id, checkings_list, "clusters", db_cluster
515 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100516
garciadeblas96b94f52024-07-08 16:18:21 +0200517 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000518 profiles = [
519 "infra_controller_profiles",
520 "infra_config_profiles",
521 "app_profiles",
522 "resource_profiles",
523 ]
rshri948f7de2024-12-02 03:42:35 +0000524 """
rshri932105f2024-07-05 15:11:55 +0000525 profiles_collection = {
526 "infra_controller_profiles": "k8sinfra_controller",
527 "infra_config_profiles": "k8sinfra_config",
528 "app_profiles": "k8sapp",
529 "resource_profiles": "k8sresource",
530 }
rshri948f7de2024-12-02 03:42:35 +0000531 """
Your Name86149632024-11-14 16:17:16 +0000532 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000533 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200534 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000535 # db_collection = profiles_collection[profile_type]
536 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000537 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000538 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200539 db_profile["state"] = db_cluster["state"]
540 db_profile["resourceState"] = db_cluster["resourceState"]
541 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000542 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000543 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000544 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000545 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000546 )
rshri932105f2024-07-05 15:11:55 +0000547 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
548
    async def delete(self, params, order_id):
        """Delete a cluster through the ODU "delete_cluster" workflow.

        Launches the workflow, records workflow/resource status transitions
        in the "clusters" collection, and on success removes the cluster
        document, its dedicated profiles and the matching "k8sclusters"
        document. Registered (not created) clusters are deregistered instead.

        :param params: dict with "cluster_id" and "operation_id"
        :param order_id: identifier forwarded to deregister when applicable
        :return: None
        """
        self.logger.info("cluster delete Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
        # This if clause will be removed
        # NOTE(review): "created" is compared as the string "false" — confirm
        # it is stored as a string and not a boolean.
        if db_cluster["created"] == "false":
            return await self.deregister(params, order_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "DELETED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "delete_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

        # NOTE(review): if workflow_status is False, resource_status is never
        # bound and the call below would raise NameError — confirm intended flow.
        db_cluster["operatingState"] = "IDLE"
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, resource_status
        )
        db_cluster["current_operation"] = None
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # To delete it from DB
        if db_cluster["state"] == "DELETED":
            self.delete_cluster(db_cluster)

            # To delete it from k8scluster collection
            self.db.del_one("k8sclusters", {"name": db_cluster["name"]})

        return
633
garciadeblas96b94f52024-07-08 16:18:21 +0200634 def delete_cluster(self, db_cluster):
635 # Actually, item_content is equal to db_cluster
636 # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
637 # self.logger.debug("item_content is : {}".format(item_content))
rshri932105f2024-07-05 15:11:55 +0000638
rshri932105f2024-07-05 15:11:55 +0000639 # detach profiles
640 update_dict = None
641 profiles_to_detach = [
642 "infra_controller_profiles",
643 "infra_config_profiles",
644 "app_profiles",
645 "resource_profiles",
646 ]
rshri948f7de2024-12-02 03:42:35 +0000647 """
rshri932105f2024-07-05 15:11:55 +0000648 profiles_collection = {
649 "infra_controller_profiles": "k8sinfra_controller",
650 "infra_config_profiles": "k8sinfra_config",
651 "app_profiles": "k8sapp",
652 "resource_profiles": "k8sresource",
653 }
rshri948f7de2024-12-02 03:42:35 +0000654 """
rshri932105f2024-07-05 15:11:55 +0000655 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200656 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200657 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000658 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000659 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000660 # db_collection = profiles_collection[profile_type]
661 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000662 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200663 self.logger.debug("the db_profile is :{}".format(db_profile))
664 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200665 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000666 )
garciadeblasc2552852024-10-22 12:39:32 +0200667 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000668 "the db_profile name is :{}".format(db_profile["name"])
669 )
garciadeblas96b94f52024-07-08 16:18:21 +0200670 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000671 self.db.del_one(db_collection, {"_id": profile_id})
672 else:
rshri932105f2024-07-05 15:11:55 +0000673 profile_ids.remove(profile_id)
674 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000675 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200676 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000677 )
garciadeblas96b94f52024-07-08 16:18:21 +0200678 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000679
    async def attach_profile(self, params, order_id):
        """Attach a profile to a registered cluster through the GitOps workflow.

        Launches the "attach_profile_to_cluster" workflow, tracks its status,
        then checks the resulting resource and records every step in the
        cluster's operation history in the "clusters" collection.

        :param params: dict with "cluster_id", "operation_id", "profile_id"
            and "profile_type" (a key of self.profile_collection_mapping)
        :param order_id: identifier of the task order (kept for the common
            LCM task signature; not used directly here)
        :return: None
        """
        self.logger.info("profile attach Enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        # To get the profile details; the profile content travels to the
        # workflow inside the (decrypted) cluster copy
        profile_id = params["profile_id"]
        profile_type = params["profile_type"]
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        db_profile["profile_type"] = profile_type
        db_cluster_copy["profile"] = db_profile

        _, workflow_name = await self.odu.launch_workflow(
            "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # A successful workflow only means the change reached Git; the actual
        # resource is verified afterwards
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; the resource status is not known yet (None)
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            # The profile id is added to the cluster's list only when the
            # resource actually reached READY
            profile_list = db_cluster[profile_type]
            if resource_status:
                profile_list.append(profile_id)
            db_cluster[profile_type] = profile_list
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # NOTE(review): when the workflow fails, "operatingState" is not reset
        # to IDLE and "current_operation" is not cleared — confirm this is
        # handled by the caller/retry logic
        return
758
rshri948f7de2024-12-02 03:42:35 +0000759 async def detach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000760 self.logger.info("profile dettach Enter")
rshri948f7de2024-12-02 03:42:35 +0000761
762 # To get the cluster details
763 cluster_id = params["cluster_id"]
764 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
765 # content = {
766 # "cluster": db_cluster,
767 # }
768
769 # To get the operation params details
770 op_id = params["operation_id"]
771 op_params = self.get_operation_params(db_cluster, op_id)
772
773 # To initialize the operation states
774 self.initialize_operation(cluster_id, op_id)
775
776 # To copy the cluster content and decrypting the key to use in workflows
777 db_cluster_copy = self.decrypting_key(db_cluster)
778
779 # To get the profile details
780 profile_id = params["profile_id"]
781 profile_type = params["profile_type"]
782 profile_collection = self.profile_collection_mapping[profile_type]
783 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
784 db_profile["profile_type"] = profile_type
785 # content["profile"] = db_profile
786 db_cluster_copy["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000787
garciadeblasadb81e82024-11-08 01:11:46 +0100788 _, workflow_name = await self.odu.launch_workflow(
rshri948f7de2024-12-02 03:42:35 +0000789 "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200790 )
rshri932105f2024-07-05 15:11:55 +0000791 self.logger.info("workflow_name is :{}".format(workflow_name))
792
garciadeblas96b94f52024-07-08 16:18:21 +0200793 workflow_status, workflow_msg = await self.odu.check_workflow_status(
794 workflow_name
795 )
rshri932105f2024-07-05 15:11:55 +0000796 self.logger.info(
797 "workflow_status is :{} and workflow_msg is :{}".format(
798 workflow_status, workflow_msg
799 )
800 )
801 if workflow_status:
802 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
803 else:
804 db_cluster["resourceState"] = "ERROR"
805 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000806 db_cluster = self.update_operation_history(
807 db_cluster, op_id, workflow_status, None
808 )
rshri932105f2024-07-05 15:11:55 +0000809 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
810
811 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100812 resource_status, resource_msg = await self.check_resource_status(
rshri948f7de2024-12-02 03:42:35 +0000813 "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000814 )
815 self.logger.info(
816 "resource_status is :{} and resource_msg is :{}".format(
817 resource_status, resource_msg
818 )
819 )
820 if resource_status:
821 db_cluster["resourceState"] = "READY"
822 else:
823 db_cluster["resourceState"] = "ERROR"
824
825 db_cluster["operatingState"] = "IDLE"
826 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000827 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000828 )
rshri932105f2024-07-05 15:11:55 +0000829 profile_list = db_cluster[profile_type]
830 self.logger.info("profile list is : {}".format(profile_list))
831 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000832 profile_list.remove(profile_id)
rshri932105f2024-07-05 15:11:55 +0000833 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000834 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000835 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
836
837 return
838
    async def register(self, params, order_id):
        """Register (create) a cluster through the "register_cluster" workflow.

        Tracks workflow and resource status, updates the "clusters" document
        accordingly and, when the cluster ends up READY/CREATED, propagates
        the credentials to the corresponding "k8sclusters" document and
        triggers the legacy k8scluster registration via self.regist.

        :param params: dict with "cluster_id" and "operation_id"
        :param order_id: task order identifier, forwarded to self.regist.create
        :return: None
        """
        self.logger.info("cluster register enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        _, workflow_name = await self.odu.launch_workflow(
            "register_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # A successful workflow only means the change reached Git; the actual
        # resource is verified afterwards
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; the resource status is not known yet (None)
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "register_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "register_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Mirror the cluster credentials into the legacy "k8sclusters" record.
        # NOTE(review): assumes db_cluster always carries "credentials" at this
        # point — confirm for the failed-workflow path
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
        db_register["credentials"] = db_cluster["credentials"]
        self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            db_register["_admin"]["operationalState"] = "ERROR"
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
926
    async def deregister(self, params, order_id):
        """Deregister a cluster through the "deregister_cluster" workflow.

        Tracks workflow and resource status, updates the "clusters" document,
        and finally delegates to self.delete() to remove the cluster record.

        :param params: dict with "cluster_id" and "operation_id"
        :param order_id: task order identifier, forwarded to self.delete
        :return: the result of self.delete(params, order_id)
        """
        self.logger.info("cluster deregister enter")

        # To get the cluster details
        cluster_id = params["cluster_id"]
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_id = params["operation_id"]
        op_params = self.get_operation_params(db_cluster, op_id)

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To copy the cluster content and decrypting the key to use in workflows
        db_cluster_copy = self.decrypting_key(db_cluster)

        _, workflow_name = await self.odu.launch_workflow(
            "deregister_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        # A successful workflow only means the change reached Git; the actual
        # resource is verified afterwards
        if workflow_status:
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # Record the workflow result; the resource status is not known yet (None)
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "deregister_cluster", op_id, op_params, db_cluster_copy
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "deregister_cluster", op_id, op_params, db_cluster_copy
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
        # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
        # Setting created flag to true avoids infinite loops when deregistering a cluster
        db_cluster["created"] = "true"
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        return await self.delete(params, order_id)
rshri932105f2024-07-05 15:11:55 +00001002
rshri948f7de2024-12-02 03:42:35 +00001003 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001004 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001005 cluster_id = params["cluster_id"]
1006 op_id = params["operation_id"]
1007 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001008 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1009 if result:
1010 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001011 op_len = 0
1012 for operations in db_cluster["operationHistory"]:
1013 if operations["op_id"] == op_id:
1014 db_cluster["operationHistory"][op_len]["result"] = result
1015 db_cluster["operationHistory"][op_len]["endDate"] = time()
1016 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001017 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001018 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001019 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001020 return
1021
rshri948f7de2024-12-02 03:42:35 +00001022 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001023 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001024 # To get the cluster details
1025 cluster_id = params["cluster_id"]
1026 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1027
1028 # To get the operation params details
1029 op_id = params["operation_id"]
1030 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001031
rshric3564942024-11-12 18:12:38 +00001032 db_cluster_copy = self.decrypting_key(db_cluster)
1033
1034 # vim account details
1035 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
1036 db_cluster_copy["vim_account"] = db_vim
1037
garciadeblasadb81e82024-11-08 01:11:46 +01001038 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +00001039 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001040 )
1041 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1042 workflow_name
1043 )
1044 self.logger.info(
1045 "Workflow Status: {} Workflow Message: {}".format(
1046 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001047 )
garciadeblas96b94f52024-07-08 16:18:21 +02001048 )
1049
1050 if workflow_status:
1051 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1052 else:
1053 db_cluster["resourceState"] = "ERROR"
1054
yshahcb9075f2024-11-22 12:08:57 +00001055 db_cluster = self.update_operation_history(
1056 db_cluster, op_id, workflow_status, None
1057 )
garciadeblas96b94f52024-07-08 16:18:21 +02001058 # self.logger.info("Db content: {}".format(db_content))
1059 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1060 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1061
garciadeblas28bff0f2024-09-16 12:53:07 +02001062 # Clean items used in the workflow, no matter if the workflow succeeded
1063 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +00001064 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +02001065 )
1066 self.logger.info(
1067 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1068 )
garciadeblas96b94f52024-07-08 16:18:21 +02001069 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001070 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +00001071 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001072 )
1073 self.logger.info(
1074 "Resource Status: {} Resource Message: {}".format(
1075 resource_status, resource_msg
1076 )
1077 )
yshah771dea82024-07-05 15:11:49 +00001078
1079 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001080 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001081 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001082 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001083
yshah0defcd52024-11-18 07:41:35 +00001084 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001085 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001086 )
1087
garciadeblas96b94f52024-07-08 16:18:21 +02001088 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001089 # self.logger.info("db_cluster: {}".format(db_cluster))
1090 # TODO: verify enxtcondition
1091 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1092 if workflow_status:
1093 if "k8s_version" in op_params:
1094 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001095 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001096 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001097 if "node_size" in op_params:
1098 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001099 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001100 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001101 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001102 return
1103
1104
class CloudCredentialsLcm(GitOpsLcm):
    """LCM handler for cloud (VIM) credentials managed through GitOps workflows.

    Operates on the "vim_accounts" collection and drives the
    create/update/delete cloud-credentials workflows through self.odu.
    """

    db_collection = "vim_accounts"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param msg: message bus handler
        :param lcm_tasks: task registry shared by the LCM
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def add(self, params, order_id):
        """Create cloud credentials via the "create_cloud_credentials" workflow.

        :param params: the vim_account document parameters; "_id" is used both
            as the record id and as the operation id
        :param order_id: task order identifier (not used directly here)
        :return: None
        """
        self.logger.info("Cloud Credentials create")
        vim_id = params["_id"]
        # The vim account id doubles as the operation id for the workflow
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # The workflow needs the plain-text credentials, so decrypt in place
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )

        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "create_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )

        # NOTE(review): the account is marked ENABLED regardless of the
        # workflow/resource outcome — confirm this is intentional
        db_content["_admin"]["operationalState"] = "ENABLED"
        for operation in db_content["_admin"]["operations"]:
            if operation["lcmOperationType"] == "create":
                operation["operationState"] = "ENABLED"
        self.logger.info("Content : {}".format(db_content))
        self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
        return

    async def edit(self, params, order_id):
        """Update cloud credentials via the "update_cloud_credentials" workflow.

        :param params: the vim_account document parameters; "_id" is used both
            as the record id and as the operation id
        :param order_id: task order identifier (not used directly here)
        :return: None
        """
        self.logger.info("Cloud Credentials Update")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
        vim_config = db_content.get("config", {})
        # The workflow needs the plain-text credentials, so decrypt in place
        self.db.encrypt_decrypt_fields(
            vim_config.get("credentials"),
            "decrypt",
            ["password", "secret"],
            schema_version=db_content["schema_version"],
            salt=vim_id,
        )

        _, workflow_name = await self.odu.launch_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "update_cloud_credentials", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "update_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        return

    async def remove(self, params, order_id):
        """Delete cloud credentials via the "delete_cloud_credentials" workflow.

        :param params: the vim_account document parameters; "_id" is used both
            as the record id and as the operation id
        :param order_id: task order identifier (not used directly here)
        :return: None
        """
        self.logger.info("Cloud Credentials remove")
        vim_id = params["_id"]
        op_id = vim_id
        op_params = params
        db_content = self.db.get_one("vim_accounts", {"_id": vim_id})

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cloud_credentials", op_id, op_params, db_content
        )
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
        )

        if workflow_status:
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cloud_credentials", op_id, op_params, db_content
            )
            self.logger.info(
                "Resource Status: {} Resource Message: {}".format(
                    resource_status, resource_msg
                )
            )
        # NOTE(review): the DB record is deleted regardless of the workflow
        # result — confirm this best-effort behavior is intended
        self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        return
1241
rshri932105f2024-07-05 15:11:55 +00001242
class K8sAppLcm(GitOpsLcm):
    """LCM handler for application profiles stored in the "k8sapp" collection."""

    db_collection = "k8sapp"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param msg: message bus handler
        :param lcm_tasks: task registry shared by the LCM
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an app profile through the "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: task order identifier (not used directly here)
        :return: None
        """
        self.logger.info("App Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        # Tag consumed by the workflows to identify the profile kind
        content["profile_type"] = "applications"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sapp", {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        if workflow_status:
            # check_resource returns the (possibly updated) profile content
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.logger.info(f"App Create Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an app profile through the "delete_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: task order identifier (not used directly here)
        :return: None
        """
        self.logger.info("App delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sapp", {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        if workflow_status:
            resource_status, content = await self.check_resource(
                "delete_profile", op_id, op_params, content
            )

            # Only drop the DB record once the resource is confirmed gone;
            # the state is persisted just before deletion for traceability
            if resource_status:
                content["state"] = "DELETED"
                self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
                self.db.del_one(self.db_collection, {"_id": content["_id"]})
            self.logger.info(f"App Delete Exit with resource status: {resource_status}")
        return
1313
1314
garciadeblas72412282024-11-07 12:41:54 +01001315class K8sResourceLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001316 db_collection = "k8sresource"
1317
    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param msg: message bus handler
        :param lcm_tasks: task registry shared by the LCM
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001325
    async def create(self, params, order_id):
        """Create a managed-resource profile through the "create_profile" workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: task order identifier (not used directly here)
        :return: None
        """
        self.logger.info("Resource Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one("k8sresource", {"_id": profile_id})
        # Tag consumed by the workflows to identify the profile kind
        content["profile_type"] = "managed-resources"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one("k8sresource", {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        if workflow_status:
            # check_resource returns the (possibly updated) profile content
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.logger.info(
                f"Resource Create Exit with resource status: {resource_status}"
            )
        return
1356
rshri948f7de2024-12-02 03:42:35 +00001357 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001358 self.logger.info("Resource delete Enter")
rshri948f7de2024-12-02 03:42:35 +00001359
1360 op_id = params["operation_id"]
1361 profile_id = params["profile_id"]
1362
1363 # To initialize the operation states
1364 self.initialize_operation(profile_id, op_id)
1365
1366 content = self.db.get_one("k8sresource", {"_id": profile_id})
1367 op_params = self.get_operation_params(content, op_id)
rshri932105f2024-07-05 15:11:55 +00001368
garciadeblasadb81e82024-11-08 01:11:46 +01001369 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001370 "delete_profile", op_id, op_params, content
1371 )
rshri932105f2024-07-05 15:11:55 +00001372 self.logger.info("workflow_name is :{}".format(workflow_name))
1373
yshah564ec9c2024-11-29 07:33:32 +00001374 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001375
1376 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001377 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001378 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001379 )
rshri932105f2024-07-05 15:11:55 +00001380
yshah564ec9c2024-11-29 07:33:32 +00001381 if resource_status:
1382 content["state"] = "DELETED"
1383 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1384 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1385 self.logger.info(
1386 f"Resource Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001387 )
rshri932105f2024-07-05 15:11:55 +00001388 return
1389
1390
garciadeblas72412282024-11-07 12:41:54 +01001391class K8sInfraControllerLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001392 db_collection = "k8sinfra_controller"
1393
rshri932105f2024-07-05 15:11:55 +00001394 def __init__(self, msg, lcm_tasks, config):
1395 """
1396 Init, Connect to database, filesystem storage, and messaging
1397 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1398 :return: None
1399 """
garciadeblas72412282024-11-07 12:41:54 +01001400 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001401
rshri948f7de2024-12-02 03:42:35 +00001402 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001403 self.logger.info("Infra controller Create Enter")
1404
rshri948f7de2024-12-02 03:42:35 +00001405 op_id = params["operation_id"]
1406 profile_id = params["profile_id"]
1407
1408 # To initialize the operation states
1409 self.initialize_operation(profile_id, op_id)
1410
1411 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1412 content["profile_type"] = "infra-controllers"
1413 op_params = self.get_operation_params(content, op_id)
1414 self.db.set_one("k8sinfra_controller", {"_id": content["_id"]}, content)
1415
garciadeblasadb81e82024-11-08 01:11:46 +01001416 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001417 "create_profile", op_id, op_params, content
1418 )
rshri932105f2024-07-05 15:11:55 +00001419 self.logger.info("workflow_name is :{}".format(workflow_name))
1420
yshah564ec9c2024-11-29 07:33:32 +00001421 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001422
1423 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001424 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001425 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001426 )
yshah564ec9c2024-11-29 07:33:32 +00001427 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1428 self.logger.info(
1429 f"Infra Controller Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001430 )
rshri932105f2024-07-05 15:11:55 +00001431 return
1432
rshri948f7de2024-12-02 03:42:35 +00001433 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001434 self.logger.info("Infra controller delete Enter")
rshri932105f2024-07-05 15:11:55 +00001435
rshri948f7de2024-12-02 03:42:35 +00001436 op_id = params["operation_id"]
1437 profile_id = params["profile_id"]
1438
1439 # To initialize the operation states
1440 self.initialize_operation(profile_id, op_id)
1441
1442 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1443 op_params = self.get_operation_params(content, op_id)
1444
garciadeblasadb81e82024-11-08 01:11:46 +01001445 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001446 "delete_profile", op_id, op_params, content
1447 )
rshri932105f2024-07-05 15:11:55 +00001448 self.logger.info("workflow_name is :{}".format(workflow_name))
1449
yshah564ec9c2024-11-29 07:33:32 +00001450 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001451
1452 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001453 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001454 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001455 )
rshri932105f2024-07-05 15:11:55 +00001456
yshah564ec9c2024-11-29 07:33:32 +00001457 if resource_status:
1458 content["state"] = "DELETED"
1459 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1460 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1461 self.logger.info(
1462 f"Infra Controller Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001463 )
rshri932105f2024-07-05 15:11:55 +00001464 return
1465
1466
garciadeblas72412282024-11-07 12:41:54 +01001467class K8sInfraConfigLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001468 db_collection = "k8sinfra_config"
1469
rshri932105f2024-07-05 15:11:55 +00001470 def __init__(self, msg, lcm_tasks, config):
1471 """
1472 Init, Connect to database, filesystem storage, and messaging
1473 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1474 :return: None
1475 """
garciadeblas72412282024-11-07 12:41:54 +01001476 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001477
rshri948f7de2024-12-02 03:42:35 +00001478 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001479 self.logger.info("Infra config Create Enter")
1480
rshri948f7de2024-12-02 03:42:35 +00001481 op_id = params["operation_id"]
1482 profile_id = params["profile_id"]
1483
1484 # To initialize the operation states
1485 self.initialize_operation(profile_id, op_id)
1486
1487 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1488 content["profile_type"] = "infra-configs"
1489 op_params = self.get_operation_params(content, op_id)
1490 self.db.set_one("k8sinfra_config", {"_id": content["_id"]}, content)
1491
garciadeblasadb81e82024-11-08 01:11:46 +01001492 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001493 "create_profile", op_id, op_params, content
1494 )
rshri932105f2024-07-05 15:11:55 +00001495 self.logger.info("workflow_name is :{}".format(workflow_name))
1496
yshah564ec9c2024-11-29 07:33:32 +00001497 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001498
1499 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001500 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001501 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001502 )
yshah564ec9c2024-11-29 07:33:32 +00001503 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1504 self.logger.info(
1505 f"Infra Config Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001506 )
rshri932105f2024-07-05 15:11:55 +00001507 return
1508
rshri948f7de2024-12-02 03:42:35 +00001509 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001510 self.logger.info("Infra config delete Enter")
1511
rshri948f7de2024-12-02 03:42:35 +00001512 op_id = params["operation_id"]
1513 profile_id = params["profile_id"]
1514
1515 # To initialize the operation states
1516 self.initialize_operation(profile_id, op_id)
1517
1518 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1519 op_params = self.get_operation_params(content, op_id)
1520
garciadeblasadb81e82024-11-08 01:11:46 +01001521 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001522 "delete_profile", op_id, op_params, content
1523 )
rshri932105f2024-07-05 15:11:55 +00001524 self.logger.info("workflow_name is :{}".format(workflow_name))
rshri932105f2024-07-05 15:11:55 +00001525
yshah564ec9c2024-11-29 07:33:32 +00001526 workflow_status = await self.check_workflow(op_id, workflow_name, content)
1527
rshri932105f2024-07-05 15:11:55 +00001528 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001529 resource_status, content = await self.check_resource(
1530 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001531 )
yshah564ec9c2024-11-29 07:33:32 +00001532
rshri932105f2024-07-05 15:11:55 +00001533 if resource_status:
yshah564ec9c2024-11-29 07:33:32 +00001534 content["state"] = "DELETED"
1535 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1536 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1537 self.logger.info(
1538 f"Infra Config Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001539 )
rshri932105f2024-07-05 15:11:55 +00001540
rshri932105f2024-07-05 15:11:55 +00001541 return
yshah771dea82024-07-05 15:11:49 +00001542
1543
garciadeblas72412282024-11-07 12:41:54 +01001544class OkaLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001545 db_collection = "okas"
1546
1547 def __init__(self, msg, lcm_tasks, config):
1548 """
1549 Init, Connect to database, filesystem storage, and messaging
1550 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1551 :return: None
1552 """
garciadeblas72412282024-11-07 12:41:54 +01001553 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001554
yshah564ec9c2024-11-29 07:33:32 +00001555 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001556 self.logger.info("OKA Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001557 op_id = params["operation_id"]
1558 oka_id = params["oka_id"]
1559 self.initialize_operation(oka_id, op_id)
1560 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1561 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001562
garciadeblasadb81e82024-11-08 01:11:46 +01001563 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001564 "create_oka", op_id, op_params, db_content
1565 )
yshah564ec9c2024-11-29 07:33:32 +00001566
1567 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001568
1569 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001570 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001571 "create_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001572 )
garciadeblas96b94f52024-07-08 16:18:21 +02001573 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001574 self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001575 return
1576
yshah564ec9c2024-11-29 07:33:32 +00001577 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001578 self.logger.info("OKA Edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001579 op_id = params["operation_id"]
1580 oka_id = params["oka_id"]
1581 self.initialize_operation(oka_id, op_id)
1582 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1583 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001584
garciadeblasadb81e82024-11-08 01:11:46 +01001585 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001586 "update_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001587 )
yshah564ec9c2024-11-29 07:33:32 +00001588 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001589
1590 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001591 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001592 "update_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001593 )
garciadeblas96b94f52024-07-08 16:18:21 +02001594 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001595 self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001596 return
1597
yshah564ec9c2024-11-29 07:33:32 +00001598 async def delete(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001599 self.logger.info("OKA delete Enter")
yshah564ec9c2024-11-29 07:33:32 +00001600 op_id = params["operation_id"]
1601 oka_id = params["oka_id"]
1602 self.initialize_operation(oka_id, op_id)
1603 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1604 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001605
garciadeblasadb81e82024-11-08 01:11:46 +01001606 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001607 "delete_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001608 )
yshah564ec9c2024-11-29 07:33:32 +00001609 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001610
1611 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001612 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001613 "delete_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001614 )
yshah771dea82024-07-05 15:11:49 +00001615
yshah564ec9c2024-11-29 07:33:32 +00001616 if resource_status:
1617 db_content["state"] == "DELETED"
1618 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
garciadeblas96b94f52024-07-08 16:18:21 +02001619 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah564ec9c2024-11-29 07:33:32 +00001620 self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001621 return
1622
1623
garciadeblas72412282024-11-07 12:41:54 +01001624class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001625 db_collection = "ksus"
1626
1627 def __init__(self, msg, lcm_tasks, config):
1628 """
1629 Init, Connect to database, filesystem storage, and messaging
1630 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1631 :return: None
1632 """
garciadeblas72412282024-11-07 12:41:54 +01001633 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001634
yshah564ec9c2024-11-29 07:33:32 +00001635 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001636 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001637 db_content = []
1638 op_params = []
1639 op_id = params["operation_id"]
1640 for ksu_id in params["ksus_list"]:
1641 self.logger.info("Ksu ID: {}".format(ksu_id))
1642 self.initialize_operation(ksu_id, op_id)
1643 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1644 self.logger.info("Db KSU: {}".format(db_ksu))
1645 db_content.append(db_ksu)
1646 ksu_params = {}
1647 ksu_params = self.get_operation_params(db_ksu, op_id)
1648 self.logger.info("Operation Params: {}".format(ksu_params))
1649 # Update ksu_params["profile"] with profile name and age-pubkey
1650 profile_type = ksu_params["profile"]["profile_type"]
1651 profile_id = ksu_params["profile"]["_id"]
1652 profile_collection = self.profile_collection_mapping[profile_type]
1653 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1654 ksu_params["profile"]["name"] = db_profile["name"]
1655 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1656 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1657 for oka in ksu_params["oka"]:
1658 if "sw_catalog_path" not in oka:
1659 oka_id = oka["_id"]
1660 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001661 oka_type = MAP_PROFILE[
1662 db_oka.get("profile_type", "infra_controller_profiles")
1663 ]
1664 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001665 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001666
garciadeblasadb81e82024-11-08 01:11:46 +01001667 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001668 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001669 )
yshah564ec9c2024-11-29 07:33:32 +00001670 for db_ksu, ksu_params in zip(db_content, op_params):
1671 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
yshah771dea82024-07-05 15:11:49 +00001672
garciadeblas96b94f52024-07-08 16:18:21 +02001673 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001674 resource_status, db_ksu = await self.check_resource(
1675 "create_ksus", op_id, ksu_params, db_ksu
1676 )
yshah771dea82024-07-05 15:11:49 +00001677
garciadeblas96b94f52024-07-08 16:18:21 +02001678 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1679
garciadeblasd8429852024-10-17 15:30:30 +02001680 # Clean items used in the workflow, no matter if the workflow succeeded
1681 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001682 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001683 )
1684 self.logger.info(
1685 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1686 )
yshah564ec9c2024-11-29 07:33:32 +00001687 self.logger.info(f"KSU Create EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001688 return
1689
yshah564ec9c2024-11-29 07:33:32 +00001690 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001691 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001692 db_content = []
1693 op_params = []
1694 op_id = params["operation_id"]
1695 for ksu_id in params["ksus_list"]:
1696 self.initialize_operation(ksu_id, op_id)
1697 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1698 db_content.append(db_ksu)
1699 ksu_params = {}
1700 ksu_params = self.get_operation_params(db_ksu, op_id)
1701 # Update ksu_params["profile"] with profile name and age-pubkey
1702 profile_type = ksu_params["profile"]["profile_type"]
1703 profile_id = ksu_params["profile"]["_id"]
1704 profile_collection = self.profile_collection_mapping[profile_type]
1705 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1706 ksu_params["profile"]["name"] = db_profile["name"]
1707 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1708 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1709 for oka in ksu_params["oka"]:
1710 if "sw_catalog_path" not in oka:
1711 oka_id = oka["_id"]
1712 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001713 oka_type = MAP_PROFILE[
1714 db_oka.get("profile_type", "infra_controller_profiles")
1715 ]
1716 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001717 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001718
garciadeblasadb81e82024-11-08 01:11:46 +01001719 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001720 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001721 )
yshah771dea82024-07-05 15:11:49 +00001722
yshah564ec9c2024-11-29 07:33:32 +00001723 for db_ksu, ksu_params in zip(db_content, op_params):
1724 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1725
garciadeblas96b94f52024-07-08 16:18:21 +02001726 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001727 resource_status, db_ksu = await self.check_resource(
1728 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02001729 )
garciadeblas96b94f52024-07-08 16:18:21 +02001730 db_ksu["name"] = ksu_params["name"]
1731 db_ksu["description"] = ksu_params["description"]
1732 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
1733 "profile_type"
1734 ]
1735 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
1736 db_ksu["oka"] = ksu_params["oka"]
1737 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1738
yshah564ec9c2024-11-29 07:33:32 +00001739 # Clean items used in the workflow, no matter if the workflow succeeded
1740 clean_status, clean_msg = await self.odu.clean_items_workflow(
1741 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001742 )
1743 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00001744 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02001745 )
yshah564ec9c2024-11-29 07:33:32 +00001746 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001747 return
1748
yshah564ec9c2024-11-29 07:33:32 +00001749 async def delete(self, params, order_id):
1750 self.logger.info("ksu delete Enter")
1751 db_content = []
1752 op_params = []
1753 op_id = params["operation_id"]
1754 for ksu_id in params["ksus_list"]:
1755 self.initialize_operation(ksu_id, op_id)
1756 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1757 db_content.append(db_ksu)
1758 ksu_params = {}
1759 ksu_params["profile"] = {}
1760 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
1761 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
1762 # Update ksu_params["profile"] with profile name and age-pubkey
1763 profile_type = ksu_params["profile"]["profile_type"]
1764 profile_id = ksu_params["profile"]["_id"]
1765 profile_collection = self.profile_collection_mapping[profile_type]
1766 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1767 ksu_params["profile"]["name"] = db_profile["name"]
1768 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1769 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001770
garciadeblasadb81e82024-11-08 01:11:46 +01001771 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001772 "delete_ksus", op_id, op_params, db_content
1773 )
1774
1775 for db_ksu, ksu_params in zip(db_content, op_params):
1776 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1777
1778 if workflow_status:
1779 resource_status, db_ksu = await self.check_resource(
1780 "delete_ksus", op_id, ksu_params, db_ksu
1781 )
1782
1783 if resource_status:
1784 db_ksu["state"] == "DELETED"
1785 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1786 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
1787
1788 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
1789 return
1790
1791 async def clone(self, params, order_id):
1792 self.logger.info("ksu clone Enter")
1793 op_id = params["operation_id"]
1794 ksus_id = params["ksus_list"][0]
1795 self.initialize_operation(ksus_id, op_id)
1796 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1797 op_params = self.get_operation_params(db_content, op_id)
1798 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001799 "clone_ksus", op_id, op_params, db_content
1800 )
yshah564ec9c2024-11-29 07:33:32 +00001801
1802 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001803
1804 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001805 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001806 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001807 )
garciadeblas96b94f52024-07-08 16:18:21 +02001808 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001809
1810 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001811 return
1812
yshah564ec9c2024-11-29 07:33:32 +00001813 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001814 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00001815 op_id = params["operation_id"]
1816 ksus_id = params["ksus_list"][0]
1817 self.initialize_operation(ksus_id, op_id)
1818 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1819 op_params = self.get_operation_params(db_content, op_id)
garciadeblasadb81e82024-11-08 01:11:46 +01001820 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001821 "move_ksus", op_id, op_params, db_content
1822 )
yshah564ec9c2024-11-29 07:33:32 +00001823
1824 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001825
1826 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001827 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001828 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001829 )
garciadeblas96b94f52024-07-08 16:18:21 +02001830 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001831
1832 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001833 return