blob: 7b4124c40f6ab17eab1a06a9d34686717654aeb4 [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
rshri932105f2024-07-05 15:11:55 +000030
# Maps the profile field names used in cluster documents to the section
# names used on the GitOps side (presumably directory names in the git
# repo managed by the ODU workflows — confirm against odu_workflows).
MAP_PROFILE = {
    "infra_controller_profiles": "infra-controllers",
    "infra_config_profiles": "infra-configs",
    "resource_profiles": "managed_resources",
    "app_profiles": "apps",
}
37
rshri932105f2024-07-05 15:11:55 +000038
class GitOpsLcm(LcmBase):
    """Base LCM handler for GitOps-managed items.

    Provides the common machinery shared by the concrete handlers
    (operation-history bookkeeping, workflow/resource status checks,
    decryption helpers).  Subclasses override ``db_collection`` and
    populate ``self._workflows`` with per-workflow check hooks.
    """

    # Default MongoDB collection for this item type; overridden by subclasses.
    db_collection = "gitops"
    # NOTE(review): class-level attributes shared by all instances; they are
    # never read in the visible code — presumably placeholders. Confirm usage.
    workflow_status = None
    resource_status = None

    # Maps profile field names (as stored on cluster documents) to the DB
    # collection holding that profile type.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }
garciadeblasea865ff2024-11-20 12:42:49 +010050
garciadeblas72412282024-11-07 12:41:54 +010051 def __init__(self, msg, lcm_tasks, config):
52 self.logger = logging.getLogger("lcm.gitops")
53 self.lcm_tasks = lcm_tasks
54 self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
55 self._checkloop_kustomization_timeout = 900
56 self._checkloop_resource_timeout = 900
57 self._workflows = {}
58 super().__init__(msg, self.logger)
59
60 async def check_dummy_operation(self, op_id, op_params, content):
61 self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
62 return True, "OK"
63
garciadeblasea865ff2024-11-20 12:42:49 +010064 def initialize_operation(self, item_id, op_id):
65 db_item = self.db.get_one(self.db_collection, {"_id": item_id})
66 operation = next(
67 (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
68 None,
69 )
70 operation["workflowState"] = "PROCESSING"
71 operation["resourceState"] = "NOT_READY"
72 operation["operationState"] = "IN_PROGRESS"
73 operation["gitOperationInfo"] = None
74 db_item["current_operation"] = operation["op_id"]
75 self.db.set_one(self.db_collection, {"_id": item_id}, db_item)
76
yshah564ec9c2024-11-29 07:33:32 +000077 def get_operation_params(self, item, operation_id):
78 operation_history = item.get("operationHistory", [])
79 operation = find_in_list(
80 operation_history, lambda op: op["op_id"] == operation_id
81 )
82 return operation.get("operationParams", {})
83
84 def get_operation_type(self, item, operation_id):
85 operation_history = item.get("operationHistory", [])
86 operation = find_in_list(
87 operation_history, lambda op: op["op_id"] == operation_id
88 )
89 return operation.get("operationType", {})
90
garciadeblasbe890702024-12-20 11:39:13 +010091 def update_state_operation_history(
92 self, content, op_id, workflow_state=None, resource_state=None
93 ):
94 self.logger.info(
95 f"Update state of operation {op_id} in Operation History in DB"
96 )
97 self.logger.info(
98 f"Workflow state: {workflow_state}. Resource state: {resource_state}"
99 )
100 self.logger.debug(f"Content: {content}")
101
102 op_num = 0
103 for operation in content["operationHistory"]:
104 self.logger.debug("Operations: {}".format(operation))
105 if operation["op_id"] == op_id:
106 self.logger.debug("Found operation number: {}".format(op_num))
107 if workflow_state is not None:
108 operation["workflowState"] = workflow_state
109
110 if resource_state is not None:
111 operation["resourceState"] = resource_state
112 break
113 op_num += 1
114 self.logger.debug("content: {}".format(content))
115
116 return content
117
garciadeblas7eae6f42024-11-08 10:41:38 +0100118 def update_operation_history(
garciadeblasf9092892024-12-12 11:07:08 +0100119 self, content, op_id, workflow_status=None, resource_status=None, op_end=True
garciadeblas7eae6f42024-11-08 10:41:38 +0100120 ):
121 self.logger.info(
122 f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
123 )
124 self.logger.debug(f"Content: {content}")
125
garciadeblas7eae6f42024-11-08 10:41:38 +0100126 op_num = 0
127 for operation in content["operationHistory"]:
128 self.logger.debug("Operations: {}".format(operation))
129 if operation["op_id"] == op_id:
130 self.logger.debug("Found operation number: {}".format(op_num))
garciadeblas8bde3f42024-12-20 10:37:12 +0100131 if workflow_status is not None:
132 if workflow_status:
133 operation["workflowState"] = "COMPLETED"
134 operation["result"] = True
135 else:
136 operation["workflowState"] = "ERROR"
137 operation["operationState"] = "FAILED"
138 operation["result"] = False
garciadeblas7eae6f42024-11-08 10:41:38 +0100139
garciadeblas8bde3f42024-12-20 10:37:12 +0100140 if resource_status is not None:
141 if resource_status:
142 operation["resourceState"] = "READY"
143 operation["operationState"] = "COMPLETED"
144 operation["result"] = True
145 else:
146 operation["resourceState"] = "NOT_READY"
147 operation["operationState"] = "FAILED"
148 operation["result"] = False
garciadeblas7eae6f42024-11-08 10:41:38 +0100149
garciadeblasf9092892024-12-12 11:07:08 +0100150 if op_end:
151 now = time()
152 operation["endDate"] = now
garciadeblas7eae6f42024-11-08 10:41:38 +0100153 break
154 op_num += 1
155 self.logger.debug("content: {}".format(content))
156
157 return content
158
yshah564ec9c2024-11-29 07:33:32 +0000159 async def check_workflow(self, op_id, workflow_name, db_content):
160 workflow_status, workflow_msg = await self.odu.check_workflow_status(
161 workflow_name
162 )
163 self.logger.info(
164 "Workflow Status: {} Workflow Message: {}".format(
165 workflow_status, workflow_msg
166 )
167 )
168 operation_type = self.get_operation_type(db_content, op_id)
169 if operation_type == "create" and workflow_status:
170 db_content["state"] = "CREATED"
171 elif operation_type == "create" and not workflow_status:
172 db_content["state"] = "FAILED_CREATION"
173 elif operation_type == "delete" and workflow_status:
174 db_content["state"] = "DELETED"
175 elif operation_type == "delete" and not workflow_status:
176 db_content["state"] = "FAILED_DELETION"
177
178 if workflow_status:
179 db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
180 else:
181 db_content["resourceState"] = "ERROR"
182
183 db_content = self.update_operation_history(
184 db_content, op_id, workflow_status, None
185 )
186 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
187 return workflow_status
188
189 async def check_resource(self, resource_name, op_id, op_params, db_content):
190 workflow_status = True
191
192 resource_status, resource_msg = await self.check_resource_status(
193 resource_name, op_id, op_params, db_content
194 )
195 self.logger.info(
196 "Resource Status: {} Resource Message: {}".format(
197 resource_status, resource_msg
198 )
199 )
200
201 if resource_status:
202 db_content["resourceState"] = "READY"
203 else:
204 db_content["resourceState"] = "ERROR"
205
206 db_content = self.update_operation_history(
207 db_content, op_id, workflow_status, resource_status
208 )
209 db_content["operatingState"] = "IDLE"
210 db_content["current_operation"] = None
211 return resource_status, db_content
212
yshahcb9075f2024-11-22 12:08:57 +0000213 async def common_check_list(self, op_id, checkings_list, db_collection, db_item):
garciadeblas72412282024-11-07 12:41:54 +0100214 try:
215 for checking in checkings_list:
216 if checking["enable"]:
217 status, message = await self.odu.readiness_loop(
218 item=checking["item"],
219 name=checking["name"],
220 namespace=checking["namespace"],
221 flag=checking["flag"],
222 timeout=checking["timeout"],
223 )
224 if not status:
225 return status, message
garciadeblas7eae6f42024-11-08 10:41:38 +0100226 else:
227 db_item["resourceState"] = checking["resourceState"]
garciadeblasbe890702024-12-20 11:39:13 +0100228 db_item = self.update_state_operation_history(
229 db_item, op_id, None, checking["resourceState"]
garciadeblas7eae6f42024-11-08 10:41:38 +0100230 )
231 self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
garciadeblas72412282024-11-07 12:41:54 +0100232 except Exception as e:
233 self.logger.debug(traceback.format_exc())
234 self.logger.debug(f"Exception: {e}", exc_info=True)
235 return False, f"Unexpected exception: {e}"
236 return True, "OK"
237
238 async def check_resource_status(self, key, op_id, op_params, content):
239 self.logger.info(
240 f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}. Content: {content}"
241 )
242 check_resource_function = self._workflows.get(key, {}).get(
243 "check_resource_function"
244 )
245 self.logger.info("check_resource function : {}".format(check_resource_function))
246 if check_resource_function:
247 return await check_resource_function(op_id, op_params, content)
248 else:
249 return await self.check_dummy_operation(op_id, op_params, content)
250
garciadeblas995cbf32024-12-18 12:54:00 +0100251 def decrypted_copy(self, content, fields=["age_pubkey", "age_privkey"]):
252 # This deep copy is intended to be passed to ODU workflows.
253 content_copy = copy.deepcopy(content)
rshric3564942024-11-12 18:12:38 +0000254
255 # decrypting the key
256 self.db.encrypt_decrypt_fields(
garciadeblas995cbf32024-12-18 12:54:00 +0100257 content_copy,
rshric3564942024-11-12 18:12:38 +0000258 "decrypt",
garciadeblas995cbf32024-12-18 12:54:00 +0100259 fields,
rshric3564942024-11-12 18:12:38 +0000260 schema_version="1.11",
garciadeblas995cbf32024-12-18 12:54:00 +0100261 salt=content_copy["_id"],
rshric3564942024-11-12 18:12:38 +0000262 )
garciadeblas995cbf32024-12-18 12:54:00 +0100263 return content_copy
rshric3564942024-11-12 18:12:38 +0000264
garciadeblas72412282024-11-07 12:41:54 +0100265
class ClusterLcm(GitOpsLcm):
    """LCM handler for GitOps-managed Kubernetes clusters."""

    # MongoDB collection backing cluster items.
    db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000268
269 def __init__(self, msg, lcm_tasks, config):
270 """
271 Init, Connect to database, filesystem storage, and messaging
272 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
273 :return: None
274 """
garciadeblas72412282024-11-07 12:41:54 +0100275 super().__init__(msg, lcm_tasks, config)
276 self._workflows = {
277 "create_cluster": {
278 "check_resource_function": self.check_create_cluster,
279 },
garciadeblasb0a42c22024-11-13 16:00:10 +0100280 "register_cluster": {
281 "check_resource_function": self.check_register_cluster,
282 },
283 "update_cluster": {
284 "check_resource_function": self.check_update_cluster,
285 },
garciadeblas72412282024-11-07 12:41:54 +0100286 }
rshri932105f2024-07-05 15:11:55 +0000287 self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
288
    async def create(self, params, order_id):
        """Create a cluster end-to-end via the GitOps create_cluster workflow.

        Steps: initialize the operation, launch and await the workflow,
        persist state, clean up workflow items, check resource readiness,
        propagate state to profiles, and finally push credentials into the
        matching "k8sclusters" document and register it via vim_sdn.

        :param params: dict with "cluster_id" and "operation_id".
        :param order_id: opaque id forwarded to the k8scluster registration.
        :return: None
        """
        self.logger.info("cluster Create Enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        # To get the vim account details
        db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
        workflow_content["vim_account"] = db_vim

        _, workflow_name = await self.odu.launch_workflow(
            "create_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "CREATED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_CREATION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_cluster", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            # Resource checks (and profile propagation) only run when the
            # workflow itself succeeded.
            resource_status, resource_msg = await self.check_resource_status(
                "create_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
            self.update_profile_state(db_cluster, workflow_status, resource_status)

        # NOTE(review): the k8sclusters document is assumed to pre-exist with
        # the same name as the cluster — confirm who creates it.
        db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})

        if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
            result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            db_register["credentials"] = cluster_creds
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
            register = await self.regist.create(db_register, order_id)
            self.logger.debug(f"Register is : {register}")
        else:
            # Creation failed: mark the registration document in error but
            # still attach whatever credentials are retrievable.
            db_register["_admin"]["operationalState"] = "ERROR"
            result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
            # To call the lcm.py for registering the cluster in k8scluster lcm.
            db_register["credentials"] = cluster_creds
            self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)

        return
386
garciadeblas72412282024-11-07 12:41:54 +0100387 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100388 self.logger.info(
389 f"check_create_cluster Operation {op_id}. Params: {op_params}."
390 )
391 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100392 db_cluster = content["cluster"]
393 cluster_name = db_cluster["git_name"].lower()
394 cluster_kustomization_name = cluster_name
395 db_vim_account = content["vim_account"]
396 cloud_type = db_vim_account["vim_type"]
397 nodepool_name = ""
398 if cloud_type == "aws":
399 nodepool_name = f"{cluster_name}-nodegroup"
400 cluster_name = f"{cluster_name}-cluster"
401 elif cloud_type == "gcp":
402 nodepool_name = f"nodepool-{cluster_name}"
403 bootstrap = op_params.get("bootstrap", True)
404 if cloud_type in ("azure", "gcp", "aws"):
405 checkings_list = [
406 {
407 "item": "kustomization",
408 "name": cluster_kustomization_name,
409 "namespace": "managed-resources",
410 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000411 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100412 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100413 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100414 },
415 {
416 "item": f"cluster_{cloud_type}",
417 "name": cluster_name,
418 "namespace": "",
419 "flag": "Synced",
420 "timeout": self._checkloop_resource_timeout,
421 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100422 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100423 },
424 {
425 "item": f"cluster_{cloud_type}",
426 "name": cluster_name,
427 "namespace": "",
428 "flag": "Ready",
429 "timeout": self._checkloop_resource_timeout,
430 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100431 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100432 },
433 {
434 "item": "kustomization",
435 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
436 "namespace": "managed-resources",
437 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000438 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100439 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100440 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100441 },
442 ]
443 else:
444 return False, "Not suitable VIM account to check cluster status"
445 if nodepool_name:
446 nodepool_check = {
447 "item": f"nodepool_{cloud_type}",
448 "name": nodepool_name,
449 "namespace": "",
450 "flag": "Ready",
451 "timeout": self._checkloop_resource_timeout,
452 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100453 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100454 }
455 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000456 return await self.common_check_list(
457 op_id, checkings_list, "clusters", db_cluster
458 )
garciadeblas72412282024-11-07 12:41:54 +0100459
garciadeblasb0a42c22024-11-13 16:00:10 +0100460 async def check_register_cluster(self, op_id, op_params, content):
461 self.logger.info(
462 f"check_register_cluster Operation {op_id}. Params: {op_params}."
463 )
464 # self.logger.debug(f"Content: {content}")
465 db_cluster = content["cluster"]
466 cluster_name = db_cluster["git_name"].lower()
467 cluster_kustomization_name = cluster_name
468 bootstrap = op_params.get("bootstrap", True)
469 checkings_list = [
470 {
471 "item": "kustomization",
472 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
473 "namespace": "managed-resources",
474 "flag": "Ready",
475 "timeout": self._checkloop_kustomization_timeout,
476 "enable": bootstrap,
477 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
478 },
479 ]
yshahcb9075f2024-11-22 12:08:57 +0000480 return await self.common_check_list(
481 op_id, checkings_list, "clusters", db_cluster
482 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100483
484 async def check_update_cluster(self, op_id, op_params, content):
485 self.logger.info(
486 f"check_create_cluster Operation {op_id}. Params: {op_params}."
487 )
488 # self.logger.debug(f"Content: {content}")
489 db_cluster = content["cluster"]
490 cluster_name = db_cluster["git_name"].lower()
491 cluster_kustomization_name = cluster_name
492 db_vim_account = content["vim_account"]
493 cloud_type = db_vim_account["vim_type"]
494 nodepool_name = ""
495 if cloud_type == "aws":
496 nodepool_name = f"{cluster_name}-nodegroup"
497 cluster_name = f"{cluster_name}-cluster"
498 elif cloud_type == "gcp":
499 nodepool_name = f"nodepool-{cluster_name}"
500 if cloud_type in ("azure", "gcp", "aws"):
501 checkings_list = [
502 {
503 "item": "kustomization",
504 "name": cluster_kustomization_name,
505 "namespace": "managed-resources",
506 "flag": "Ready",
507 "timeout": self._checkloop_kustomization_timeout,
508 "enable": True,
509 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
510 },
511 {
512 "item": f"cluster_{cloud_type}",
513 "name": cluster_name,
514 "namespace": "",
515 "flag": "Synced",
516 "timeout": self._checkloop_resource_timeout,
517 "enable": True,
518 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
519 },
520 {
521 "item": f"cluster_{cloud_type}",
522 "name": cluster_name,
523 "namespace": "",
524 "flag": "Ready",
525 "timeout": self._checkloop_resource_timeout,
526 "enable": True,
527 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
528 },
529 ]
530 else:
531 return False, "Not suitable VIM account to check cluster status"
532 if nodepool_name:
533 nodepool_check = {
534 "item": f"nodepool_{cloud_type}",
535 "name": nodepool_name,
536 "namespace": "",
537 "flag": "Ready",
538 "timeout": self._checkloop_resource_timeout,
539 "enable": True,
540 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
541 }
542 checkings_list.append(nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000543 return await self.common_check_list(
544 op_id, checkings_list, "clusters", db_cluster
545 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100546
garciadeblas96b94f52024-07-08 16:18:21 +0200547 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000548 profiles = [
549 "infra_controller_profiles",
550 "infra_config_profiles",
551 "app_profiles",
552 "resource_profiles",
553 ]
rshri948f7de2024-12-02 03:42:35 +0000554 """
rshri932105f2024-07-05 15:11:55 +0000555 profiles_collection = {
556 "infra_controller_profiles": "k8sinfra_controller",
557 "infra_config_profiles": "k8sinfra_config",
558 "app_profiles": "k8sapp",
559 "resource_profiles": "k8sresource",
560 }
rshri948f7de2024-12-02 03:42:35 +0000561 """
Your Name86149632024-11-14 16:17:16 +0000562 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000563 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200564 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000565 # db_collection = profiles_collection[profile_type]
566 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000567 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000568 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200569 db_profile["state"] = db_cluster["state"]
570 db_profile["resourceState"] = db_cluster["resourceState"]
571 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000572 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000573 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000574 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000575 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000576 )
rshri932105f2024-07-05 15:11:55 +0000577 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
578
    async def delete(self, params, order_id):
        """Delete a cluster via the GitOps delete_cluster workflow.

        Registered (not OSM-created) clusters are redirected to deregister.
        Otherwise: launch and await the workflow, persist state, clean up
        workflow items, check resource readiness and, on success, remove the
        cluster (and its k8scluster registration) from the DB.

        :param params: dict with "cluster_id" and "operation_id".
        :param order_id: opaque id forwarded to deregister when applicable.
        :return: None
        """
        self.logger.info("cluster delete Enter")

        # To get the cluster and op ids
        cluster_id = params["cluster_id"]
        op_id = params["operation_id"]

        # To initialize the operation states
        self.initialize_operation(cluster_id, op_id)

        # To get the cluster
        db_cluster = self.db.get_one("clusters", {"_id": cluster_id})

        # To get the operation params details
        op_params = self.get_operation_params(db_cluster, op_id)

        # To copy the cluster content and decrypting fields to use in workflows
        workflow_content = {
            "cluster": self.decrypted_copy(db_cluster),
        }

        # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
        # This if clause will be removed
        # NOTE(review): "created" is compared against the string "false",
        # not a boolean — confirm the stored schema.
        if db_cluster["created"] == "false":
            return await self.deregister(params, order_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_cluster", op_id, op_params, workflow_content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "workflow_status is :{} and workflow_msg is :{}".format(
                workflow_status, workflow_msg
            )
        )
        if workflow_status:
            db_cluster["state"] = "DELETED"
            db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_cluster["state"] = "FAILED_DELETION"
            db_cluster["resourceState"] = "ERROR"
        # has to call update_operation_history return content
        db_cluster = self.update_operation_history(
            db_cluster, op_id, workflow_status, None
        )
        self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "delete_cluster", op_id, op_params, workflow_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )

        if workflow_status:
            # Resource checks only run when the workflow itself succeeded.
            resource_status, resource_msg = await self.check_resource_status(
                "delete_cluster", op_id, op_params, workflow_content
            )
            self.logger.info(
                "resource_status is :{} and resource_msg is :{}".format(
                    resource_status, resource_msg
                )
            )
            if resource_status:
                db_cluster["resourceState"] = "READY"
            else:
                db_cluster["resourceState"] = "ERROR"

            db_cluster["operatingState"] = "IDLE"
            db_cluster = self.update_operation_history(
                db_cluster, op_id, workflow_status, resource_status
            )
            db_cluster["current_operation"] = None
            self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)

        # To delete it from DB
        if db_cluster["state"] == "DELETED":
            self.delete_cluster(db_cluster)

            # To delete it from k8scluster collection
            self.db.del_one("k8sclusters", {"name": db_cluster["name"]})

        return
667
garciadeblas96b94f52024-07-08 16:18:21 +0200668 def delete_cluster(self, db_cluster):
669 # Actually, item_content is equal to db_cluster
670 # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
671 # self.logger.debug("item_content is : {}".format(item_content))
rshri932105f2024-07-05 15:11:55 +0000672
rshri932105f2024-07-05 15:11:55 +0000673 # detach profiles
674 update_dict = None
675 profiles_to_detach = [
676 "infra_controller_profiles",
677 "infra_config_profiles",
678 "app_profiles",
679 "resource_profiles",
680 ]
rshri948f7de2024-12-02 03:42:35 +0000681 """
rshri932105f2024-07-05 15:11:55 +0000682 profiles_collection = {
683 "infra_controller_profiles": "k8sinfra_controller",
684 "infra_config_profiles": "k8sinfra_config",
685 "app_profiles": "k8sapp",
686 "resource_profiles": "k8sresource",
687 }
rshri948f7de2024-12-02 03:42:35 +0000688 """
rshri932105f2024-07-05 15:11:55 +0000689 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200690 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200691 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000692 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000693 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000694 # db_collection = profiles_collection[profile_type]
695 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000696 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200697 self.logger.debug("the db_profile is :{}".format(db_profile))
698 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200699 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000700 )
garciadeblasc2552852024-10-22 12:39:32 +0200701 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000702 "the db_profile name is :{}".format(db_profile["name"])
703 )
garciadeblas96b94f52024-07-08 16:18:21 +0200704 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000705 self.db.del_one(db_collection, {"_id": profile_id})
706 else:
rshri932105f2024-07-05 15:11:55 +0000707 profile_ids.remove(profile_id)
708 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000709 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200710 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000711 )
garciadeblas96b94f52024-07-08 16:18:21 +0200712 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000713
rshri948f7de2024-12-02 03:42:35 +0000714 async def attach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000715 self.logger.info("profile attach Enter")
rshri948f7de2024-12-02 03:42:35 +0000716
garciadeblas995cbf32024-12-18 12:54:00 +0100717 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000718 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000719 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000720
721 # To initialize the operation states
722 self.initialize_operation(cluster_id, op_id)
723
garciadeblas995cbf32024-12-18 12:54:00 +0100724 # To get the cluster
725 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
726
727 # To get the operation params details
728 op_params = self.get_operation_params(db_cluster, op_id)
729
730 # To copy the cluster content and decrypting fields to use in workflows
731 workflow_content = {
732 "cluster": self.decrypted_copy(db_cluster),
733 }
rshri948f7de2024-12-02 03:42:35 +0000734
735 # To get the profile details
736 profile_id = params["profile_id"]
737 profile_type = params["profile_type"]
738 profile_collection = self.profile_collection_mapping[profile_type]
739 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
740 db_profile["profile_type"] = profile_type
741 # content["profile"] = db_profile
garciadeblas995cbf32024-12-18 12:54:00 +0100742 workflow_content["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000743
garciadeblasadb81e82024-11-08 01:11:46 +0100744 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100745 "attach_profile_to_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200746 )
rshri932105f2024-07-05 15:11:55 +0000747 self.logger.info("workflow_name is :{}".format(workflow_name))
748
garciadeblas96b94f52024-07-08 16:18:21 +0200749 workflow_status, workflow_msg = await self.odu.check_workflow_status(
750 workflow_name
751 )
rshri932105f2024-07-05 15:11:55 +0000752 self.logger.info(
753 "workflow_status is :{} and workflow_msg is :{}".format(
754 workflow_status, workflow_msg
755 )
756 )
757 if workflow_status:
758 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
759 else:
760 db_cluster["resourceState"] = "ERROR"
761 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000762 db_cluster = self.update_operation_history(
763 db_cluster, op_id, workflow_status, None
764 )
rshri932105f2024-07-05 15:11:55 +0000765 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
766
767 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100768 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100769 "attach_profile_to_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000770 )
771 self.logger.info(
772 "resource_status is :{} and resource_msg is :{}".format(
773 resource_status, resource_msg
774 )
775 )
776 if resource_status:
777 db_cluster["resourceState"] = "READY"
778 else:
779 db_cluster["resourceState"] = "ERROR"
780
781 db_cluster["operatingState"] = "IDLE"
782 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000783 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000784 )
rshri932105f2024-07-05 15:11:55 +0000785 profile_list = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000786 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000787 profile_list.append(profile_id)
rshri932105f2024-07-05 15:11:55 +0000788 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000789 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000790 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
791
792 return
793
rshri948f7de2024-12-02 03:42:35 +0000794 async def detach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000795 self.logger.info("profile dettach Enter")
rshri948f7de2024-12-02 03:42:35 +0000796
garciadeblas995cbf32024-12-18 12:54:00 +0100797 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000798 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000799 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000800
801 # To initialize the operation states
802 self.initialize_operation(cluster_id, op_id)
803
garciadeblas995cbf32024-12-18 12:54:00 +0100804 # To get the cluster
805 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
806
807 # To get the operation params details
808 op_params = self.get_operation_params(db_cluster, op_id)
809
810 # To copy the cluster content and decrypting fields to use in workflows
811 workflow_content = {
812 "cluster": self.decrypted_copy(db_cluster),
813 }
rshri948f7de2024-12-02 03:42:35 +0000814
815 # To get the profile details
816 profile_id = params["profile_id"]
817 profile_type = params["profile_type"]
818 profile_collection = self.profile_collection_mapping[profile_type]
819 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
820 db_profile["profile_type"] = profile_type
821 # content["profile"] = db_profile
garciadeblas995cbf32024-12-18 12:54:00 +0100822 workflow_content["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000823
garciadeblasadb81e82024-11-08 01:11:46 +0100824 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100825 "detach_profile_from_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200826 )
rshri932105f2024-07-05 15:11:55 +0000827 self.logger.info("workflow_name is :{}".format(workflow_name))
828
garciadeblas96b94f52024-07-08 16:18:21 +0200829 workflow_status, workflow_msg = await self.odu.check_workflow_status(
830 workflow_name
831 )
rshri932105f2024-07-05 15:11:55 +0000832 self.logger.info(
833 "workflow_status is :{} and workflow_msg is :{}".format(
834 workflow_status, workflow_msg
835 )
836 )
837 if workflow_status:
838 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
839 else:
840 db_cluster["resourceState"] = "ERROR"
841 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000842 db_cluster = self.update_operation_history(
843 db_cluster, op_id, workflow_status, None
844 )
rshri932105f2024-07-05 15:11:55 +0000845 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
846
847 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100848 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100849 "detach_profile_from_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000850 )
851 self.logger.info(
852 "resource_status is :{} and resource_msg is :{}".format(
853 resource_status, resource_msg
854 )
855 )
856 if resource_status:
857 db_cluster["resourceState"] = "READY"
858 else:
859 db_cluster["resourceState"] = "ERROR"
860
861 db_cluster["operatingState"] = "IDLE"
862 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000863 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000864 )
rshri932105f2024-07-05 15:11:55 +0000865 profile_list = db_cluster[profile_type]
866 self.logger.info("profile list is : {}".format(profile_list))
867 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000868 profile_list.remove(profile_id)
rshri932105f2024-07-05 15:11:55 +0000869 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000870 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000871 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
872
873 return
874
rshri948f7de2024-12-02 03:42:35 +0000875 async def register(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000876 self.logger.info("cluster register enter")
877
garciadeblas995cbf32024-12-18 12:54:00 +0100878 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000879 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000880 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000881
882 # To initialize the operation states
883 self.initialize_operation(cluster_id, op_id)
884
garciadeblas995cbf32024-12-18 12:54:00 +0100885 # To get the cluster
886 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
887
888 # To get the operation params details
889 op_params = self.get_operation_params(db_cluster, op_id)
890
891 # To copy the cluster content and decrypting fields to use in workflows
892 workflow_content = {
893 "cluster": self.decrypted_copy(db_cluster),
894 }
rshric3564942024-11-12 18:12:38 +0000895
garciadeblasadb81e82024-11-08 01:11:46 +0100896 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100897 "register_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200898 )
rshri932105f2024-07-05 15:11:55 +0000899 self.logger.info("workflow_name is :{}".format(workflow_name))
900
garciadeblas96b94f52024-07-08 16:18:21 +0200901 workflow_status, workflow_msg = await self.odu.check_workflow_status(
902 workflow_name
903 )
rshri932105f2024-07-05 15:11:55 +0000904 self.logger.info(
905 "workflow_status is :{} and workflow_msg is :{}".format(
906 workflow_status, workflow_msg
907 )
908 )
909 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200910 db_cluster["state"] = "CREATED"
911 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000912 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200913 db_cluster["state"] = "FAILED_CREATION"
914 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000915 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000916 db_cluster = self.update_operation_history(
917 db_cluster, op_id, workflow_status, None
918 )
garciadeblas96b94f52024-07-08 16:18:21 +0200919 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000920
garciadeblasdde3a312024-09-17 13:25:06 +0200921 # Clean items used in the workflow, no matter if the workflow succeeded
922 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100923 "register_cluster", op_id, op_params, workflow_content
garciadeblasdde3a312024-09-17 13:25:06 +0200924 )
925 self.logger.info(
926 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
927 )
928
rshri932105f2024-07-05 15:11:55 +0000929 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100930 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +0100931 "register_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +0000932 )
933 self.logger.info(
934 "resource_status is :{} and resource_msg is :{}".format(
935 resource_status, resource_msg
936 )
937 )
938 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200939 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000940 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200941 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000942
garciadeblas96b94f52024-07-08 16:18:21 +0200943 db_cluster["operatingState"] = "IDLE"
944 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000945 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000946 )
shahithya70a3fc92024-11-12 11:01:05 +0000947 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200948 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000949
950 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
951 db_register["credentials"] = db_cluster["credentials"]
952 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
953
954 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
955 # To call the lcm.py for registering the cluster in k8scluster lcm.
956 register = await self.regist.create(db_register, order_id)
957 self.logger.debug(f"Register is : {register}")
958 else:
959 db_register["_admin"]["operationalState"] = "ERROR"
960 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
961
rshri932105f2024-07-05 15:11:55 +0000962 return
963
rshri948f7de2024-12-02 03:42:35 +0000964 async def deregister(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000965 self.logger.info("cluster deregister enter")
966
garciadeblas995cbf32024-12-18 12:54:00 +0100967 # To get the cluster and op ids
rshri948f7de2024-12-02 03:42:35 +0000968 cluster_id = params["cluster_id"]
rshri948f7de2024-12-02 03:42:35 +0000969 op_id = params["operation_id"]
rshri948f7de2024-12-02 03:42:35 +0000970
971 # To initialize the operation states
972 self.initialize_operation(cluster_id, op_id)
973
garciadeblas995cbf32024-12-18 12:54:00 +0100974 # To get the cluster
975 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
976
977 # To get the operation params details
978 op_params = self.get_operation_params(db_cluster, op_id)
979
980 # To copy the cluster content and decrypting fields to use in workflows
981 workflow_content = {
982 "cluster": self.decrypted_copy(db_cluster),
983 }
rshri932105f2024-07-05 15:11:55 +0000984
garciadeblasadb81e82024-11-08 01:11:46 +0100985 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +0100986 "deregister_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +0200987 )
rshri932105f2024-07-05 15:11:55 +0000988 self.logger.info("workflow_name is :{}".format(workflow_name))
989
garciadeblas96b94f52024-07-08 16:18:21 +0200990 workflow_status, workflow_msg = await self.odu.check_workflow_status(
991 workflow_name
992 )
rshri932105f2024-07-05 15:11:55 +0000993 self.logger.info(
994 "workflow_status is :{} and workflow_msg is :{}".format(
995 workflow_status, workflow_msg
996 )
997 )
998 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200999 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +00001000 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001001 db_cluster["state"] = "FAILED_DELETION"
1002 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +00001003 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +00001004 db_cluster = self.update_operation_history(
1005 db_cluster, op_id, workflow_status, None
1006 )
garciadeblas96b94f52024-07-08 16:18:21 +02001007 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +00001008
garciadeblas91bb2c42024-11-12 11:17:12 +01001009 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
1010 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001011 "deregister_cluster", op_id, op_params, workflow_content
garciadeblas91bb2c42024-11-12 11:17:12 +01001012 )
1013 self.logger.info(
1014 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1015 )
1016
rshri932105f2024-07-05 15:11:55 +00001017 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001018 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +01001019 "deregister_cluster", op_id, op_params, workflow_content
rshri932105f2024-07-05 15:11:55 +00001020 )
1021 self.logger.info(
1022 "resource_status is :{} and resource_msg is :{}".format(
1023 resource_status, resource_msg
1024 )
1025 )
1026 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001027 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +00001028 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001029 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +00001030
garciadeblas96b94f52024-07-08 16:18:21 +02001031 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001032 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +02001033 )
garciadeblas6b2112c2024-12-20 10:35:13 +01001034 # TODO: workaround until NBI rejects cluster deletion requests for registered clusters
1035 # Setting created flag to true avoids infinite loops when deregistering a cluster
1036 db_cluster["created"] = "true"
garciadeblas96b94f52024-07-08 16:18:21 +02001037 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +00001038
garciadeblas98f9a3d2024-12-10 13:42:47 +01001039 return await self.delete(params, order_id)
rshri932105f2024-07-05 15:11:55 +00001040
rshri948f7de2024-12-02 03:42:35 +00001041 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001042 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001043 cluster_id = params["cluster_id"]
1044 op_id = params["operation_id"]
1045 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001046 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1047 if result:
1048 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001049 op_len = 0
1050 for operations in db_cluster["operationHistory"]:
1051 if operations["op_id"] == op_id:
1052 db_cluster["operationHistory"][op_len]["result"] = result
1053 db_cluster["operationHistory"][op_len]["endDate"] = time()
1054 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001055 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001056 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001057 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001058 return
1059
rshri948f7de2024-12-02 03:42:35 +00001060 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001061 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001062 # To get the cluster details
1063 cluster_id = params["cluster_id"]
1064 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1065
1066 # To get the operation params details
1067 op_id = params["operation_id"]
1068 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001069
garciadeblas995cbf32024-12-18 12:54:00 +01001070 # To copy the cluster content and decrypting fields to use in workflows
1071 workflow_content = {
1072 "cluster": self.decrypted_copy(db_cluster),
1073 }
rshric3564942024-11-12 18:12:38 +00001074
1075 # vim account details
1076 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
garciadeblas995cbf32024-12-18 12:54:00 +01001077 workflow_content["vim_account"] = db_vim
rshric3564942024-11-12 18:12:38 +00001078
garciadeblasadb81e82024-11-08 01:11:46 +01001079 _, workflow_name = await self.odu.launch_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001080 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001081 )
1082 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1083 workflow_name
1084 )
1085 self.logger.info(
1086 "Workflow Status: {} Workflow Message: {}".format(
1087 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001088 )
garciadeblas96b94f52024-07-08 16:18:21 +02001089 )
1090
1091 if workflow_status:
1092 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1093 else:
1094 db_cluster["resourceState"] = "ERROR"
1095
yshahcb9075f2024-11-22 12:08:57 +00001096 db_cluster = self.update_operation_history(
1097 db_cluster, op_id, workflow_status, None
1098 )
garciadeblas96b94f52024-07-08 16:18:21 +02001099 # self.logger.info("Db content: {}".format(db_content))
1100 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1101 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1102
garciadeblas28bff0f2024-09-16 12:53:07 +02001103 # Clean items used in the workflow, no matter if the workflow succeeded
1104 clean_status, clean_msg = await self.odu.clean_items_workflow(
garciadeblas995cbf32024-12-18 12:54:00 +01001105 "update_cluster", op_id, op_params, workflow_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001106 )
1107 self.logger.info(
1108 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1109 )
garciadeblas96b94f52024-07-08 16:18:21 +02001110 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001111 resource_status, resource_msg = await self.check_resource_status(
garciadeblas995cbf32024-12-18 12:54:00 +01001112 "update_cluster", op_id, op_params, workflow_content
garciadeblas96b94f52024-07-08 16:18:21 +02001113 )
1114 self.logger.info(
1115 "Resource Status: {} Resource Message: {}".format(
1116 resource_status, resource_msg
1117 )
1118 )
yshah771dea82024-07-05 15:11:49 +00001119
1120 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001121 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001122 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001123 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001124
yshah0defcd52024-11-18 07:41:35 +00001125 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001126 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001127 )
1128
garciadeblas96b94f52024-07-08 16:18:21 +02001129 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001130 # self.logger.info("db_cluster: {}".format(db_cluster))
1131 # TODO: verify enxtcondition
1132 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1133 if workflow_status:
1134 if "k8s_version" in op_params:
1135 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001136 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001137 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001138 if "node_size" in op_params:
1139 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001140 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001141 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001142 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001143 return
1144
1145
garciadeblas72412282024-11-07 12:41:54 +01001146class CloudCredentialsLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001147 db_collection = "vim_accounts"
1148
1149 def __init__(self, msg, lcm_tasks, config):
1150 """
1151 Init, Connect to database, filesystem storage, and messaging
1152 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1153 :return: None
1154 """
garciadeblas72412282024-11-07 12:41:54 +01001155 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001156
yshah564ec9c2024-11-29 07:33:32 +00001157 async def add(self, params, order_id):
yshah771dea82024-07-05 15:11:49 +00001158 self.logger.info("Cloud Credentials create")
yshah564ec9c2024-11-29 07:33:32 +00001159 vim_id = params["_id"]
1160 op_id = vim_id
1161 op_params = params
1162 db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
1163 vim_config = db_content.get("config", {})
1164 self.db.encrypt_decrypt_fields(
1165 vim_config.get("credentials"),
1166 "decrypt",
1167 ["password", "secret"],
1168 schema_version=db_content["schema_version"],
1169 salt=vim_id,
1170 )
1171
garciadeblasadb81e82024-11-08 01:11:46 +01001172 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001173 "create_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001174 )
1175
1176 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1177 workflow_name
1178 )
1179
1180 self.logger.info(
1181 "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
1182 )
1183
garciadeblas28bff0f2024-09-16 12:53:07 +02001184 # Clean items used in the workflow, no matter if the workflow succeeded
1185 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001186 "create_cloud_credentials", op_id, op_params, db_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001187 )
1188 self.logger.info(
1189 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1190 )
1191
yshah771dea82024-07-05 15:11:49 +00001192 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001193 resource_status, resource_msg = await self.check_resource_status(
yshah564ec9c2024-11-29 07:33:32 +00001194 "create_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001195 )
1196 self.logger.info(
1197 "Resource Status: {} Resource Message: {}".format(
1198 resource_status, resource_msg
1199 )
1200 )
garciadeblas15b8a302024-09-23 12:40:13 +02001201
yshah564ec9c2024-11-29 07:33:32 +00001202 db_content["_admin"]["operationalState"] = "ENABLED"
1203 for operation in db_content["_admin"]["operations"]:
garciadeblas15b8a302024-09-23 12:40:13 +02001204 if operation["lcmOperationType"] == "create":
1205 operation["operationState"] = "ENABLED"
yshah564ec9c2024-11-29 07:33:32 +00001206 self.logger.info("Content : {}".format(db_content))
1207 self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
yshah771dea82024-07-05 15:11:49 +00001208 return
1209
yshah564ec9c2024-11-29 07:33:32 +00001210 async def edit(self, params, order_id):
1211 self.logger.info("Cloud Credentials Update")
1212 vim_id = params["_id"]
1213 op_id = vim_id
1214 op_params = params
1215 db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
1216 vim_config = db_content.get("config", {})
1217 self.db.encrypt_decrypt_fields(
1218 vim_config.get("credentials"),
1219 "decrypt",
1220 ["password", "secret"],
1221 schema_version=db_content["schema_version"],
1222 salt=vim_id,
1223 )
1224
garciadeblasadb81e82024-11-08 01:11:46 +01001225 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001226 "update_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001227 )
1228 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1229 workflow_name
1230 )
1231 self.logger.info(
1232 "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
1233 )
1234
garciadeblas28bff0f2024-09-16 12:53:07 +02001235 # Clean items used in the workflow, no matter if the workflow succeeded
1236 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001237 "update_cloud_credentials", op_id, op_params, db_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001238 )
1239 self.logger.info(
1240 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1241 )
1242
yshah771dea82024-07-05 15:11:49 +00001243 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001244 resource_status, resource_msg = await self.check_resource_status(
yshah564ec9c2024-11-29 07:33:32 +00001245 "update_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001246 )
1247 self.logger.info(
1248 "Resource Status: {} Resource Message: {}".format(
1249 resource_status, resource_msg
1250 )
1251 )
1252 return
1253
yshah564ec9c2024-11-29 07:33:32 +00001254 async def remove(self, params, order_id):
1255 self.logger.info("Cloud Credentials remove")
1256 vim_id = params["_id"]
1257 op_id = vim_id
1258 op_params = params
1259 db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
1260
garciadeblasadb81e82024-11-08 01:11:46 +01001261 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001262 "delete_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001263 )
1264 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1265 workflow_name
1266 )
1267 self.logger.info(
1268 "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
1269 )
1270
1271 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001272 resource_status, resource_msg = await self.check_resource_status(
yshah564ec9c2024-11-29 07:33:32 +00001273 "delete_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001274 )
1275 self.logger.info(
1276 "Resource Status: {} Resource Message: {}".format(
1277 resource_status, resource_msg
1278 )
1279 )
yshah564ec9c2024-11-29 07:33:32 +00001280 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah771dea82024-07-05 15:11:49 +00001281 return
1282
rshri932105f2024-07-05 15:11:55 +00001283
garciadeblas72412282024-11-07 12:41:54 +01001284class K8sAppLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001285 db_collection = "k8sapp"
1286
rshri932105f2024-07-05 15:11:55 +00001287 def __init__(self, msg, lcm_tasks, config):
1288 """
1289 Init, Connect to database, filesystem storage, and messaging
1290 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1291 :return: None
1292 """
garciadeblas72412282024-11-07 12:41:54 +01001293 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001294
rshri948f7de2024-12-02 03:42:35 +00001295 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001296 self.logger.info("App Create Enter")
1297
rshri948f7de2024-12-02 03:42:35 +00001298 op_id = params["operation_id"]
1299 profile_id = params["profile_id"]
1300
1301 # To initialize the operation states
1302 self.initialize_operation(profile_id, op_id)
1303
1304 content = self.db.get_one("k8sapp", {"_id": profile_id})
1305 content["profile_type"] = "applications"
1306 op_params = self.get_operation_params(content, op_id)
1307 self.db.set_one("k8sapp", {"_id": content["_id"]}, content)
1308
garciadeblasadb81e82024-11-08 01:11:46 +01001309 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001310 "create_profile", op_id, op_params, content
1311 )
rshri932105f2024-07-05 15:11:55 +00001312 self.logger.info("workflow_name is :{}".format(workflow_name))
1313
yshah564ec9c2024-11-29 07:33:32 +00001314 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001315
1316 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001317 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001318 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001319 )
yshah564ec9c2024-11-29 07:33:32 +00001320 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1321 self.logger.info(f"App Create Exit with resource status: {resource_status}")
rshri932105f2024-07-05 15:11:55 +00001322 return
1323
rshri948f7de2024-12-02 03:42:35 +00001324 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001325 self.logger.info("App delete Enter")
rshri932105f2024-07-05 15:11:55 +00001326
rshri948f7de2024-12-02 03:42:35 +00001327 op_id = params["operation_id"]
1328 profile_id = params["profile_id"]
1329
1330 # To initialize the operation states
1331 self.initialize_operation(profile_id, op_id)
1332
1333 content = self.db.get_one("k8sapp", {"_id": profile_id})
1334 op_params = self.get_operation_params(content, op_id)
1335
garciadeblasadb81e82024-11-08 01:11:46 +01001336 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001337 "delete_profile", op_id, op_params, content
1338 )
rshri932105f2024-07-05 15:11:55 +00001339 self.logger.info("workflow_name is :{}".format(workflow_name))
1340
yshah564ec9c2024-11-29 07:33:32 +00001341 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001342
1343 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001344 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001345 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001346 )
rshri932105f2024-07-05 15:11:55 +00001347
yshah564ec9c2024-11-29 07:33:32 +00001348 if resource_status:
1349 content["state"] = "DELETED"
1350 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1351 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1352 self.logger.info(f"App Delete Exit with resource status: {resource_status}")
rshri932105f2024-07-05 15:11:55 +00001353 return
1354
1355
garciadeblas72412282024-11-07 12:41:54 +01001356class K8sResourceLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001357 db_collection = "k8sresource"
1358
rshri932105f2024-07-05 15:11:55 +00001359 def __init__(self, msg, lcm_tasks, config):
1360 """
1361 Init, Connect to database, filesystem storage, and messaging
1362 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1363 :return: None
1364 """
garciadeblas72412282024-11-07 12:41:54 +01001365 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001366
rshri948f7de2024-12-02 03:42:35 +00001367 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001368 self.logger.info("Resource Create Enter")
1369
rshri948f7de2024-12-02 03:42:35 +00001370 op_id = params["operation_id"]
1371 profile_id = params["profile_id"]
1372
1373 # To initialize the operation states
1374 self.initialize_operation(profile_id, op_id)
1375
1376 content = self.db.get_one("k8sresource", {"_id": profile_id})
1377 content["profile_type"] = "managed-resources"
1378 op_params = self.get_operation_params(content, op_id)
1379 self.db.set_one("k8sresource", {"_id": content["_id"]}, content)
1380
garciadeblasadb81e82024-11-08 01:11:46 +01001381 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001382 "create_profile", op_id, op_params, content
1383 )
rshri932105f2024-07-05 15:11:55 +00001384 self.logger.info("workflow_name is :{}".format(workflow_name))
1385
yshah564ec9c2024-11-29 07:33:32 +00001386 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001387
1388 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001389 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001390 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001391 )
yshah564ec9c2024-11-29 07:33:32 +00001392 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1393 self.logger.info(
1394 f"Resource Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001395 )
rshri932105f2024-07-05 15:11:55 +00001396 return
1397
rshri948f7de2024-12-02 03:42:35 +00001398 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001399 self.logger.info("Resource delete Enter")
rshri948f7de2024-12-02 03:42:35 +00001400
1401 op_id = params["operation_id"]
1402 profile_id = params["profile_id"]
1403
1404 # To initialize the operation states
1405 self.initialize_operation(profile_id, op_id)
1406
1407 content = self.db.get_one("k8sresource", {"_id": profile_id})
1408 op_params = self.get_operation_params(content, op_id)
rshri932105f2024-07-05 15:11:55 +00001409
garciadeblasadb81e82024-11-08 01:11:46 +01001410 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001411 "delete_profile", op_id, op_params, content
1412 )
rshri932105f2024-07-05 15:11:55 +00001413 self.logger.info("workflow_name is :{}".format(workflow_name))
1414
yshah564ec9c2024-11-29 07:33:32 +00001415 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001416
1417 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001418 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001419 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001420 )
rshri932105f2024-07-05 15:11:55 +00001421
yshah564ec9c2024-11-29 07:33:32 +00001422 if resource_status:
1423 content["state"] = "DELETED"
1424 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1425 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1426 self.logger.info(
1427 f"Resource Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001428 )
rshri932105f2024-07-05 15:11:55 +00001429 return
1430
1431
garciadeblas72412282024-11-07 12:41:54 +01001432class K8sInfraControllerLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001433 db_collection = "k8sinfra_controller"
1434
rshri932105f2024-07-05 15:11:55 +00001435 def __init__(self, msg, lcm_tasks, config):
1436 """
1437 Init, Connect to database, filesystem storage, and messaging
1438 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1439 :return: None
1440 """
garciadeblas72412282024-11-07 12:41:54 +01001441 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001442
rshri948f7de2024-12-02 03:42:35 +00001443 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001444 self.logger.info("Infra controller Create Enter")
1445
rshri948f7de2024-12-02 03:42:35 +00001446 op_id = params["operation_id"]
1447 profile_id = params["profile_id"]
1448
1449 # To initialize the operation states
1450 self.initialize_operation(profile_id, op_id)
1451
1452 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1453 content["profile_type"] = "infra-controllers"
1454 op_params = self.get_operation_params(content, op_id)
1455 self.db.set_one("k8sinfra_controller", {"_id": content["_id"]}, content)
1456
garciadeblasadb81e82024-11-08 01:11:46 +01001457 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001458 "create_profile", op_id, op_params, content
1459 )
rshri932105f2024-07-05 15:11:55 +00001460 self.logger.info("workflow_name is :{}".format(workflow_name))
1461
yshah564ec9c2024-11-29 07:33:32 +00001462 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001463
1464 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001465 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001466 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001467 )
yshah564ec9c2024-11-29 07:33:32 +00001468 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1469 self.logger.info(
1470 f"Infra Controller Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001471 )
rshri932105f2024-07-05 15:11:55 +00001472 return
1473
rshri948f7de2024-12-02 03:42:35 +00001474 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001475 self.logger.info("Infra controller delete Enter")
rshri932105f2024-07-05 15:11:55 +00001476
rshri948f7de2024-12-02 03:42:35 +00001477 op_id = params["operation_id"]
1478 profile_id = params["profile_id"]
1479
1480 # To initialize the operation states
1481 self.initialize_operation(profile_id, op_id)
1482
1483 content = self.db.get_one("k8sinfra_controller", {"_id": profile_id})
1484 op_params = self.get_operation_params(content, op_id)
1485
garciadeblasadb81e82024-11-08 01:11:46 +01001486 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001487 "delete_profile", op_id, op_params, content
1488 )
rshri932105f2024-07-05 15:11:55 +00001489 self.logger.info("workflow_name is :{}".format(workflow_name))
1490
yshah564ec9c2024-11-29 07:33:32 +00001491 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001492
1493 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001494 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001495 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001496 )
rshri932105f2024-07-05 15:11:55 +00001497
yshah564ec9c2024-11-29 07:33:32 +00001498 if resource_status:
1499 content["state"] = "DELETED"
1500 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1501 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1502 self.logger.info(
1503 f"Infra Controller Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001504 )
rshri932105f2024-07-05 15:11:55 +00001505 return
1506
1507
garciadeblas72412282024-11-07 12:41:54 +01001508class K8sInfraConfigLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001509 db_collection = "k8sinfra_config"
1510
rshri932105f2024-07-05 15:11:55 +00001511 def __init__(self, msg, lcm_tasks, config):
1512 """
1513 Init, Connect to database, filesystem storage, and messaging
1514 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1515 :return: None
1516 """
garciadeblas72412282024-11-07 12:41:54 +01001517 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001518
rshri948f7de2024-12-02 03:42:35 +00001519 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001520 self.logger.info("Infra config Create Enter")
1521
rshri948f7de2024-12-02 03:42:35 +00001522 op_id = params["operation_id"]
1523 profile_id = params["profile_id"]
1524
1525 # To initialize the operation states
1526 self.initialize_operation(profile_id, op_id)
1527
1528 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1529 content["profile_type"] = "infra-configs"
1530 op_params = self.get_operation_params(content, op_id)
1531 self.db.set_one("k8sinfra_config", {"_id": content["_id"]}, content)
1532
garciadeblasadb81e82024-11-08 01:11:46 +01001533 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001534 "create_profile", op_id, op_params, content
1535 )
rshri932105f2024-07-05 15:11:55 +00001536 self.logger.info("workflow_name is :{}".format(workflow_name))
1537
yshah564ec9c2024-11-29 07:33:32 +00001538 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001539
1540 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001541 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001542 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001543 )
yshah564ec9c2024-11-29 07:33:32 +00001544 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1545 self.logger.info(
1546 f"Infra Config Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001547 )
rshri932105f2024-07-05 15:11:55 +00001548 return
1549
rshri948f7de2024-12-02 03:42:35 +00001550 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001551 self.logger.info("Infra config delete Enter")
1552
rshri948f7de2024-12-02 03:42:35 +00001553 op_id = params["operation_id"]
1554 profile_id = params["profile_id"]
1555
1556 # To initialize the operation states
1557 self.initialize_operation(profile_id, op_id)
1558
1559 content = self.db.get_one("k8sinfra_config", {"_id": profile_id})
1560 op_params = self.get_operation_params(content, op_id)
1561
garciadeblasadb81e82024-11-08 01:11:46 +01001562 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001563 "delete_profile", op_id, op_params, content
1564 )
rshri932105f2024-07-05 15:11:55 +00001565 self.logger.info("workflow_name is :{}".format(workflow_name))
rshri932105f2024-07-05 15:11:55 +00001566
yshah564ec9c2024-11-29 07:33:32 +00001567 workflow_status = await self.check_workflow(op_id, workflow_name, content)
1568
rshri932105f2024-07-05 15:11:55 +00001569 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001570 resource_status, content = await self.check_resource(
1571 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001572 )
yshah564ec9c2024-11-29 07:33:32 +00001573
rshri932105f2024-07-05 15:11:55 +00001574 if resource_status:
yshah564ec9c2024-11-29 07:33:32 +00001575 content["state"] = "DELETED"
1576 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1577 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1578 self.logger.info(
1579 f"Infra Config Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001580 )
rshri932105f2024-07-05 15:11:55 +00001581
rshri932105f2024-07-05 15:11:55 +00001582 return
yshah771dea82024-07-05 15:11:49 +00001583
1584
garciadeblas72412282024-11-07 12:41:54 +01001585class OkaLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001586 db_collection = "okas"
1587
1588 def __init__(self, msg, lcm_tasks, config):
1589 """
1590 Init, Connect to database, filesystem storage, and messaging
1591 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1592 :return: None
1593 """
garciadeblas72412282024-11-07 12:41:54 +01001594 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001595
yshah564ec9c2024-11-29 07:33:32 +00001596 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001597 self.logger.info("OKA Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001598 op_id = params["operation_id"]
1599 oka_id = params["oka_id"]
1600 self.initialize_operation(oka_id, op_id)
1601 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1602 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001603
garciadeblasadb81e82024-11-08 01:11:46 +01001604 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001605 "create_oka", op_id, op_params, db_content
1606 )
yshah564ec9c2024-11-29 07:33:32 +00001607
1608 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001609
1610 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001611 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001612 "create_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001613 )
garciadeblas96b94f52024-07-08 16:18:21 +02001614 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001615 self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001616 return
1617
yshah564ec9c2024-11-29 07:33:32 +00001618 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001619 self.logger.info("OKA Edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001620 op_id = params["operation_id"]
1621 oka_id = params["oka_id"]
1622 self.initialize_operation(oka_id, op_id)
1623 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1624 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001625
garciadeblasadb81e82024-11-08 01:11:46 +01001626 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001627 "update_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001628 )
yshah564ec9c2024-11-29 07:33:32 +00001629 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001630
1631 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001632 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001633 "update_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001634 )
garciadeblas96b94f52024-07-08 16:18:21 +02001635 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001636 self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001637 return
1638
yshah564ec9c2024-11-29 07:33:32 +00001639 async def delete(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001640 self.logger.info("OKA delete Enter")
yshah564ec9c2024-11-29 07:33:32 +00001641 op_id = params["operation_id"]
1642 oka_id = params["oka_id"]
1643 self.initialize_operation(oka_id, op_id)
1644 db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
1645 op_params = self.get_operation_params(db_content, op_id)
yshah771dea82024-07-05 15:11:49 +00001646
garciadeblasadb81e82024-11-08 01:11:46 +01001647 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001648 "delete_oka", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001649 )
yshah564ec9c2024-11-29 07:33:32 +00001650 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001651
1652 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001653 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001654 "delete_oka", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001655 )
yshah771dea82024-07-05 15:11:49 +00001656
yshah564ec9c2024-11-29 07:33:32 +00001657 if resource_status:
1658 db_content["state"] == "DELETED"
1659 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
garciadeblas96b94f52024-07-08 16:18:21 +02001660 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah564ec9c2024-11-29 07:33:32 +00001661 self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001662 return
1663
1664
garciadeblas72412282024-11-07 12:41:54 +01001665class KsuLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001666 db_collection = "ksus"
1667
1668 def __init__(self, msg, lcm_tasks, config):
1669 """
1670 Init, Connect to database, filesystem storage, and messaging
1671 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1672 :return: None
1673 """
garciadeblas72412282024-11-07 12:41:54 +01001674 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001675
yshah564ec9c2024-11-29 07:33:32 +00001676 async def create(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001677 self.logger.info("ksu Create Enter")
yshah564ec9c2024-11-29 07:33:32 +00001678 db_content = []
1679 op_params = []
1680 op_id = params["operation_id"]
1681 for ksu_id in params["ksus_list"]:
1682 self.logger.info("Ksu ID: {}".format(ksu_id))
1683 self.initialize_operation(ksu_id, op_id)
1684 db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
1685 self.logger.info("Db KSU: {}".format(db_ksu))
1686 db_content.append(db_ksu)
1687 ksu_params = {}
1688 ksu_params = self.get_operation_params(db_ksu, op_id)
1689 self.logger.info("Operation Params: {}".format(ksu_params))
1690 # Update ksu_params["profile"] with profile name and age-pubkey
1691 profile_type = ksu_params["profile"]["profile_type"]
1692 profile_id = ksu_params["profile"]["_id"]
1693 profile_collection = self.profile_collection_mapping[profile_type]
1694 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1695 ksu_params["profile"]["name"] = db_profile["name"]
1696 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1697 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1698 for oka in ksu_params["oka"]:
1699 if "sw_catalog_path" not in oka:
1700 oka_id = oka["_id"]
1701 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001702 oka_type = MAP_PROFILE[
1703 db_oka.get("profile_type", "infra_controller_profiles")
1704 ]
1705 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001706 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001707
garciadeblasadb81e82024-11-08 01:11:46 +01001708 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001709 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001710 )
yshah564ec9c2024-11-29 07:33:32 +00001711 for db_ksu, ksu_params in zip(db_content, op_params):
1712 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
yshah771dea82024-07-05 15:11:49 +00001713
garciadeblas96b94f52024-07-08 16:18:21 +02001714 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001715 resource_status, db_ksu = await self.check_resource(
1716 "create_ksus", op_id, ksu_params, db_ksu
1717 )
yshah771dea82024-07-05 15:11:49 +00001718
garciadeblas96b94f52024-07-08 16:18:21 +02001719 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1720
garciadeblasd8429852024-10-17 15:30:30 +02001721 # Clean items used in the workflow, no matter if the workflow succeeded
1722 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001723 "create_ksus", op_id, op_params, db_content
garciadeblasd8429852024-10-17 15:30:30 +02001724 )
1725 self.logger.info(
1726 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1727 )
yshah564ec9c2024-11-29 07:33:32 +00001728 self.logger.info(f"KSU Create EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001729 return
1730
yshah564ec9c2024-11-29 07:33:32 +00001731 async def edit(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001732 self.logger.info("ksu edit Enter")
yshah564ec9c2024-11-29 07:33:32 +00001733 db_content = []
1734 op_params = []
1735 op_id = params["operation_id"]
1736 for ksu_id in params["ksus_list"]:
1737 self.initialize_operation(ksu_id, op_id)
1738 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1739 db_content.append(db_ksu)
1740 ksu_params = {}
1741 ksu_params = self.get_operation_params(db_ksu, op_id)
1742 # Update ksu_params["profile"] with profile name and age-pubkey
1743 profile_type = ksu_params["profile"]["profile_type"]
1744 profile_id = ksu_params["profile"]["_id"]
1745 profile_collection = self.profile_collection_mapping[profile_type]
1746 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1747 ksu_params["profile"]["name"] = db_profile["name"]
1748 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1749 # Update ksu_params["oka"] with sw_catalog_path (when missing)
1750 for oka in ksu_params["oka"]:
1751 if "sw_catalog_path" not in oka:
1752 oka_id = oka["_id"]
1753 db_oka = self.db.get_one("okas", {"_id": oka_id})
yshah2f39b8a2024-12-19 11:06:24 +00001754 oka_type = MAP_PROFILE[
1755 db_oka.get("profile_type", "infra_controller_profiles")
1756 ]
1757 oka["sw_catalog_path"] = f"{oka_type}/{db_oka['git_name']}"
yshah564ec9c2024-11-29 07:33:32 +00001758 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001759
garciadeblasadb81e82024-11-08 01:11:46 +01001760 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001761 "update_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001762 )
yshah771dea82024-07-05 15:11:49 +00001763
yshah564ec9c2024-11-29 07:33:32 +00001764 for db_ksu, ksu_params in zip(db_content, op_params):
1765 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1766
garciadeblas96b94f52024-07-08 16:18:21 +02001767 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001768 resource_status, db_ksu = await self.check_resource(
1769 "update_ksus", op_id, ksu_params, db_ksu
garciadeblas96b94f52024-07-08 16:18:21 +02001770 )
garciadeblas96b94f52024-07-08 16:18:21 +02001771 db_ksu["name"] = ksu_params["name"]
1772 db_ksu["description"] = ksu_params["description"]
1773 db_ksu["profile"]["profile_type"] = ksu_params["profile"][
1774 "profile_type"
1775 ]
1776 db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
1777 db_ksu["oka"] = ksu_params["oka"]
1778 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1779
yshah564ec9c2024-11-29 07:33:32 +00001780 # Clean items used in the workflow, no matter if the workflow succeeded
1781 clean_status, clean_msg = await self.odu.clean_items_workflow(
1782 "create_ksus", op_id, op_params, db_content
garciadeblas96b94f52024-07-08 16:18:21 +02001783 )
1784 self.logger.info(
yshah564ec9c2024-11-29 07:33:32 +00001785 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
garciadeblas96b94f52024-07-08 16:18:21 +02001786 )
yshah564ec9c2024-11-29 07:33:32 +00001787 self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001788 return
1789
yshah564ec9c2024-11-29 07:33:32 +00001790 async def delete(self, params, order_id):
1791 self.logger.info("ksu delete Enter")
1792 db_content = []
1793 op_params = []
1794 op_id = params["operation_id"]
1795 for ksu_id in params["ksus_list"]:
1796 self.initialize_operation(ksu_id, op_id)
1797 db_ksu = self.db.get_one("ksus", {"_id": ksu_id})
1798 db_content.append(db_ksu)
1799 ksu_params = {}
1800 ksu_params["profile"] = {}
1801 ksu_params["profile"]["profile_type"] = db_ksu["profile"]["profile_type"]
1802 ksu_params["profile"]["_id"] = db_ksu["profile"]["_id"]
1803 # Update ksu_params["profile"] with profile name and age-pubkey
1804 profile_type = ksu_params["profile"]["profile_type"]
1805 profile_id = ksu_params["profile"]["_id"]
1806 profile_collection = self.profile_collection_mapping[profile_type]
1807 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
1808 ksu_params["profile"]["name"] = db_profile["name"]
1809 ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")
1810 op_params.append(ksu_params)
yshah771dea82024-07-05 15:11:49 +00001811
garciadeblasadb81e82024-11-08 01:11:46 +01001812 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001813 "delete_ksus", op_id, op_params, db_content
1814 )
1815
1816 for db_ksu, ksu_params in zip(db_content, op_params):
1817 workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)
1818
1819 if workflow_status:
1820 resource_status, db_ksu = await self.check_resource(
1821 "delete_ksus", op_id, ksu_params, db_ksu
1822 )
1823
1824 if resource_status:
1825 db_ksu["state"] == "DELETED"
1826 self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
1827 self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})
1828
1829 self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
1830 return
1831
1832 async def clone(self, params, order_id):
1833 self.logger.info("ksu clone Enter")
1834 op_id = params["operation_id"]
1835 ksus_id = params["ksus_list"][0]
1836 self.initialize_operation(ksus_id, op_id)
1837 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1838 op_params = self.get_operation_params(db_content, op_id)
1839 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001840 "clone_ksus", op_id, op_params, db_content
1841 )
yshah564ec9c2024-11-29 07:33:32 +00001842
1843 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001844
1845 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001846 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001847 "clone_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001848 )
garciadeblas96b94f52024-07-08 16:18:21 +02001849 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001850
1851 self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001852 return
1853
yshah564ec9c2024-11-29 07:33:32 +00001854 async def move(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001855 self.logger.info("ksu move Enter")
yshah564ec9c2024-11-29 07:33:32 +00001856 op_id = params["operation_id"]
1857 ksus_id = params["ksus_list"][0]
1858 self.initialize_operation(ksus_id, op_id)
1859 db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
1860 op_params = self.get_operation_params(db_content, op_id)
garciadeblasadb81e82024-11-08 01:11:46 +01001861 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001862 "move_ksus", op_id, op_params, db_content
1863 )
yshah564ec9c2024-11-29 07:33:32 +00001864
1865 workflow_status = await self.check_workflow(op_id, workflow_name, db_content)
yshah771dea82024-07-05 15:11:49 +00001866
1867 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001868 resource_status, db_content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001869 "move_ksus", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001870 )
garciadeblas96b94f52024-07-08 16:18:21 +02001871 self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
yshah564ec9c2024-11-29 07:33:32 +00001872
1873 self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
yshah771dea82024-07-05 15:11:49 +00001874 return