blob: 37e638da5410e9c50e03875e0deb5eea3d0c1e94 [file] [log] [blame]
rshri932105f2024-07-05 15:11:55 +00001# -*- coding: utf-8 -*-
2
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12# implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16__author__ = (
17 "Shrinithi R <shrinithi.r@tataelxsi.co.in>",
18 "Shahithya Y <shahithya.y@tataelxsi.co.in>",
19)
20
rshric3564942024-11-12 18:12:38 +000021import copy
rshri932105f2024-07-05 15:11:55 +000022import logging
yshahd940c652024-10-17 06:11:12 +000023from time import time
garciadeblas72412282024-11-07 12:41:54 +010024import traceback
rshri932105f2024-07-05 15:11:55 +000025from osm_lcm.lcm_utils import LcmBase
26from copy import deepcopy
27from osm_lcm import odu_workflows
28from osm_lcm import vim_sdn
rshri948f7de2024-12-02 03:42:35 +000029from osm_lcm.data_utils.list_utils import find_in_list
rshri932105f2024-07-05 15:11:55 +000030
31
class GitOpsLcm(LcmBase):
    """Base LCM class for GitOps-managed items (clusters, profiles, ...).

    Provides the machinery shared by the concrete subclasses: launching ODU
    workflows and polling their status, keeping the per-operation entries of
    an item's "operationHistory" up to date in the database, and preparing
    decrypted copies of the stored age keys for the workflows.
    """

    # Mongo collection holding the items managed by this class;
    # subclasses override it (e.g. "clusters").
    db_collection = "gitops"
    # Class-level defaults; individual operations track their own status.
    workflow_status = None
    resource_status = None

    # Maps a profile field name (as stored in the cluster document) to the
    # Mongo collection where that profile type lives.
    # Fix: this mapping was previously declared twice with identical
    # content; the redundant second copy has been removed.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """Connect to the database and create the ODU workflow helper.

        :param msg: message-bus instance, passed through to LcmBase and ODU
        :param lcm_tasks: LCM task register
        :param config: LCM configuration object
        """
        self.logger = logging.getLogger("lcm.gitops")
        self.lcm_tasks = lcm_tasks
        self.odu = odu_workflows.OduWorkflow(msg, self.lcm_tasks, config)
        # Timeouts (seconds) for kustomization/resource readiness loops.
        self._checkloop_kustomization_timeout = 900
        self._checkloop_resource_timeout = 900
        # Filled by subclasses: workflow key -> {"check_resource_function": coro}.
        self._workflows = {}
        super().__init__(msg, self.logger)

    async def check_dummy_operation(self, op_id, op_params, content):
        """Fallback resource check: log the call and report success."""
        self.logger.info(f"Operation {op_id}. Params: {op_params}. Content: {content}")
        return True, "OK"

    def initialize_operation(self, item_id, op_id):
        """Mark operation `op_id` of item `item_id` as started in the DB.

        Resets the operation's workflow/resource/operation states and
        records the operation as the item's current one.
        """
        db_item = self.db.get_one(self.db_collection, {"_id": item_id})
        # NOTE(review): assumes op_id is always present in operationHistory;
        # if it is not, `operation` is None and the next line raises.
        operation = next(
            (op for op in db_item.get("operationHistory", []) if op["op_id"] == op_id),
            None,
        )
        operation["workflowState"] = "PROCESSING"
        operation["resourceState"] = "NOT_READY"
        operation["operationState"] = "IN_PROGRESS"
        operation["gitOperationInfo"] = None
        db_item["current_operation"] = operation["op_id"]
        self.db.set_one(self.db_collection, {"_id": item_id}, db_item)

    def get_operation_params(self, item, operation_id):
        """Return the operationParams of `operation_id` in `item` ({} if unset)."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationParams", {})

    def get_operation_type(self, item, operation_id):
        """Return the operationType of `operation_id` in `item` ({} if unset)."""
        operation_history = item.get("operationHistory", [])
        operation = find_in_list(
            operation_history, lambda op: op["op_id"] == operation_id
        )
        return operation.get("operationType", {})

    def update_operation_history(
        self, content, op_id, workflow_status=None, resource_status=None
    ):
        """Update the `op_id` entry of content["operationHistory"] and return content.

        Both status arguments are evaluated for truthiness only: a truthy
        workflow_status marks the workflow COMPLETED, falsy marks it
        ERROR/FAILED; a truthy resource_status marks the resource READY and
        the operation COMPLETED, while falsy (including None, i.e. "not
        checked yet") marks it NOT_READY/FAILED.
        """
        self.logger.info(
            f"Update Operation History in DB. Workflow status: {workflow_status}. Resource status: {resource_status}"
        )
        self.logger.debug(f"Content: {content}")

        op_num = 0
        for operation in content["operationHistory"]:
            self.logger.debug("Operations: {}".format(operation))
            if operation["op_id"] == op_id:
                self.logger.debug("Found operation number: {}".format(op_num))
                now = time()
                if workflow_status:
                    content["operationHistory"][op_num]["workflowState"] = "COMPLETED"
                    content["operationHistory"][op_num]["result"] = True
                else:
                    content["operationHistory"][op_num]["workflowState"] = "ERROR"
                    content["operationHistory"][op_num]["operationState"] = "FAILED"
                    content["operationHistory"][op_num]["result"] = False

                if resource_status:
                    content["operationHistory"][op_num]["resourceState"] = "READY"
                    content["operationHistory"][op_num]["operationState"] = "COMPLETED"
                    content["operationHistory"][op_num]["result"] = True
                else:
                    content["operationHistory"][op_num]["resourceState"] = "NOT_READY"
                    content["operationHistory"][op_num]["operationState"] = "FAILED"
                    content["operationHistory"][op_num]["result"] = False

                content["operationHistory"][op_num]["endDate"] = now
                break
            op_num += 1
        self.logger.debug("content: {}".format(content))

        return content

    async def check_workflow(self, op_id, workflow_name, db_content):
        """Poll workflow `workflow_name` and persist the outcome of `op_id`.

        Updates db_content["state"] according to the operation type
        (create/delete) and workflow result, updates resourceState and the
        operation history, writes the document back to the DB, and returns
        the boolean workflow status.
        """
        workflow_status, workflow_msg = await self.odu.check_workflow_status(
            workflow_name
        )
        self.logger.info(
            "Workflow Status: {} Workflow Message: {}".format(
                workflow_status, workflow_msg
            )
        )
        operation_type = self.get_operation_type(db_content, op_id)
        if operation_type == "create" and workflow_status:
            db_content["state"] = "CREATED"
        elif operation_type == "create" and not workflow_status:
            db_content["state"] = "FAILED_CREATION"
        elif operation_type == "delete" and workflow_status:
            db_content["state"] = "DELETED"
        elif operation_type == "delete" and not workflow_status:
            db_content["state"] = "FAILED_DELETION"

        if workflow_status:
            db_content["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, None
        )
        self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        return workflow_status

    async def check_resource(self, resource_name, op_id, op_params, db_content):
        """Run the resource check for `resource_name` and finish the operation.

        workflow_status is hard-coded True: the workflow phase is assumed to
        have succeeded before this is called. Returns (resource_status,
        updated db_content); the caller is responsible for persisting
        db_content.
        """
        workflow_status = True

        resource_status, resource_msg = await self.check_resource_status(
            resource_name, op_id, op_params, db_content
        )
        self.logger.info(
            "Resource Status: {} Resource Message: {}".format(
                resource_status, resource_msg
            )
        )

        if resource_status:
            db_content["resourceState"] = "READY"
        else:
            db_content["resourceState"] = "ERROR"

        db_content = self.update_operation_history(
            db_content, op_id, workflow_status, resource_status
        )
        db_content["operatingState"] = "IDLE"
        db_content["current_operation"] = None
        return resource_status, db_content

    async def common_check_list(self, op_id, checkings_list, db_collection, db_item):
        """Run the readiness loop for every enabled check in `checkings_list`.

        After each successful check, db_item's resourceState is advanced to
        the check's "resourceState" and persisted. Returns (True, "OK") when
        all enabled checks pass, or (False, message) on the first failure or
        unexpected exception.
        """
        try:
            for checking in checkings_list:
                if checking["enable"]:
                    status, message = await self.odu.readiness_loop(
                        item=checking["item"],
                        name=checking["name"],
                        namespace=checking["namespace"],
                        flag=checking["flag"],
                        timeout=checking["timeout"],
                    )
                    if not status:
                        return status, message
                    else:
                        # The string arguments below are only evaluated for
                        # truthiness by update_operation_history.
                        db_item["resourceState"] = checking["resourceState"]
                        db_item = self.update_operation_history(
                            db_item, op_id, "COMPLETED", checking["resourceState"]
                        )
                        self.db.set_one(db_collection, {"_id": db_item["_id"]}, db_item)
        except Exception as e:
            self.logger.debug(traceback.format_exc())
            self.logger.debug(f"Exception: {e}", exc_info=True)
            return False, f"Unexpected exception: {e}"
        return True, "OK"

    async def check_resource_status(self, key, op_id, op_params, content):
        """Dispatch to the resource check registered for workflow `key`.

        Falls back to check_dummy_operation (which always succeeds) when the
        subclass registered no "check_resource_function" for the key.
        """
        self.logger.info(
            f"Check resource status. Key: {key}. Operation: {op_id}. Params: {op_params}. Content: {content}"
        )
        check_resource_function = self._workflows.get(key, {}).get(
            "check_resource_function"
        )
        self.logger.info("check_resource function : {}".format(check_resource_function))
        if check_resource_function:
            return await check_resource_function(op_id, op_params, content)
        else:
            return await self.check_dummy_operation(op_id, op_params, content)

    def decrypting_key(self, content):
        """Return {"cluster": copy-of-content} with the age keys decrypted.

        A deep copy is used so the encrypted original document is left
        untouched; the decrypted copy is what gets passed to the ODU
        workflows.
        """
        cluster_copy = copy.deepcopy(content)

        # decrypting the key
        self.db.encrypt_decrypt_fields(
            cluster_copy,
            "decrypt",
            ["age_pubkey", "age_privkey"],
            schema_version="1.11",
            salt=cluster_copy["_id"],
        )
        db_cluster_copy = {
            "cluster": cluster_copy,
        }
        return db_cluster_copy
237
garciadeblas72412282024-11-07 12:41:54 +0100238
239class ClusterLcm(GitOpsLcm):
garciadeblas96b94f52024-07-08 16:18:21 +0200240 db_collection = "clusters"
rshri932105f2024-07-05 15:11:55 +0000241
242 def __init__(self, msg, lcm_tasks, config):
243 """
244 Init, Connect to database, filesystem storage, and messaging
245 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
246 :return: None
247 """
garciadeblas72412282024-11-07 12:41:54 +0100248 super().__init__(msg, lcm_tasks, config)
249 self._workflows = {
250 "create_cluster": {
251 "check_resource_function": self.check_create_cluster,
252 },
garciadeblasb0a42c22024-11-13 16:00:10 +0100253 "register_cluster": {
254 "check_resource_function": self.check_register_cluster,
255 },
256 "update_cluster": {
257 "check_resource_function": self.check_update_cluster,
258 },
garciadeblas72412282024-11-07 12:41:54 +0100259 }
rshri932105f2024-07-05 15:11:55 +0000260 self.regist = vim_sdn.K8sClusterLcm(msg, self.lcm_tasks, config)
261
rshri948f7de2024-12-02 03:42:35 +0000262 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000263 self.logger.info("cluster Create Enter")
264
rshri948f7de2024-12-02 03:42:35 +0000265 # To get the cluster details
266 cluster_id = params["cluster_id"]
267 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
268
269 # To get the operation params details
270 op_id = params["operation_id"]
271 op_params = self.get_operation_params(db_cluster, op_id)
272
273 # To initialize the operation states
274 self.initialize_operation(cluster_id, op_id)
275
276 # To copy the cluster content and decrypting the key to use in workflows
rshric3564942024-11-12 18:12:38 +0000277 db_cluster_copy = self.decrypting_key(db_cluster)
278
rshri948f7de2024-12-02 03:42:35 +0000279 # To get the vim account details
rshric3564942024-11-12 18:12:38 +0000280 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
281 db_cluster_copy["vim_account"] = db_vim
282
garciadeblasadb81e82024-11-08 01:11:46 +0100283 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +0000284 "create_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200285 )
rshri932105f2024-07-05 15:11:55 +0000286 self.logger.info("workflow_name is :{}".format(workflow_name))
287
garciadeblas96b94f52024-07-08 16:18:21 +0200288 workflow_status, workflow_msg = await self.odu.check_workflow_status(
289 workflow_name
290 )
rshri932105f2024-07-05 15:11:55 +0000291 self.logger.info(
292 "workflow_status is :{} and workflow_msg is :{}".format(
293 workflow_status, workflow_msg
294 )
295 )
296 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200297 db_cluster["state"] = "CREATED"
298 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000299 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200300 db_cluster["state"] = "FAILED_CREATION"
301 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000302 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000303 db_cluster = self.update_operation_history(
304 db_cluster, op_id, workflow_status, None
305 )
garciadeblas96b94f52024-07-08 16:18:21 +0200306 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000307
garciadeblas28bff0f2024-09-16 12:53:07 +0200308 # Clean items used in the workflow, no matter if the workflow succeeded
309 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +0000310 "create_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +0200311 )
312 self.logger.info(
313 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
314 )
315
rshri932105f2024-07-05 15:11:55 +0000316 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100317 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +0000318 "create_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000319 )
320 self.logger.info(
321 "resource_status is :{} and resource_msg is :{}".format(
322 resource_status, resource_msg
323 )
324 )
325 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200326 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000327 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200328 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000329
garciadeblas96b94f52024-07-08 16:18:21 +0200330 db_cluster["operatingState"] = "IDLE"
331 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000332 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000333 )
shahithya70a3fc92024-11-12 11:01:05 +0000334 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200335 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
336 self.update_profile_state(db_cluster, workflow_status, resource_status)
rshri948f7de2024-12-02 03:42:35 +0000337
338 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
339
340 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
341 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
342 # To call the lcm.py for registering the cluster in k8scluster lcm.
343 db_register["credentials"] = cluster_creds
344 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
345 register = await self.regist.create(db_register, order_id)
346 self.logger.debug(f"Register is : {register}")
347 else:
348 db_register["_admin"]["operationalState"] = "ERROR"
349 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
350 # To call the lcm.py for registering the cluster in k8scluster lcm.
351 db_register["credentials"] = cluster_creds
352 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
353
rshri932105f2024-07-05 15:11:55 +0000354 return
355
garciadeblas72412282024-11-07 12:41:54 +0100356 async def check_create_cluster(self, op_id, op_params, content):
garciadeblas7eae6f42024-11-08 10:41:38 +0100357 self.logger.info(
358 f"check_create_cluster Operation {op_id}. Params: {op_params}."
359 )
360 # self.logger.debug(f"Content: {content}")
garciadeblas72412282024-11-07 12:41:54 +0100361 db_cluster = content["cluster"]
362 cluster_name = db_cluster["git_name"].lower()
363 cluster_kustomization_name = cluster_name
364 db_vim_account = content["vim_account"]
365 cloud_type = db_vim_account["vim_type"]
366 nodepool_name = ""
367 if cloud_type == "aws":
368 nodepool_name = f"{cluster_name}-nodegroup"
369 cluster_name = f"{cluster_name}-cluster"
370 elif cloud_type == "gcp":
371 nodepool_name = f"nodepool-{cluster_name}"
372 bootstrap = op_params.get("bootstrap", True)
373 if cloud_type in ("azure", "gcp", "aws"):
374 checkings_list = [
375 {
376 "item": "kustomization",
377 "name": cluster_kustomization_name,
378 "namespace": "managed-resources",
379 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000380 "timeout": 1500,
garciadeblas72412282024-11-07 12:41:54 +0100381 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100382 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
garciadeblas72412282024-11-07 12:41:54 +0100383 },
384 {
385 "item": f"cluster_{cloud_type}",
386 "name": cluster_name,
387 "namespace": "",
388 "flag": "Synced",
389 "timeout": self._checkloop_resource_timeout,
390 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100391 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100392 },
393 {
394 "item": f"cluster_{cloud_type}",
395 "name": cluster_name,
396 "namespace": "",
397 "flag": "Ready",
398 "timeout": self._checkloop_resource_timeout,
399 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100400 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
garciadeblas72412282024-11-07 12:41:54 +0100401 },
402 {
403 "item": "kustomization",
404 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
405 "namespace": "managed-resources",
406 "flag": "Ready",
yshahcb9075f2024-11-22 12:08:57 +0000407 "timeout": self._checkloop_resource_timeout,
garciadeblas72412282024-11-07 12:41:54 +0100408 "enable": bootstrap,
garciadeblas7eae6f42024-11-08 10:41:38 +0100409 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
garciadeblas72412282024-11-07 12:41:54 +0100410 },
411 ]
412 else:
413 return False, "Not suitable VIM account to check cluster status"
414 if nodepool_name:
415 nodepool_check = {
416 "item": f"nodepool_{cloud_type}",
417 "name": nodepool_name,
418 "namespace": "",
419 "flag": "Ready",
420 "timeout": self._checkloop_resource_timeout,
421 "enable": True,
garciadeblas7eae6f42024-11-08 10:41:38 +0100422 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
garciadeblas72412282024-11-07 12:41:54 +0100423 }
424 checkings_list.insert(3, nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000425 return await self.common_check_list(
426 op_id, checkings_list, "clusters", db_cluster
427 )
garciadeblas72412282024-11-07 12:41:54 +0100428
garciadeblasb0a42c22024-11-13 16:00:10 +0100429 async def check_register_cluster(self, op_id, op_params, content):
430 self.logger.info(
431 f"check_register_cluster Operation {op_id}. Params: {op_params}."
432 )
433 # self.logger.debug(f"Content: {content}")
434 db_cluster = content["cluster"]
435 cluster_name = db_cluster["git_name"].lower()
436 cluster_kustomization_name = cluster_name
437 bootstrap = op_params.get("bootstrap", True)
438 checkings_list = [
439 {
440 "item": "kustomization",
441 "name": f"{cluster_kustomization_name}-bstrp-fluxctrl",
442 "namespace": "managed-resources",
443 "flag": "Ready",
444 "timeout": self._checkloop_kustomization_timeout,
445 "enable": bootstrap,
446 "resourceState": "IN_PROGRESS.BOOTSTRAP_OK",
447 },
448 ]
yshahcb9075f2024-11-22 12:08:57 +0000449 return await self.common_check_list(
450 op_id, checkings_list, "clusters", db_cluster
451 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100452
453 async def check_update_cluster(self, op_id, op_params, content):
454 self.logger.info(
455 f"check_create_cluster Operation {op_id}. Params: {op_params}."
456 )
457 # self.logger.debug(f"Content: {content}")
458 db_cluster = content["cluster"]
459 cluster_name = db_cluster["git_name"].lower()
460 cluster_kustomization_name = cluster_name
461 db_vim_account = content["vim_account"]
462 cloud_type = db_vim_account["vim_type"]
463 nodepool_name = ""
464 if cloud_type == "aws":
465 nodepool_name = f"{cluster_name}-nodegroup"
466 cluster_name = f"{cluster_name}-cluster"
467 elif cloud_type == "gcp":
468 nodepool_name = f"nodepool-{cluster_name}"
469 if cloud_type in ("azure", "gcp", "aws"):
470 checkings_list = [
471 {
472 "item": "kustomization",
473 "name": cluster_kustomization_name,
474 "namespace": "managed-resources",
475 "flag": "Ready",
476 "timeout": self._checkloop_kustomization_timeout,
477 "enable": True,
478 "resourceState": "IN_PROGRESS.KUSTOMIZATION_READY",
479 },
480 {
481 "item": f"cluster_{cloud_type}",
482 "name": cluster_name,
483 "namespace": "",
484 "flag": "Synced",
485 "timeout": self._checkloop_resource_timeout,
486 "enable": True,
487 "resourceState": "IN_PROGRESS.RESOURCE_SYNCED.CLUSTER",
488 },
489 {
490 "item": f"cluster_{cloud_type}",
491 "name": cluster_name,
492 "namespace": "",
493 "flag": "Ready",
494 "timeout": self._checkloop_resource_timeout,
495 "enable": True,
496 "resourceState": "IN_PROGRESS.RESOURCE_READY.CLUSTER",
497 },
498 ]
499 else:
500 return False, "Not suitable VIM account to check cluster status"
501 if nodepool_name:
502 nodepool_check = {
503 "item": f"nodepool_{cloud_type}",
504 "name": nodepool_name,
505 "namespace": "",
506 "flag": "Ready",
507 "timeout": self._checkloop_resource_timeout,
508 "enable": True,
509 "resourceState": "IN_PROGRESS.RESOURCE_READY.NODEPOOL",
510 }
511 checkings_list.append(nodepool_check)
yshahcb9075f2024-11-22 12:08:57 +0000512 return await self.common_check_list(
513 op_id, checkings_list, "clusters", db_cluster
514 )
garciadeblasb0a42c22024-11-13 16:00:10 +0100515
garciadeblas96b94f52024-07-08 16:18:21 +0200516 def update_profile_state(self, db_cluster, workflow_status, resource_status):
rshri932105f2024-07-05 15:11:55 +0000517 profiles = [
518 "infra_controller_profiles",
519 "infra_config_profiles",
520 "app_profiles",
521 "resource_profiles",
522 ]
rshri948f7de2024-12-02 03:42:35 +0000523 """
rshri932105f2024-07-05 15:11:55 +0000524 profiles_collection = {
525 "infra_controller_profiles": "k8sinfra_controller",
526 "infra_config_profiles": "k8sinfra_config",
527 "app_profiles": "k8sapp",
528 "resource_profiles": "k8sresource",
529 }
rshri948f7de2024-12-02 03:42:35 +0000530 """
Your Name86149632024-11-14 16:17:16 +0000531 self.logger.info("the db_cluster is :{}".format(db_cluster))
rshri932105f2024-07-05 15:11:55 +0000532 for profile_type in profiles:
garciadeblas96b94f52024-07-08 16:18:21 +0200533 profile_id = db_cluster[profile_type]
rshri948f7de2024-12-02 03:42:35 +0000534 # db_collection = profiles_collection[profile_type]
535 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000536 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
yshahcb9075f2024-11-22 12:08:57 +0000537 op_id = db_profile["operationHistory"][-1].get("op_id")
garciadeblas96b94f52024-07-08 16:18:21 +0200538 db_profile["state"] = db_cluster["state"]
539 db_profile["resourceState"] = db_cluster["resourceState"]
540 db_profile["operatingState"] = db_cluster["operatingState"]
rshric3564942024-11-12 18:12:38 +0000541 db_profile["age_pubkey"] = db_cluster["age_pubkey"]
Your Name86149632024-11-14 16:17:16 +0000542 db_profile["age_privkey"] = db_cluster["age_privkey"]
rshri932105f2024-07-05 15:11:55 +0000543 db_profile = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000544 db_profile, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000545 )
rshri932105f2024-07-05 15:11:55 +0000546 self.db.set_one(db_collection, {"_id": db_profile["_id"]}, db_profile)
547
rshri948f7de2024-12-02 03:42:35 +0000548 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000549 self.logger.info("cluster delete Enter")
rshri948f7de2024-12-02 03:42:35 +0000550
551 # To get the cluster details
552 cluster_id = params["cluster_id"]
553 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
554
555 # To get the operation params details
556 op_id = params["operation_id"]
557 op_params = self.get_operation_params(db_cluster, op_id)
558
559 # To initialize the operation states
560 self.initialize_operation(cluster_id, op_id)
561
562 # To copy the cluster content and decrypting the key to use in workflows
563 db_cluster_copy = self.decrypting_key(db_cluster)
564
garciadeblas12470812024-11-18 10:33:12 +0100565 if db_cluster["created"] == "false":
rshri948f7de2024-12-02 03:42:35 +0000566 return await self.deregister(params, order_id)
rshri932105f2024-07-05 15:11:55 +0000567
garciadeblasadb81e82024-11-08 01:11:46 +0100568 _, workflow_name = await self.odu.launch_workflow(
rshri948f7de2024-12-02 03:42:35 +0000569 "delete_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200570 )
rshri932105f2024-07-05 15:11:55 +0000571 self.logger.info("workflow_name is :{}".format(workflow_name))
572
garciadeblas96b94f52024-07-08 16:18:21 +0200573 workflow_status, workflow_msg = await self.odu.check_workflow_status(
574 workflow_name
575 )
rshri932105f2024-07-05 15:11:55 +0000576 self.logger.info(
577 "workflow_status is :{} and workflow_msg is :{}".format(
578 workflow_status, workflow_msg
579 )
580 )
581 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200582 db_cluster["state"] = "DELETED"
583 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000584 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200585 db_cluster["state"] = "FAILED_DELETION"
586 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000587 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000588 db_cluster = self.update_operation_history(
589 db_cluster, op_id, workflow_status, None
590 )
garciadeblas96b94f52024-07-08 16:18:21 +0200591 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000592
593 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100594 resource_status, resource_msg = await self.check_resource_status(
rshri948f7de2024-12-02 03:42:35 +0000595 "delete_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000596 )
597 self.logger.info(
598 "resource_status is :{} and resource_msg is :{}".format(
599 resource_status, resource_msg
600 )
601 )
602 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200603 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000604 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200605 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000606
garciadeblas96b94f52024-07-08 16:18:21 +0200607 db_cluster["operatingState"] = "IDLE"
608 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000609 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +0200610 )
shahithya70a3fc92024-11-12 11:01:05 +0000611 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200612 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000613
garciadeblas96b94f52024-07-08 16:18:21 +0200614 # To delete it from DB
615 if db_cluster["state"] == "DELETED":
616 self.delete_cluster(db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000617
618 # To delete it from k8scluster collection
619 self.db.del_one("k8sclusters", {"name": db_cluster["name"]})
620
rshri932105f2024-07-05 15:11:55 +0000621 return
622
garciadeblas96b94f52024-07-08 16:18:21 +0200623 def delete_cluster(self, db_cluster):
624 # Actually, item_content is equal to db_cluster
625 # item_content = self.db.get_one("clusters", {"_id": db_cluster["_id"]})
626 # self.logger.debug("item_content is : {}".format(item_content))
rshri932105f2024-07-05 15:11:55 +0000627
rshri932105f2024-07-05 15:11:55 +0000628 # detach profiles
629 update_dict = None
630 profiles_to_detach = [
631 "infra_controller_profiles",
632 "infra_config_profiles",
633 "app_profiles",
634 "resource_profiles",
635 ]
rshri948f7de2024-12-02 03:42:35 +0000636 """
rshri932105f2024-07-05 15:11:55 +0000637 profiles_collection = {
638 "infra_controller_profiles": "k8sinfra_controller",
639 "infra_config_profiles": "k8sinfra_config",
640 "app_profiles": "k8sapp",
641 "resource_profiles": "k8sresource",
642 }
rshri948f7de2024-12-02 03:42:35 +0000643 """
rshri932105f2024-07-05 15:11:55 +0000644 for profile_type in profiles_to_detach:
garciadeblas96b94f52024-07-08 16:18:21 +0200645 if db_cluster.get(profile_type):
garciadeblas96b94f52024-07-08 16:18:21 +0200646 profile_ids = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000647 profile_ids_copy = deepcopy(profile_ids)
rshri932105f2024-07-05 15:11:55 +0000648 for profile_id in profile_ids_copy:
rshri948f7de2024-12-02 03:42:35 +0000649 # db_collection = profiles_collection[profile_type]
650 db_collection = self.profile_collection_mapping[profile_type]
rshri932105f2024-07-05 15:11:55 +0000651 db_profile = self.db.get_one(db_collection, {"_id": profile_id})
garciadeblasc2552852024-10-22 12:39:32 +0200652 self.logger.debug("the db_profile is :{}".format(db_profile))
653 self.logger.debug(
garciadeblas96b94f52024-07-08 16:18:21 +0200654 "the item_content name is :{}".format(db_cluster["name"])
rshri932105f2024-07-05 15:11:55 +0000655 )
garciadeblasc2552852024-10-22 12:39:32 +0200656 self.logger.debug(
rshri932105f2024-07-05 15:11:55 +0000657 "the db_profile name is :{}".format(db_profile["name"])
658 )
garciadeblas96b94f52024-07-08 16:18:21 +0200659 if db_cluster["name"] == db_profile["name"]:
rshri932105f2024-07-05 15:11:55 +0000660 self.db.del_one(db_collection, {"_id": profile_id})
661 else:
rshri932105f2024-07-05 15:11:55 +0000662 profile_ids.remove(profile_id)
663 update_dict = {profile_type: profile_ids}
rshri932105f2024-07-05 15:11:55 +0000664 self.db.set_one(
garciadeblas96b94f52024-07-08 16:18:21 +0200665 "clusters", {"_id": db_cluster["_id"]}, update_dict
rshri932105f2024-07-05 15:11:55 +0000666 )
garciadeblas96b94f52024-07-08 16:18:21 +0200667 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri932105f2024-07-05 15:11:55 +0000668
rshri948f7de2024-12-02 03:42:35 +0000669 async def attach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000670 self.logger.info("profile attach Enter")
rshri948f7de2024-12-02 03:42:35 +0000671
672 # To get the cluster details
673 cluster_id = params["cluster_id"]
674 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
675 # content = {
676 # "cluster": db_cluster,
677 # }
678
679 # To get the operation params details
680 op_id = params["operation_id"]
681 op_params = self.get_operation_params(db_cluster, op_id)
682
683 # To initialize the operation states
684 self.initialize_operation(cluster_id, op_id)
685
686 # To copy the cluster content and decrypting the key to use in workflows
687 db_cluster_copy = self.decrypting_key(db_cluster)
688
689 # To get the profile details
690 profile_id = params["profile_id"]
691 profile_type = params["profile_type"]
692 profile_collection = self.profile_collection_mapping[profile_type]
693 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
694 db_profile["profile_type"] = profile_type
695 # content["profile"] = db_profile
696 db_cluster_copy["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000697
garciadeblasadb81e82024-11-08 01:11:46 +0100698 _, workflow_name = await self.odu.launch_workflow(
rshri948f7de2024-12-02 03:42:35 +0000699 "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200700 )
rshri932105f2024-07-05 15:11:55 +0000701 self.logger.info("workflow_name is :{}".format(workflow_name))
702
garciadeblas96b94f52024-07-08 16:18:21 +0200703 workflow_status, workflow_msg = await self.odu.check_workflow_status(
704 workflow_name
705 )
rshri932105f2024-07-05 15:11:55 +0000706 self.logger.info(
707 "workflow_status is :{} and workflow_msg is :{}".format(
708 workflow_status, workflow_msg
709 )
710 )
711 if workflow_status:
712 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
713 else:
714 db_cluster["resourceState"] = "ERROR"
715 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000716 db_cluster = self.update_operation_history(
717 db_cluster, op_id, workflow_status, None
718 )
rshri932105f2024-07-05 15:11:55 +0000719 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
720
721 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100722 resource_status, resource_msg = await self.check_resource_status(
rshri948f7de2024-12-02 03:42:35 +0000723 "attach_profile_to_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000724 )
725 self.logger.info(
726 "resource_status is :{} and resource_msg is :{}".format(
727 resource_status, resource_msg
728 )
729 )
730 if resource_status:
731 db_cluster["resourceState"] = "READY"
732 else:
733 db_cluster["resourceState"] = "ERROR"
734
735 db_cluster["operatingState"] = "IDLE"
736 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000737 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000738 )
rshri932105f2024-07-05 15:11:55 +0000739 profile_list = db_cluster[profile_type]
rshri932105f2024-07-05 15:11:55 +0000740 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000741 profile_list.append(profile_id)
rshri932105f2024-07-05 15:11:55 +0000742 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000743 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000744 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
745
746 return
747
rshri948f7de2024-12-02 03:42:35 +0000748 async def detach_profile(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000749 self.logger.info("profile dettach Enter")
rshri948f7de2024-12-02 03:42:35 +0000750
751 # To get the cluster details
752 cluster_id = params["cluster_id"]
753 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
754 # content = {
755 # "cluster": db_cluster,
756 # }
757
758 # To get the operation params details
759 op_id = params["operation_id"]
760 op_params = self.get_operation_params(db_cluster, op_id)
761
762 # To initialize the operation states
763 self.initialize_operation(cluster_id, op_id)
764
765 # To copy the cluster content and decrypting the key to use in workflows
766 db_cluster_copy = self.decrypting_key(db_cluster)
767
768 # To get the profile details
769 profile_id = params["profile_id"]
770 profile_type = params["profile_type"]
771 profile_collection = self.profile_collection_mapping[profile_type]
772 db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
773 db_profile["profile_type"] = profile_type
774 # content["profile"] = db_profile
775 db_cluster_copy["profile"] = db_profile
rshri932105f2024-07-05 15:11:55 +0000776
garciadeblasadb81e82024-11-08 01:11:46 +0100777 _, workflow_name = await self.odu.launch_workflow(
rshri948f7de2024-12-02 03:42:35 +0000778 "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200779 )
rshri932105f2024-07-05 15:11:55 +0000780 self.logger.info("workflow_name is :{}".format(workflow_name))
781
garciadeblas96b94f52024-07-08 16:18:21 +0200782 workflow_status, workflow_msg = await self.odu.check_workflow_status(
783 workflow_name
784 )
rshri932105f2024-07-05 15:11:55 +0000785 self.logger.info(
786 "workflow_status is :{} and workflow_msg is :{}".format(
787 workflow_status, workflow_msg
788 )
789 )
790 if workflow_status:
791 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
792 else:
793 db_cluster["resourceState"] = "ERROR"
794 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000795 db_cluster = self.update_operation_history(
796 db_cluster, op_id, workflow_status, None
797 )
rshri932105f2024-07-05 15:11:55 +0000798 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
799
800 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100801 resource_status, resource_msg = await self.check_resource_status(
rshri948f7de2024-12-02 03:42:35 +0000802 "detach_profile_from_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000803 )
804 self.logger.info(
805 "resource_status is :{} and resource_msg is :{}".format(
806 resource_status, resource_msg
807 )
808 )
809 if resource_status:
810 db_cluster["resourceState"] = "READY"
811 else:
812 db_cluster["resourceState"] = "ERROR"
813
814 db_cluster["operatingState"] = "IDLE"
815 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000816 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000817 )
rshri932105f2024-07-05 15:11:55 +0000818 profile_list = db_cluster[profile_type]
819 self.logger.info("profile list is : {}".format(profile_list))
820 if resource_status:
rshri932105f2024-07-05 15:11:55 +0000821 profile_list.remove(profile_id)
rshri932105f2024-07-05 15:11:55 +0000822 db_cluster[profile_type] = profile_list
shahithya70a3fc92024-11-12 11:01:05 +0000823 db_cluster["current_operation"] = None
rshri932105f2024-07-05 15:11:55 +0000824 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
825
826 return
827
rshri948f7de2024-12-02 03:42:35 +0000828 async def register(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000829 self.logger.info("cluster register enter")
830
rshri948f7de2024-12-02 03:42:35 +0000831 # To get the cluster details
832 cluster_id = params["cluster_id"]
833 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
834 # content = {
835 # "cluster": db_cluster,
836 # }
837
838 # To get the operation params details
839 op_id = params["operation_id"]
840 op_params = self.get_operation_params(db_cluster, op_id)
841
842 # To initialize the operation states
843 self.initialize_operation(cluster_id, op_id)
844
845 # To copy the cluster content and decrypting the key to use in workflows
rshric3564942024-11-12 18:12:38 +0000846 db_cluster_copy = self.decrypting_key(db_cluster)
847
garciadeblasadb81e82024-11-08 01:11:46 +0100848 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +0000849 "register_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200850 )
rshri932105f2024-07-05 15:11:55 +0000851 self.logger.info("workflow_name is :{}".format(workflow_name))
852
garciadeblas96b94f52024-07-08 16:18:21 +0200853 workflow_status, workflow_msg = await self.odu.check_workflow_status(
854 workflow_name
855 )
rshri932105f2024-07-05 15:11:55 +0000856 self.logger.info(
857 "workflow_status is :{} and workflow_msg is :{}".format(
858 workflow_status, workflow_msg
859 )
860 )
861 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200862 db_cluster["state"] = "CREATED"
863 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000864 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200865 db_cluster["state"] = "FAILED_CREATION"
866 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000867 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000868 db_cluster = self.update_operation_history(
869 db_cluster, op_id, workflow_status, None
870 )
garciadeblas96b94f52024-07-08 16:18:21 +0200871 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000872
garciadeblasdde3a312024-09-17 13:25:06 +0200873 # Clean items used in the workflow, no matter if the workflow succeeded
874 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +0000875 "register_cluster", op_id, op_params, db_cluster_copy
garciadeblasdde3a312024-09-17 13:25:06 +0200876 )
877 self.logger.info(
878 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
879 )
880
rshri932105f2024-07-05 15:11:55 +0000881 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100882 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +0000883 "register_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000884 )
885 self.logger.info(
886 "resource_status is :{} and resource_msg is :{}".format(
887 resource_status, resource_msg
888 )
889 )
890 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200891 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000892 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200893 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000894
garciadeblas96b94f52024-07-08 16:18:21 +0200895 db_cluster["operatingState"] = "IDLE"
896 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000897 db_cluster, op_id, workflow_status, resource_status
rshri932105f2024-07-05 15:11:55 +0000898 )
shahithya70a3fc92024-11-12 11:01:05 +0000899 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200900 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +0000901
902 db_register = self.db.get_one("k8sclusters", {"name": db_cluster["name"]})
903 db_register["credentials"] = db_cluster["credentials"]
904 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
905
906 if db_cluster["resourceState"] == "READY" and db_cluster["state"] == "CREATED":
907 # To call the lcm.py for registering the cluster in k8scluster lcm.
908 register = await self.regist.create(db_register, order_id)
909 self.logger.debug(f"Register is : {register}")
910 else:
911 db_register["_admin"]["operationalState"] = "ERROR"
912 self.db.set_one("k8sclusters", {"_id": db_register["_id"]}, db_register)
913
rshri932105f2024-07-05 15:11:55 +0000914 return
915
rshri948f7de2024-12-02 03:42:35 +0000916 async def deregister(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +0000917 self.logger.info("cluster deregister enter")
918
rshri948f7de2024-12-02 03:42:35 +0000919 # To get the cluster details
920 cluster_id = params["cluster_id"]
921 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
922 # content = {
923 # "cluster": db_cluster,
924 # }
925
926 # To get the operation params details
927 op_id = params["operation_id"]
928 op_params = self.get_operation_params(db_cluster, op_id)
929
930 # To initialize the operation states
931 self.initialize_operation(cluster_id, op_id)
932
933 # To copy the cluster content and decrypting the key to use in workflows
934 db_cluster_copy = self.decrypting_key(db_cluster)
rshri932105f2024-07-05 15:11:55 +0000935
garciadeblasadb81e82024-11-08 01:11:46 +0100936 _, workflow_name = await self.odu.launch_workflow(
rshri948f7de2024-12-02 03:42:35 +0000937 "deregister_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +0200938 )
rshri932105f2024-07-05 15:11:55 +0000939 self.logger.info("workflow_name is :{}".format(workflow_name))
940
garciadeblas96b94f52024-07-08 16:18:21 +0200941 workflow_status, workflow_msg = await self.odu.check_workflow_status(
942 workflow_name
943 )
rshri932105f2024-07-05 15:11:55 +0000944 self.logger.info(
945 "workflow_status is :{} and workflow_msg is :{}".format(
946 workflow_status, workflow_msg
947 )
948 )
949 if workflow_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200950 db_cluster["state"] = "DELETED"
951 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
rshri932105f2024-07-05 15:11:55 +0000952 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200953 db_cluster["state"] = "FAILED_DELETION"
954 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000955 # has to call update_operation_history return content
yshahcb9075f2024-11-22 12:08:57 +0000956 db_cluster = self.update_operation_history(
957 db_cluster, op_id, workflow_status, None
958 )
garciadeblas96b94f52024-07-08 16:18:21 +0200959 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000960
garciadeblas91bb2c42024-11-12 11:17:12 +0100961 # Clean items used in the workflow or in the cluster, no matter if the workflow succeeded
962 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshri948f7de2024-12-02 03:42:35 +0000963 "deregister_cluster", op_id, op_params, db_cluster_copy
garciadeblas91bb2c42024-11-12 11:17:12 +0100964 )
965 self.logger.info(
966 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
967 )
968
rshri932105f2024-07-05 15:11:55 +0000969 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +0100970 resource_status, resource_msg = await self.check_resource_status(
rshri948f7de2024-12-02 03:42:35 +0000971 "deregister_cluster", op_id, op_params, db_cluster_copy
rshri932105f2024-07-05 15:11:55 +0000972 )
973 self.logger.info(
974 "resource_status is :{} and resource_msg is :{}".format(
975 resource_status, resource_msg
976 )
977 )
978 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +0200979 db_cluster["resourceState"] = "READY"
rshri932105f2024-07-05 15:11:55 +0000980 else:
garciadeblas96b94f52024-07-08 16:18:21 +0200981 db_cluster["resourceState"] = "ERROR"
rshri932105f2024-07-05 15:11:55 +0000982
garciadeblas96b94f52024-07-08 16:18:21 +0200983 db_cluster["operatingState"] = "IDLE"
984 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +0000985 db_cluster, op_id, workflow_status, resource_status
garciadeblas96b94f52024-07-08 16:18:21 +0200986 )
shahithya70a3fc92024-11-12 11:01:05 +0000987 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +0200988 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri932105f2024-07-05 15:11:55 +0000989
garciadeblas96b94f52024-07-08 16:18:21 +0200990 # To delete it from DB
991 if db_cluster["state"] == "DELETED":
992 self.db.del_one("clusters", {"_id": db_cluster["_id"]})
rshri948f7de2024-12-02 03:42:35 +0000993
994 # To delete it from k8scluster collection
995 self.db.del_one("k8sclusters", {"name": db_cluster["name"]})
996
rshri932105f2024-07-05 15:11:55 +0000997 return
998
rshri948f7de2024-12-02 03:42:35 +0000999 async def get_creds(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001000 self.logger.info("Cluster get creds Enter")
rshri948f7de2024-12-02 03:42:35 +00001001 cluster_id = params["cluster_id"]
1002 op_id = params["operation_id"]
1003 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
garciadeblas96b94f52024-07-08 16:18:21 +02001004 result, cluster_creds = await self.odu.get_cluster_credentials(db_cluster)
1005 if result:
1006 db_cluster["credentials"] = cluster_creds
yshahd940c652024-10-17 06:11:12 +00001007 op_len = 0
1008 for operations in db_cluster["operationHistory"]:
1009 if operations["op_id"] == op_id:
1010 db_cluster["operationHistory"][op_len]["result"] = result
1011 db_cluster["operationHistory"][op_len]["endDate"] = time()
1012 op_len += 1
shahithya70a3fc92024-11-12 11:01:05 +00001013 db_cluster["current_operation"] = None
yshahd940c652024-10-17 06:11:12 +00001014 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
rshri948f7de2024-12-02 03:42:35 +00001015 self.logger.info("Cluster Get Creds Exit")
yshah771dea82024-07-05 15:11:49 +00001016 return
1017
rshri948f7de2024-12-02 03:42:35 +00001018 async def update(self, params, order_id):
garciadeblas96b94f52024-07-08 16:18:21 +02001019 self.logger.info("Cluster update Enter")
rshri948f7de2024-12-02 03:42:35 +00001020 # To get the cluster details
1021 cluster_id = params["cluster_id"]
1022 db_cluster = self.db.get_one("clusters", {"_id": cluster_id})
1023
1024 # To get the operation params details
1025 op_id = params["operation_id"]
1026 op_params = self.get_operation_params(db_cluster, op_id)
yshah771dea82024-07-05 15:11:49 +00001027
rshric3564942024-11-12 18:12:38 +00001028 db_cluster_copy = self.decrypting_key(db_cluster)
1029
1030 # vim account details
1031 db_vim = self.db.get_one("vim_accounts", {"name": db_cluster["vim_account"]})
1032 db_cluster_copy["vim_account"] = db_vim
1033
garciadeblasadb81e82024-11-08 01:11:46 +01001034 _, workflow_name = await self.odu.launch_workflow(
rshric3564942024-11-12 18:12:38 +00001035 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001036 )
1037 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1038 workflow_name
1039 )
1040 self.logger.info(
1041 "Workflow Status: {} Workflow Message: {}".format(
1042 workflow_status, workflow_msg
yshah771dea82024-07-05 15:11:49 +00001043 )
garciadeblas96b94f52024-07-08 16:18:21 +02001044 )
1045
1046 if workflow_status:
1047 db_cluster["resourceState"] = "IN_PROGRESS.GIT_SYNCED"
1048 else:
1049 db_cluster["resourceState"] = "ERROR"
1050
yshahcb9075f2024-11-22 12:08:57 +00001051 db_cluster = self.update_operation_history(
1052 db_cluster, op_id, workflow_status, None
1053 )
garciadeblas96b94f52024-07-08 16:18:21 +02001054 # self.logger.info("Db content: {}".format(db_content))
1055 # self.db.set_one(self.db_collection, {"_id": _id}, db_cluster)
1056 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
1057
garciadeblas28bff0f2024-09-16 12:53:07 +02001058 # Clean items used in the workflow, no matter if the workflow succeeded
1059 clean_status, clean_msg = await self.odu.clean_items_workflow(
rshric3564942024-11-12 18:12:38 +00001060 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas28bff0f2024-09-16 12:53:07 +02001061 )
1062 self.logger.info(
1063 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1064 )
garciadeblas96b94f52024-07-08 16:18:21 +02001065 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001066 resource_status, resource_msg = await self.check_resource_status(
rshric3564942024-11-12 18:12:38 +00001067 "update_cluster", op_id, op_params, db_cluster_copy
garciadeblas96b94f52024-07-08 16:18:21 +02001068 )
1069 self.logger.info(
1070 "Resource Status: {} Resource Message: {}".format(
1071 resource_status, resource_msg
1072 )
1073 )
yshah771dea82024-07-05 15:11:49 +00001074
1075 if resource_status:
garciadeblas96b94f52024-07-08 16:18:21 +02001076 db_cluster["resourceState"] = "READY"
yshah771dea82024-07-05 15:11:49 +00001077 else:
garciadeblas96b94f52024-07-08 16:18:21 +02001078 db_cluster["resourceState"] = "ERROR"
yshah771dea82024-07-05 15:11:49 +00001079
yshah0defcd52024-11-18 07:41:35 +00001080 db_cluster = self.update_operation_history(
yshahcb9075f2024-11-22 12:08:57 +00001081 db_cluster, op_id, workflow_status, resource_status
yshah0defcd52024-11-18 07:41:35 +00001082 )
1083
garciadeblas96b94f52024-07-08 16:18:21 +02001084 db_cluster["operatingState"] = "IDLE"
garciadeblas96b94f52024-07-08 16:18:21 +02001085 # self.logger.info("db_cluster: {}".format(db_cluster))
1086 # TODO: verify enxtcondition
1087 # For the moment, if the workflow completed successfully, then we update the db accordingly.
1088 if workflow_status:
1089 if "k8s_version" in op_params:
1090 db_cluster["k8s_version"] = op_params["k8s_version"]
yshah0defcd52024-11-18 07:41:35 +00001091 if "node_count" in op_params:
garciadeblas96b94f52024-07-08 16:18:21 +02001092 db_cluster["node_count"] = op_params["node_count"]
yshah0defcd52024-11-18 07:41:35 +00001093 if "node_size" in op_params:
1094 db_cluster["node_count"] = op_params["node_size"]
garciadeblas96b94f52024-07-08 16:18:21 +02001095 # self.db.set_one(self.db_collection, {"_id": _id}, db_content)
shahithya70a3fc92024-11-12 11:01:05 +00001096 db_cluster["current_operation"] = None
garciadeblas96b94f52024-07-08 16:18:21 +02001097 self.db.set_one("clusters", {"_id": db_cluster["_id"]}, db_cluster)
yshah771dea82024-07-05 15:11:49 +00001098 return
1099
1100
garciadeblas72412282024-11-07 12:41:54 +01001101class CloudCredentialsLcm(GitOpsLcm):
yshah771dea82024-07-05 15:11:49 +00001102 db_collection = "vim_accounts"
1103
1104 def __init__(self, msg, lcm_tasks, config):
1105 """
1106 Init, Connect to database, filesystem storage, and messaging
1107 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1108 :return: None
1109 """
garciadeblas72412282024-11-07 12:41:54 +01001110 super().__init__(msg, lcm_tasks, config)
yshah771dea82024-07-05 15:11:49 +00001111
yshah564ec9c2024-11-29 07:33:32 +00001112 async def add(self, params, order_id):
yshah771dea82024-07-05 15:11:49 +00001113 self.logger.info("Cloud Credentials create")
yshah564ec9c2024-11-29 07:33:32 +00001114 vim_id = params["_id"]
1115 op_id = vim_id
1116 op_params = params
1117 db_content = self.db.get_one(self.db_collection, {"_id": vim_id})
1118 vim_config = db_content.get("config", {})
1119 self.db.encrypt_decrypt_fields(
1120 vim_config.get("credentials"),
1121 "decrypt",
1122 ["password", "secret"],
1123 schema_version=db_content["schema_version"],
1124 salt=vim_id,
1125 )
1126
garciadeblasadb81e82024-11-08 01:11:46 +01001127 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001128 "create_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001129 )
1130
1131 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1132 workflow_name
1133 )
1134
1135 self.logger.info(
1136 "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
1137 )
1138
garciadeblas28bff0f2024-09-16 12:53:07 +02001139 # Clean items used in the workflow, no matter if the workflow succeeded
1140 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001141 "create_cloud_credentials", op_id, op_params, db_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001142 )
1143 self.logger.info(
1144 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1145 )
1146
yshah771dea82024-07-05 15:11:49 +00001147 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001148 resource_status, resource_msg = await self.check_resource_status(
yshah564ec9c2024-11-29 07:33:32 +00001149 "create_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001150 )
1151 self.logger.info(
1152 "Resource Status: {} Resource Message: {}".format(
1153 resource_status, resource_msg
1154 )
1155 )
garciadeblas15b8a302024-09-23 12:40:13 +02001156
yshah564ec9c2024-11-29 07:33:32 +00001157 db_content["_admin"]["operationalState"] = "ENABLED"
1158 for operation in db_content["_admin"]["operations"]:
garciadeblas15b8a302024-09-23 12:40:13 +02001159 if operation["lcmOperationType"] == "create":
1160 operation["operationState"] = "ENABLED"
yshah564ec9c2024-11-29 07:33:32 +00001161 self.logger.info("Content : {}".format(db_content))
1162 self.db.set_one("vim_accounts", {"_id": db_content["_id"]}, db_content)
yshah771dea82024-07-05 15:11:49 +00001163 return
1164
yshah564ec9c2024-11-29 07:33:32 +00001165 async def edit(self, params, order_id):
1166 self.logger.info("Cloud Credentials Update")
1167 vim_id = params["_id"]
1168 op_id = vim_id
1169 op_params = params
1170 db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
1171 vim_config = db_content.get("config", {})
1172 self.db.encrypt_decrypt_fields(
1173 vim_config.get("credentials"),
1174 "decrypt",
1175 ["password", "secret"],
1176 schema_version=db_content["schema_version"],
1177 salt=vim_id,
1178 )
1179
garciadeblasadb81e82024-11-08 01:11:46 +01001180 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001181 "update_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001182 )
1183 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1184 workflow_name
1185 )
1186 self.logger.info(
1187 "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
1188 )
1189
garciadeblas28bff0f2024-09-16 12:53:07 +02001190 # Clean items used in the workflow, no matter if the workflow succeeded
1191 clean_status, clean_msg = await self.odu.clean_items_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001192 "update_cloud_credentials", op_id, op_params, db_content
garciadeblas28bff0f2024-09-16 12:53:07 +02001193 )
1194 self.logger.info(
1195 f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
1196 )
1197
yshah771dea82024-07-05 15:11:49 +00001198 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001199 resource_status, resource_msg = await self.check_resource_status(
yshah564ec9c2024-11-29 07:33:32 +00001200 "update_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001201 )
1202 self.logger.info(
1203 "Resource Status: {} Resource Message: {}".format(
1204 resource_status, resource_msg
1205 )
1206 )
1207 return
1208
yshah564ec9c2024-11-29 07:33:32 +00001209 async def remove(self, params, order_id):
1210 self.logger.info("Cloud Credentials remove")
1211 vim_id = params["_id"]
1212 op_id = vim_id
1213 op_params = params
1214 db_content = self.db.get_one("vim_accounts", {"_id": vim_id})
1215
garciadeblasadb81e82024-11-08 01:11:46 +01001216 _, workflow_name = await self.odu.launch_workflow(
yshah564ec9c2024-11-29 07:33:32 +00001217 "delete_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001218 )
1219 workflow_status, workflow_msg = await self.odu.check_workflow_status(
1220 workflow_name
1221 )
1222 self.logger.info(
1223 "Workflow Status: {} Workflow Msg: {}".format(workflow_status, workflow_msg)
1224 )
1225
1226 if workflow_status:
garciadeblas72412282024-11-07 12:41:54 +01001227 resource_status, resource_msg = await self.check_resource_status(
yshah564ec9c2024-11-29 07:33:32 +00001228 "delete_cloud_credentials", op_id, op_params, db_content
yshah771dea82024-07-05 15:11:49 +00001229 )
1230 self.logger.info(
1231 "Resource Status: {} Resource Message: {}".format(
1232 resource_status, resource_msg
1233 )
1234 )
yshah564ec9c2024-11-29 07:33:32 +00001235 self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
yshah771dea82024-07-05 15:11:49 +00001236 return
1237
rshri932105f2024-07-05 15:11:55 +00001238
garciadeblas72412282024-11-07 12:41:54 +01001239class K8sAppLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001240 db_collection = "k8sapp"
1241
rshri932105f2024-07-05 15:11:55 +00001242 def __init__(self, msg, lcm_tasks, config):
1243 """
1244 Init, Connect to database, filesystem storage, and messaging
1245 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1246 :return: None
1247 """
garciadeblas72412282024-11-07 12:41:54 +01001248 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001249
rshri948f7de2024-12-02 03:42:35 +00001250 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001251 self.logger.info("App Create Enter")
1252
rshri948f7de2024-12-02 03:42:35 +00001253 op_id = params["operation_id"]
1254 profile_id = params["profile_id"]
1255
1256 # To initialize the operation states
1257 self.initialize_operation(profile_id, op_id)
1258
1259 content = self.db.get_one("k8sapp", {"_id": profile_id})
1260 content["profile_type"] = "applications"
1261 op_params = self.get_operation_params(content, op_id)
1262 self.db.set_one("k8sapp", {"_id": content["_id"]}, content)
1263
garciadeblasadb81e82024-11-08 01:11:46 +01001264 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001265 "create_profile", op_id, op_params, content
1266 )
rshri932105f2024-07-05 15:11:55 +00001267 self.logger.info("workflow_name is :{}".format(workflow_name))
1268
yshah564ec9c2024-11-29 07:33:32 +00001269 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001270
1271 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001272 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001273 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001274 )
yshah564ec9c2024-11-29 07:33:32 +00001275 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1276 self.logger.info(f"App Create Exit with resource status: {resource_status}")
rshri932105f2024-07-05 15:11:55 +00001277 return
1278
rshri948f7de2024-12-02 03:42:35 +00001279 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001280 self.logger.info("App delete Enter")
rshri932105f2024-07-05 15:11:55 +00001281
rshri948f7de2024-12-02 03:42:35 +00001282 op_id = params["operation_id"]
1283 profile_id = params["profile_id"]
1284
1285 # To initialize the operation states
1286 self.initialize_operation(profile_id, op_id)
1287
1288 content = self.db.get_one("k8sapp", {"_id": profile_id})
1289 op_params = self.get_operation_params(content, op_id)
1290
garciadeblasadb81e82024-11-08 01:11:46 +01001291 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001292 "delete_profile", op_id, op_params, content
1293 )
rshri932105f2024-07-05 15:11:55 +00001294 self.logger.info("workflow_name is :{}".format(workflow_name))
1295
yshah564ec9c2024-11-29 07:33:32 +00001296 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001297
1298 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001299 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001300 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001301 )
rshri932105f2024-07-05 15:11:55 +00001302
yshah564ec9c2024-11-29 07:33:32 +00001303 if resource_status:
1304 content["state"] = "DELETED"
1305 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1306 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1307 self.logger.info(f"App Delete Exit with resource status: {resource_status}")
rshri932105f2024-07-05 15:11:55 +00001308 return
1309
1310
garciadeblas72412282024-11-07 12:41:54 +01001311class K8sResourceLcm(GitOpsLcm):
rshri948f7de2024-12-02 03:42:35 +00001312 db_collection = "k8sresource"
1313
rshri932105f2024-07-05 15:11:55 +00001314 def __init__(self, msg, lcm_tasks, config):
1315 """
1316 Init, Connect to database, filesystem storage, and messaging
1317 :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
1318 :return: None
1319 """
garciadeblas72412282024-11-07 12:41:54 +01001320 super().__init__(msg, lcm_tasks, config)
rshri932105f2024-07-05 15:11:55 +00001321
rshri948f7de2024-12-02 03:42:35 +00001322 async def create(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001323 self.logger.info("Resource Create Enter")
1324
rshri948f7de2024-12-02 03:42:35 +00001325 op_id = params["operation_id"]
1326 profile_id = params["profile_id"]
1327
1328 # To initialize the operation states
1329 self.initialize_operation(profile_id, op_id)
1330
1331 content = self.db.get_one("k8sresource", {"_id": profile_id})
1332 content["profile_type"] = "managed-resources"
1333 op_params = self.get_operation_params(content, op_id)
1334 self.db.set_one("k8sresource", {"_id": content["_id"]}, content)
1335
garciadeblasadb81e82024-11-08 01:11:46 +01001336 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001337 "create_profile", op_id, op_params, content
1338 )
rshri932105f2024-07-05 15:11:55 +00001339 self.logger.info("workflow_name is :{}".format(workflow_name))
1340
yshah564ec9c2024-11-29 07:33:32 +00001341 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001342
1343 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001344 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001345 "create_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001346 )
yshah564ec9c2024-11-29 07:33:32 +00001347 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1348 self.logger.info(
1349 f"Resource Create Exit with resource status: {resource_status}"
rshri932105f2024-07-05 15:11:55 +00001350 )
rshri932105f2024-07-05 15:11:55 +00001351 return
1352
rshri948f7de2024-12-02 03:42:35 +00001353 async def delete(self, params, order_id):
rshri932105f2024-07-05 15:11:55 +00001354 self.logger.info("Resource delete Enter")
rshri948f7de2024-12-02 03:42:35 +00001355
1356 op_id = params["operation_id"]
1357 profile_id = params["profile_id"]
1358
1359 # To initialize the operation states
1360 self.initialize_operation(profile_id, op_id)
1361
1362 content = self.db.get_one("k8sresource", {"_id": profile_id})
1363 op_params = self.get_operation_params(content, op_id)
rshri932105f2024-07-05 15:11:55 +00001364
garciadeblasadb81e82024-11-08 01:11:46 +01001365 _, workflow_name = await self.odu.launch_workflow(
garciadeblas96b94f52024-07-08 16:18:21 +02001366 "delete_profile", op_id, op_params, content
1367 )
rshri932105f2024-07-05 15:11:55 +00001368 self.logger.info("workflow_name is :{}".format(workflow_name))
1369
yshah564ec9c2024-11-29 07:33:32 +00001370 workflow_status = await self.check_workflow(op_id, workflow_name, content)
rshri932105f2024-07-05 15:11:55 +00001371
1372 if workflow_status:
yshah564ec9c2024-11-29 07:33:32 +00001373 resource_status, content = await self.check_resource(
garciadeblas96b94f52024-07-08 16:18:21 +02001374 "delete_profile", op_id, op_params, content
rshri932105f2024-07-05 15:11:55 +00001375 )
rshri932105f2024-07-05 15:11:55 +00001376
yshah564ec9c2024-11-29 07:33:32 +00001377 if resource_status:
1378 content["state"] = "DELETED"
1379 self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
1380 self.db.del_one(self.db_collection, {"_id": content["_id"]})
1381 self.logger.info(
1382 f"Resource Delete Exit with resource status: {resource_status}"
garciadeblas96b94f52024-07-08 16:18:21 +02001383 )
rshri932105f2024-07-05 15:11:55 +00001384 return
1385
1386
class K8sInfraControllerLcm(GitOpsLcm):
    """LCM handler for Kubernetes infra-controller profiles (GitOps)."""

    db_collection = "k8sinfra_controller"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an infra-controller profile via the ODU 'create_profile' workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka order identifier (kept for the LCM dispatch signature)
        :return: None
        """
        self.logger.info("Infra controller Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        # Use the class-level collection name instead of repeating the literal
        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        content["profile_type"] = "infra-controllers"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        # Safe default so the exit log cannot hit an unbound name when the
        # workflow itself failed.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Infra Controller Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete an infra-controller profile via the ODU 'delete_profile' workflow.

        The DB record is marked DELETED and removed only when both the workflow
        and the resource check succeed.
        """
        self.logger.info("Infra controller delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        # Safe default so the exit log cannot hit an unbound name
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(
            f"Infra Controller Delete Exit with resource status: {resource_status}"
        )
        return
1461
1462
class K8sInfraConfigLcm(GitOpsLcm):
    """LCM handler for Kubernetes infra-config profiles (GitOps)."""

    db_collection = "k8sinfra_config"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an infra-config profile via the ODU 'create_profile' workflow.

        :param params: dict with "operation_id" and "profile_id"
        :param order_id: kafka order identifier (kept for the LCM dispatch signature)
        :return: None
        """
        self.logger.info("Infra config Create Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        # Use the class-level collection name instead of repeating the literal
        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        content["profile_type"] = "infra-configs"
        op_params = self.get_operation_params(content, op_id)
        self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)

        _, workflow_name = await self.odu.launch_workflow(
            "create_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        # Safe default so the exit log cannot hit an unbound name when the
        # workflow itself failed.
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource(
                "create_profile", op_id, op_params, content
            )
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
        self.logger.info(
            f"Infra Config Create Exit with resource status: {resource_status}"
        )
        return

    async def delete(self, params, order_id):
        """Delete an infra-config profile via the ODU 'delete_profile' workflow.

        The DB record is marked DELETED and removed only when both the workflow
        and the resource check succeed.
        """
        self.logger.info("Infra config delete Enter")

        op_id = params["operation_id"]
        profile_id = params["profile_id"]

        # To initialize the operation states
        self.initialize_operation(profile_id, op_id)

        content = self.db.get_one(self.db_collection, {"_id": profile_id})
        op_params = self.get_operation_params(content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_profile", op_id, op_params, content
        )
        self.logger.info("workflow_name is :{}".format(workflow_name))

        workflow_status = await self.check_workflow(op_id, workflow_name, content)

        # Safe default so the exit log cannot hit an unbound name
        resource_status = False
        if workflow_status:
            resource_status, content = await self.check_resource(
                "delete_profile", op_id, op_params, content
            )

        if resource_status:
            content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": content["_id"]}, content)
            self.db.del_one(self.db_collection, {"_id": content["_id"]})
        self.logger.info(
            f"Infra Config Delete Exit with resource status: {resource_status}"
        )

        return
yshah771dea82024-07-05 15:11:49 +00001538
1539
class OkaLcm(GitOpsLcm):
    """LCM handler for OKA packages (GitOps)."""

    db_collection = "okas"

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    async def create(self, params, order_id):
        """Create an OKA via the ODU 'create_oka' workflow.

        :param params: dict with "operation_id" and "oka_id"
        :param order_id: kafka order identifier (kept for the LCM dispatch signature)
        :return: None
        """
        self.logger.info("OKA Create Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "create_oka", op_id, op_params, db_content
        )

        workflow_status = await self.check_workflow(op_id, workflow_name, db_content)

        # Safe default so the exit log cannot hit an unbound name when the
        # workflow itself failed.
        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource(
                "create_oka", op_id, op_params, db_content
            )
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        self.logger.info(f"OKA Create Exit with resource status: {resource_status}")
        return

    async def edit(self, params, order_id):
        """Update an OKA via the ODU 'update_oka' workflow.

        :param params: dict with "operation_id" and "oka_id"
        :return: None
        """
        self.logger.info("OKA Edit Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "update_oka", op_id, op_params, db_content
        )
        workflow_status = await self.check_workflow(op_id, workflow_name, db_content)

        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource(
                "update_oka", op_id, op_params, db_content
            )
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
        self.logger.info(f"OKA Update Exit with resource status: {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete an OKA via the ODU 'delete_oka' workflow.

        The DB record is marked DELETED and removed only when both the workflow
        and the resource check succeed.
        """
        self.logger.info("OKA delete Enter")
        op_id = params["operation_id"]
        oka_id = params["oka_id"]
        self.initialize_operation(oka_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": oka_id})
        op_params = self.get_operation_params(db_content, op_id)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_oka", op_id, op_params, db_content
        )
        workflow_status = await self.check_workflow(op_id, workflow_name, db_content)

        resource_status = False
        if workflow_status:
            resource_status, db_content = await self.check_resource(
                "delete_oka", op_id, op_params, db_content
            )

        if resource_status:
            # Bug fix: the original used "==" (a no-op comparison) where an
            # assignment was intended, so the state was never set to DELETED.
            db_content["state"] = "DELETED"
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)
            self.db.del_one(self.db_collection, {"_id": db_content["_id"]})
        self.logger.info(f"OKA Delete Exit with resource status: {resource_status}")
        return
1618
1619
class KsuLcm(GitOpsLcm):
    """LCM handler for KSUs (Kubernetes Software Units) managed via GitOps."""

    db_collection = "ksus"
    # Maps the profile_type field carried in operation params to the DB
    # collection that stores that kind of profile.
    profile_collection_mapping = {
        "infra_controller_profiles": "k8sinfra_controller",
        "infra_config_profiles": "k8sinfra_config",
        "resource_profiles": "k8sresource",
        "app_profiles": "k8sapp",
    }

    def __init__(self, msg, lcm_tasks, config):
        """
        Init, Connect to database, filesystem storage, and messaging
        :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
        :return: None
        """
        super().__init__(msg, lcm_tasks, config)

    def _enrich_profile_params(self, ksu_params):
        # Fill ksu_params["profile"] with the profile name and age public key
        # read from the profile's own collection.
        profile_type = ksu_params["profile"]["profile_type"]
        profile_id = ksu_params["profile"]["_id"]
        profile_collection = self.profile_collection_mapping[profile_type]
        db_profile = self.db.get_one(profile_collection, {"_id": profile_id})
        ksu_params["profile"]["name"] = db_profile["name"]
        ksu_params["profile"]["age_pubkey"] = db_profile.get("age_pubkey", "")

    def _enrich_oka_params(self, ksu_params):
        # Fill each referenced OKA with its sw_catalog_path when missing.
        for oka in ksu_params["oka"]:
            if "sw_catalog_path" not in oka:
                db_oka = self.db.get_one("okas", {"_id": oka["_id"]})
                oka["sw_catalog_path"] = f"infra-controllers/{db_oka['git_name']}"

    async def create(self, params, order_id):
        """Create one or more KSUs with a single 'create_ksus' workflow.

        :param params: dict with "operation_id" and "ksus_list"
        :param order_id: kafka order identifier (kept for the LCM dispatch signature)
        :return: None
        """
        self.logger.info("ksu Create Enter")
        db_content = []
        op_params = []
        op_id = params["operation_id"]
        for ksu_id in params["ksus_list"]:
            self.logger.info("Ksu ID: {}".format(ksu_id))
            self.initialize_operation(ksu_id, op_id)
            db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
            self.logger.info("Db KSU: {}".format(db_ksu))
            db_content.append(db_ksu)
            ksu_params = self.get_operation_params(db_ksu, op_id)
            self.logger.info("Operation Params: {}".format(ksu_params))
            # Complete the params with profile name/age-pubkey and OKA paths
            self._enrich_profile_params(ksu_params)
            self._enrich_oka_params(ksu_params)
            op_params.append(ksu_params)

        _, workflow_name = await self.odu.launch_workflow(
            "create_ksus", op_id, op_params, db_content
        )

        # Safe default so the exit log cannot hit an unbound name when
        # ksus_list is empty or every workflow check failed.
        resource_status = False
        for db_ksu, ksu_params in zip(db_content, op_params):
            workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)

            if workflow_status:
                resource_status, db_ksu = await self.check_resource(
                    "create_ksus", op_id, ksu_params, db_ksu
                )

            self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)

        # Clean items used in the workflow, no matter if the workflow succeeded
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_ksus", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )
        self.logger.info(f"KSU Create EXIT with Resource Status {resource_status}")
        return

    async def edit(self, params, order_id):
        """Update one or more KSUs with a single 'update_ksus' workflow.

        :param params: dict with "operation_id" and "ksus_list"
        :return: None
        """
        self.logger.info("ksu edit Enter")
        db_content = []
        op_params = []
        op_id = params["operation_id"]
        for ksu_id in params["ksus_list"]:
            self.initialize_operation(ksu_id, op_id)
            db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
            db_content.append(db_ksu)
            ksu_params = self.get_operation_params(db_ksu, op_id)
            # Complete the params with profile name/age-pubkey and OKA paths
            self._enrich_profile_params(ksu_params)
            self._enrich_oka_params(ksu_params)
            op_params.append(ksu_params)

        _, workflow_name = await self.odu.launch_workflow(
            "update_ksus", op_id, op_params, db_content
        )

        resource_status = False  # safe default for the exit log
        for db_ksu, ksu_params in zip(db_content, op_params):
            workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)

            if workflow_status:
                resource_status, db_ksu = await self.check_resource(
                    "update_ksus", op_id, ksu_params, db_ksu
                )
                # Persist the edited fields into the KSU record
                db_ksu["name"] = ksu_params["name"]
                db_ksu["description"] = ksu_params["description"]
                db_ksu["profile"]["profile_type"] = ksu_params["profile"][
                    "profile_type"
                ]
                db_ksu["profile"]["_id"] = ksu_params["profile"]["_id"]
                db_ksu["oka"] = ksu_params["oka"]
                self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)

        # Clean items used in the workflow, no matter if the workflow succeeded
        # NOTE(review): this passes "create_ksus" even though the edit flow ran
        # "update_ksus" — looks like a copy/paste slip; confirm against
        # odu_workflows.clean_items_workflow before changing it.
        clean_status, clean_msg = await self.odu.clean_items_workflow(
            "create_ksus", op_id, op_params, db_content
        )
        self.logger.info(
            f"clean_status is :{clean_status} and clean_msg is :{clean_msg}"
        )
        self.logger.info(f"KSU Update EXIT with Resource Status {resource_status}")
        return

    async def delete(self, params, order_id):
        """Delete one or more KSUs with a single 'delete_ksus' workflow.

        Each KSU record is marked DELETED and removed only when both the
        workflow and the per-KSU resource check succeed.
        """
        self.logger.info("ksu delete Enter")
        db_content = []
        op_params = []
        op_id = params["operation_id"]
        for ksu_id in params["ksus_list"]:
            self.initialize_operation(ksu_id, op_id)
            db_ksu = self.db.get_one(self.db_collection, {"_id": ksu_id})
            db_content.append(db_ksu)
            # Deletion only needs the profile reference, enriched with
            # the profile name and age public key.
            ksu_params = {
                "profile": {
                    "profile_type": db_ksu["profile"]["profile_type"],
                    "_id": db_ksu["profile"]["_id"],
                }
            }
            self._enrich_profile_params(ksu_params)
            op_params.append(ksu_params)

        _, workflow_name = await self.odu.launch_workflow(
            "delete_ksus", op_id, op_params, db_content
        )

        resource_status = False  # safe default for the exit log
        for db_ksu, ksu_params in zip(db_content, op_params):
            workflow_status = await self.check_workflow(op_id, workflow_name, db_ksu)

            if workflow_status:
                resource_status, db_ksu = await self.check_resource(
                    "delete_ksus", op_id, ksu_params, db_ksu
                )

                if resource_status:
                    # Bug fix: the original used "==" (a no-op comparison)
                    # where an assignment was intended, so the state was
                    # never set to DELETED before removal.
                    db_ksu["state"] = "DELETED"
                    self.db.set_one(self.db_collection, {"_id": db_ksu["_id"]}, db_ksu)
                    self.db.del_one(self.db_collection, {"_id": db_ksu["_id"]})

        self.logger.info(f"KSU Delete Exit with resource status: {resource_status}")
        return

    async def clone(self, params, order_id):
        """Clone a single KSU (first of ksus_list) via the 'clone_ksus' workflow."""
        self.logger.info("ksu clone Enter")
        op_id = params["operation_id"]
        ksus_id = params["ksus_list"][0]
        self.initialize_operation(ksus_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
        op_params = self.get_operation_params(db_content, op_id)
        _, workflow_name = await self.odu.launch_workflow(
            "clone_ksus", op_id, op_params, db_content
        )

        workflow_status = await self.check_workflow(op_id, workflow_name, db_content)

        resource_status = False  # safe default for the exit log
        if workflow_status:
            resource_status, db_content = await self.check_resource(
                "clone_ksus", op_id, op_params, db_content
            )
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)

        self.logger.info(f"KSU Clone Exit with resource status: {resource_status}")
        return

    async def move(self, params, order_id):
        """Move a single KSU (first of ksus_list) via the 'move_ksus' workflow."""
        self.logger.info("ksu move Enter")
        op_id = params["operation_id"]
        ksus_id = params["ksus_list"][0]
        self.initialize_operation(ksus_id, op_id)
        db_content = self.db.get_one(self.db_collection, {"_id": ksus_id})
        op_params = self.get_operation_params(db_content, op_id)
        _, workflow_name = await self.odu.launch_workflow(
            "move_ksus", op_id, op_params, db_content
        )

        workflow_status = await self.check_workflow(op_id, workflow_name, db_content)

        resource_status = False  # safe default for the exit log
        if workflow_status:
            resource_status, db_content = await self.check_resource(
                "move_ksus", op_id, op_params, db_content
            )
            self.db.set_one(self.db_collection, {"_id": db_content["_id"]}, db_content)

        self.logger.info(f"KSU Move Exit with resource status: {resource_status}")
        return