blob: cf09e8fa5b44dcd698ed3fbddb9ee8544deaf71c [file] [log] [blame]
garciadeblas96b94f52024-07-08 16:18:21 +02001#######################################################################################
2# Copyright ETSI Contributors and Others.
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
13# implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16#######################################################################################
17
18
19import asyncio
garciadeblas40811852024-10-22 11:35:17 +020020from math import ceil
garciadeblas7cf480d2025-01-27 16:53:45 +010021from jsonpath_ng.ext import parse
garciadeblas96b94f52024-07-08 16:18:21 +020022
23
async def check_workflow_status(self, workflow_name):
    """Wait for the named Argo workflow to reach its 'Completed' condition.

    Returns a (success: bool, message: str) tuple and never raises: a missing
    workflow name or any exception from the readiness loop is reported as a
    failed result instead.
    """
    self.logger.info(f"check_workflow_status Enter: {workflow_name}")
    # Guard: nothing to poll if the workflow was never created.
    if not workflow_name:
        return False, "Workflow was not launched"
    # Argo marks completion via a status condition of type 'Completed'
    # whose value becomes the string "True".
    completion_condition = {
        "jsonpath_filter": "status.conditions[?(@.type=='Completed')].status",
        "value": "True",
    }
    try:
        outcome = await self.readiness_loop(
            item="workflow",
            name=workflow_name,
            namespace="osm-workflows",
            condition=completion_condition,
            deleted=False,
            timeout=300,
        )
    except Exception as e:
        return False, f"Unexpected exception: {e}"
    return outcome
garciadeblas40811852024-10-22 11:35:17 +020042
43
async def readiness_loop(
    self, item, name, namespace, condition, deleted, timeout, kubectl=None
):
    """Poll a Kubernetes custom object until it meets a condition or is deleted.

    Args:
        item: logical kind of object; must be a key of the item/API map below
            ("workflow", "kustomization", "cluster_aws", "cluster_azure",
            "cluster_gcp", "nodepool_aws", "nodepool_gcp"). An unknown kind
            raises KeyError before polling starts.
        name: name of the object to poll.
        namespace: namespace where the object lives.
        condition: dict with "jsonpath_filter" (JSONPath evaluated against the
            fetched object), "value" (expected value), and optionally
            "function" (comparison callable taking (expected, actual);
            defaults to equality). Ignored when `deleted` is True; a falsy
            condition means there is nothing to check.
        deleted: when True, wait for the object to disappear instead of
            waiting for the condition.
        timeout: overall timeout in seconds; the number of iterations is
            derived from self._odu_checkloop_retry_time.
        kubectl: optional kubectl-like client; defaults to self._kubectl.

    Returns:
        (True, "COMPLETED") on success, (True, "Nothing to check") when no
        condition was given, (False, <message>) after the timeout expires.
    """
    if kubectl is None:
        kubectl = self._kubectl
    self.logger.info("readiness_loop Enter")
    self.logger.info(
        f"{item} {name}. Namespace: '{namespace}'. Condition: {condition}. Deleted: {deleted}. Timeout: {timeout}"
    )
    # Map from logical item kind to the CRD API coordinates used to fetch it.
    item_api_map = {
        "workflow": {
            "api_group": "argoproj.io",
            "api_plural": "workflows",
            "api_version": "v1alpha1",
        },
        "kustomization": {
            "api_group": "kustomize.toolkit.fluxcd.io",
            "api_plural": "kustomizations",
            "api_version": "v1",
        },
        "cluster_aws": {
            "api_group": "eks.aws.upbound.io",
            "api_plural": "clusters",
            "api_version": "v1beta1",
        },
        "cluster_azure": {
            "api_group": "containerservice.azure.upbound.io",
            "api_plural": "kubernetesclusters",
            "api_version": "v1beta1",
        },
        "cluster_gcp": {
            "api_group": "container.gcp.upbound.io",
            "api_plural": "clusters",
            "api_version": "v1beta2",
        },
        "nodepool_aws": {
            "api_group": "eks.aws.upbound.io",
            "api_plural": "nodegroups",
            "api_version": "v1beta1",
        },
        "nodepool_gcp": {
            "api_group": "container.gcp.upbound.io",
            "api_plural": "nodepools",
            "api_version": "v1beta2",
        },
    }
    counter = 1
    retry_time = self._odu_checkloop_retry_time
    max_iterations = ceil(timeout / retry_time)
    api_group = item_api_map[item]["api_group"]
    api_plural = item_api_map[item]["api_plural"]
    api_version = item_api_map[item]["api_version"]

    while counter <= max_iterations:
        try:
            self.logger.info(f"Iteration {counter}/{max_iterations}")
            generic_object = await kubectl.get_generic_object(
                api_group=api_group,
                api_plural=api_plural,
                api_version=api_version,
                namespace=namespace,
                name=name,
            )
            if deleted:
                if generic_object:
                    # Still present: keep waiting for it to disappear.
                    self.logger.info(
                        f"Found {api_plural}. Name: {name}. Namespace: '{namespace}'. API: {api_group}/{api_version}"
                    )
                else:
                    self.logger.info(
                        f"{item} {name} deleted after {counter} iterations (aprox {counter*retry_time} seconds)"
                    )
                    return True, "COMPLETED"
            else:
                if not condition:
                    return True, "Nothing to check"
                if generic_object:
                    conditions = generic_object.get("status", {}).get("conditions", [])
                    self.logger.info(f"{item} status conditions: {conditions}")
                    # Only evaluate the JSONPath when an object was fetched;
                    # with no object there can be no match and we just retry.
                    jsonpath_expr = parse(condition["jsonpath_filter"])
                    match = jsonpath_expr.find(generic_object)
                    if match:
                        value = match[0].value
                        # Comparison defaults to equality; callers may supply
                        # a custom (expected, actual) predicate.
                        condition_function = condition.get("function", lambda x, y: x == y)
                        if condition_function(condition["value"], value):
                            self.logger.info(
                                f"{item} {name} met the condition {condition} in {counter} iterations (aprox {counter*retry_time} seconds)"
                            )
                            return True, "COMPLETED"
                else:
                    self.logger.info(
                        f"Could not find {api_plural}. Name: {name}. Namespace: '{namespace}'. API: {api_group}/{api_version}"
                    )
        except Exception as e:
            # Transient API errors are logged and retried until the timeout.
            self.logger.error(f"Exception: {e}")
        await asyncio.sleep(retry_time)
        counter += 1
    # Bug fix: this message was a plain string, so callers received the
    # literal "{item} {name} ..." placeholders instead of the values.
    return (
        False,
        f"{item} {name} was not ready after {max_iterations} iterations (aprox {timeout} seconds)",
    )