Fix validation error for ImagePullPolicy in charms
[osm/devops.git] / installers / charm / kafka-exporter / src / charm.py
1 #!/usr/bin/env python3
2 # Copyright 2021 Canonical Ltd.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
14 # under the License.
15 #
16 # For those usages not covered by the Apache License, Version 2.0 please
17 # contact: legal@canonical.com
18 #
19 # To get in touch with the maintainers, please contact:
20 # osm-charmers@lists.launchpad.net
21 ##
22
23 # pylint: disable=E0213
24
25 from ipaddress import ip_network
26 import logging
27 from pathlib import Path
28 from typing import NoReturn, Optional
29 from urllib.parse import urlparse
30
31 from ops.main import main
32 from opslib.osm.charm import CharmedOsmBase, RelationsMissing
33 from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
34 from opslib.osm.interfaces.kafka import KafkaClient
35 from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
36 from opslib.osm.pod import (
37 ContainerV3Builder,
38 IngressResourceV3Builder,
39 PodSpecV3Builder,
40 )
41 from opslib.osm.validator import ModelValidator, validator
42
43
# Module-level logger for this charm.
logger = logging.getLogger(__name__)

# TCP port the kafka-exporter container listens on for /metrics scraping.
PORT = 9308
47
48
class ConfigModel(ModelValidator):
    """Validated view of the charm configuration options."""

    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_class: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]
    image_pull_policy: str

    @validator("site_url")
    def validate_site_url(cls, v):
        # An unset URL is fine; otherwise it must carry an http(s) scheme.
        if not v:
            return v
        if not urlparse(v).scheme.startswith("http"):
            raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        # ip_network raises ValueError for anything that is not a valid CIDR.
        if v:
            ip_network(v)
        return v

    @validator("image_pull_policy")
    def validate_image_pull_policy(cls, v):
        # Accept any casing from the user but hand Kubernetes the exact
        # canonical spelling it expects for imagePullPolicy.
        canonical = {
            "always": "Always",
            "ifnotpresent": "IfNotPresent",
            "never": "Never",
        }
        key = v.lower()
        if key not in canonical:
            raise ValueError("value must be always, ifnotpresent or never")
        return canonical[key]
82
83
class KafkaExporterCharm(CharmedOsmBase):
    """Charm that deploys the kafka-exporter and wires its relations."""

    def __init__(self, *args) -> NoReturn:
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> NoReturn:
        """Publishes scraping information for Prometheus.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            # An unset optional config option surfaces as None, so normalize
            # it to "" before calling str methods on it (a plain
            # .get("site_url", "") would still return None here).
            site_url = self.model.config.get("site_url") or ""
            hostname = urlparse(site_url).hostname if site_url else self.model.app.name
            port = str(PORT)
            # When exposed through an ingress, Prometheus must scrape the
            # standard HTTP(S) port instead of the exporter's own port.
            if site_url.startswith("https://"):
                port = "443"
            elif site_url.startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> NoReturn:
        """Publish dashboards for Grafana.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                name="osm-kafka",
                dashboard=Path("templates/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel):
        """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if self.kafka_client.is_missing_data_in_unit():
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.

        Raises:
            RelationsMissing: if the kafka relation is not yet established.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder()

        # Build container
        container_builder = ContainerV3Builder(
            self.app.name, image_info, config.image_pull_policy
        )
        container_builder.add_port(name=self.app.name, port=PORT)
        container_builder.add_http_readiness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )
        container = container_builder.build()

        # Add container to PodSpec
        pod_spec_builder.add_container(container)

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            annotations = {}
            if config.ingress_class:
                annotations["kubernetes.io/ingress.class"] = config.ingress_class
            # NOTE: the builder keeps a reference to this dict, so annotations
            # added below still end up on the ingress resource.
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )

            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            else:
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            ingress_resource = ingress_resource_builder.build()
            pod_spec_builder.add_ingress_resource(ingress_resource)

        # Build once; the original built the spec a second time just to log it.
        pod_spec = pod_spec_builder.build()
        logger.debug(pod_spec)

        return pod_spec
242
243
# Entry point: hand control to the ops framework with this charm class.
if __name__ == "__main__":
    main(KafkaExporterCharm)