CharmHub and new kafka and zookeeper charms
[osm/devops.git] installers/charm/kafka-exporter/src/charm.py
#!/usr/bin/env python3
# Copyright 2021 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# For those usages not covered by the Apache License, Version 2.0 please
# contact: legal@canonical.com
#
# To get in touch with the maintainers, please contact:
# osm-charmers@lists.launchpad.net
##

# pylint: disable=E0213

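"""Kafka-exporter charm for OSM.

Deploys the kafka-exporter container, points it at the Kafka broker obtained
over the kafka relation, and publishes scrape and dashboard targets so that
Prometheus and Grafana can consume the exported metrics.
"""
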
from ipaddress import ip_network
import logging
from pathlib import Path
from typing import Optional
from urllib.parse import urlparse

from charms.kafka_k8s.v0.kafka import KafkaEvents, KafkaRequires
from ops.main import main
from opslib.osm.charm import CharmedOsmBase, RelationsMissing
from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
from opslib.osm.pod import (
    ContainerV3Builder,
    IngressResourceV3Builder,
    PodSpecV3Builder,
)
from opslib.osm.validator import ModelValidator, validator


logger = logging.getLogger(__name__)

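# Port on which the kafka-exporter container listens; its /metrics endpoint
# is scraped here (see _publish_scrape_info below).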
PORT = 9308


class ConfigModel(ModelValidator):
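    """Validated charm configuration options (populated from Juju config)."""
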
    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_class: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]
    image_pull_policy: str
    security_context: bool

    @validator("site_url")
    def validate_site_url(cls, v):
        if v:
            parsed = urlparse(v)
            if not parsed.scheme.startswith("http"):
                raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        if v:
            ip_network(v)
        return v

    @validator("image_pull_policy")
    def validate_image_pull_policy(cls, v):
        values = {
            "always": "Always",
            "ifnotpresent": "IfNotPresent",
            "never": "Never",
        }
        v = v.lower()
        if v not in values:
            raise ValueError("value must be always, ifnotpresent or never")
        return values[v]


class KafkaExporterCharm(CharmedOsmBase):
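    """Kubernetes charm that deploys kafka-exporter and integrates it with
    Kafka (metrics source), Prometheus (scrape target), and Grafana (dashboards)."""

    # Expose the Kafka charm library's custom events (kafka_available, kafka_broken)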
    on = KafkaEvents()

    def __init__(self, *args) -> None:
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka = KafkaRequires(self)
        self.framework.observe(self.on.kafka_available, self.configure_pod)
        self.framework.observe(self.on.kafka_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> None:
        """Publish scraping information for Prometheus.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            hostname = (
                urlparse(self.model.config["site_url"]).hostname
                if self.model.config["site_url"]
                else self.model.app.name
            )
            port = str(PORT)
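            # When site_url is set, Prometheus scrapes through the ingress, so
            # advertise the standard HTTP/HTTPS port instead of the exporter port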
            if self.model.config.get("site_url", "").startswith("https://"):
                port = "443"
            elif self.model.config.get("site_url", "").startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> None:
        """Publish dashboards for Grafana.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                name="osm-kafka",
                dashboard=Path("templates/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel):
        """Check whether any required relation is missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if not self.kafka.host or not self.kafka.port:
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder(
            enable_security_context=config.security_context
        )

        # Build container
        container_builder = ContainerV3Builder(
            self.app.name,
            image_info,
            config.image_pull_policy,
            run_as_non_root=config.security_context,
        )
        container_builder.add_port(name=self.app.name, port=PORT)
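        # HTTP readiness/liveness checks against the exporter's health endpoint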
        container_builder.add_http_readiness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
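        # Point the exporter at the Kafka broker address received over the relation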
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka.host}:{self.kafka.port}",
            ]
        )
        container = container_builder.build()

        # Add container to PodSpec
        pod_spec_builder.add_container(container)

        # Add ingress resources to PodSpec if site_url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            annotations = {}
            if config.ingress_class:
                annotations["kubernetes.io/ingress.class"] = config.ingress_class
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )

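            # NOTE: annotations added below are assumed to still reach the rendered
            # ingress, since the builder keeps a reference to this same `annotations` dict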
            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            else:
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            ingress_resource = ingress_resource_builder.build()
            pod_spec_builder.add_ingress_resource(ingress_resource)

        return pod_spec_builder.build()


if __name__ == "__main__":
    main(KafkaExporterCharm)
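
# A minimal deployment sketch (application and counterpart endpoint names below
# are hypothetical; only the charm's own endpoints come from the code above):
#   juju deploy osm-kafka-exporter
#   juju relate osm-kafka-exporter kafka-k8s
#   juju relate osm-kafka-exporter:prometheus-scrape prometheus-k8s
#   juju relate osm-kafka-exporter:grafana-dashboard grafana-k8s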