1316a4df215168e8dd3da4867a656492c6237ff6
[osm/devops.git] / installers / charm / kafka-exporter / src / charm.py
1 #!/usr/bin/env python3
2 # Copyright 2021 Canonical Ltd.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
14 # under the License.
15 #
16 # For those usages not covered by the Apache License, Version 2.0 please
17 # contact: legal@canonical.com
18 #
19 # To get in touch with the maintainers, please contact:
20 # osm-charmers@lists.launchpad.net
21 ##
22
23 # pylint: disable=E0213
24
25 from ipaddress import ip_network
26 import logging
27 from pathlib import Path
28 from typing import NoReturn, Optional
29 from urllib.parse import urlparse
30
31 from ops.main import main
32 from opslib.osm.charm import CharmedOsmBase, RelationsMissing
33 from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
34 from opslib.osm.interfaces.kafka import KafkaClient
35 from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
36 from opslib.osm.pod import (
37 ContainerV3Builder,
38 IngressResourceV3Builder,
39 PodSpecV3Builder,
40 )
41 from opslib.osm.validator import ModelValidator, validator
42
43
logger = logging.getLogger(__name__)

# TCP port the kafka-exporter container listens on; also used for the
# container port, probes, and (absent an ingress) the published scrape port.
PORT = 9308
47
48
class ConfigModel(ModelValidator):
    """Validated view of the charm configuration.

    Attributes map one-to-one to the charm's config options; the validators
    normalize and sanity-check values before the pod spec is built.
    """

    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_class: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]
    image_pull_policy: str
    security_context: bool

    @validator("site_url")
    def validate_site_url(cls, v):
        """Ensure site_url, when set, uses an http or https scheme.

        Raises:
            ValueError: if the URL scheme does not start with "http".
        """
        if v:
            parsed = urlparse(v)
            if not parsed.scheme.startswith("http"):
                raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        """Ensure the whitelist source range, when set, is a valid network.

        Raises:
            ValueError: propagated from ip_network() on an invalid CIDR.
        """
        if v:
            # ip_network raises ValueError for malformed network specs;
            # the value itself is kept verbatim for the ingress annotation.
            ip_network(v)
        return v

    @validator("image_pull_policy")
    def validate_image_pull_policy(cls, v):
        """Map a case-insensitive pull policy to its Kubernetes spelling.

        Returns:
            str: one of "Always", "IfNotPresent" or "Never".

        Raises:
            ValueError: if the value is not a recognized pull policy.
        """
        values = {
            "always": "Always",
            "ifnotpresent": "IfNotPresent",
            "never": "Never",
        }
        v = v.lower()
        # Membership test directly on the dict; .keys() was redundant.
        if v not in values:
            raise ValueError("value must be always, ifnotpresent or never")
        return values[v]
83
84
class KafkaExporterCharm(CharmedOsmBase):
    """Charm that deploys the kafka-exporter metrics service.

    Relations:
        kafka: consumed; supplies the Kafka host/port the exporter scrapes.
        prometheus-scrape: provided; publishes this exporter as a scrape target.
        grafana-dashboard: provided; publishes the Kafka dashboard JSON.
    """

    def __init__(self, *args) -> NoReturn:
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> NoReturn:
        """Publishes scraping information for Prometheus.

        Only the leader publishes. When site_url is configured, the ingress
        hostname and the scheme's default port (443 for https, 80 for http)
        are advertised; otherwise the app name and the exporter port are used.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            # Read the option once and guard against None so the
            # .startswith() checks below cannot raise AttributeError.
            site_url = self.model.config.get("site_url") or ""
            hostname = urlparse(site_url).hostname if site_url else self.model.app.name
            port = str(PORT)
            if site_url.startswith("https://"):
                port = "443"
            elif site_url.startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> NoReturn:
        """Publish dashboards for Grafana.

        Only the leader publishes. The dashboard JSON is read from the
        charm's bundled templates directory.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                name="osm-kafka",
                dashboard=Path("templates/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel):
        """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        # Kafka data may arrive via either the unit or the app bag; only
        # flag the relation when both are empty.
        if (
            self.kafka_client.is_missing_data_in_unit()
            and self.kafka_client.is_missing_data_in_app()
        ):
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.

        Raises:
            ValueError: if the charm configuration is invalid.
            RelationsMissing: if the kafka relation has no data yet.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder(
            enable_security_context=config.security_context
        )

        # Build container
        container_builder = ContainerV3Builder(
            self.app.name,
            image_info,
            config.image_pull_policy,
            run_as_non_root=config.security_context,
        )
        container_builder.add_port(name=self.app.name, port=PORT)
        # NOTE(review): probe path "/api/health" is inherited as-is; confirm
        # kafka_exporter actually serves this endpoint on PORT.
        container_builder.add_http_readiness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
        # Point the exporter at the Kafka broker from the relation data.
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )
        container = container_builder.build()

        # Add container to PodSpec
        pod_spec_builder.add_container(container)

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            annotations = {}
            if config.ingress_class:
                annotations["kubernetes.io/ingress.class"] = config.ingress_class
            # The builder keeps a reference to this dict, so annotations
            # added below are still picked up at build() time.
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )

            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            else:
                # Plain-http ingress: disable the nginx https redirect.
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            ingress_resource = ingress_resource_builder.build()
            pod_spec_builder.add_ingress_resource(ingress_resource)

        return pod_spec_builder.build()
249
250
if __name__ == "__main__":
    # Entry point: hand control to the ops framework's event loop.
    main(KafkaExporterCharm)