blob: 3b599a8501ce889f6dfcc104af43982be59dd491 [file] [log] [blame]
sousaedub025f302020-11-16 14:40:14 +00001#!/usr/bin/env python3
2# Copyright 2021 Canonical Ltd.
3#
4# Licensed under the Apache License, Version 2.0 (the "License"); you may
5# not use this file except in compliance with the License. You may obtain
6# a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13# License for the specific language governing permissions and limitations
14# under the License.
15#
16# For those usages not covered by the Apache License, Version 2.0 please
17# contact: legal@canonical.com
18#
19# To get in touch with the maintainers, please contact:
20# osm-charmers@lists.launchpad.net
21##
22
sousaedu10721602021-05-18 17:28:17 +020023# pylint: disable=E0213
24
25from ipaddress import ip_network
sousaedub025f302020-11-16 14:40:14 +000026import logging
27from pathlib import Path
sousaedu10721602021-05-18 17:28:17 +020028from typing import NoReturn, Optional
sousaedu6332d382021-02-25 23:24:47 +010029from urllib.parse import urlparse
sousaedub025f302020-11-16 14:40:14 +000030
sousaedub025f302020-11-16 14:40:14 +000031from ops.main import main
sousaedu10721602021-05-18 17:28:17 +020032from opslib.osm.charm import CharmedOsmBase, RelationsMissing
33from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
34from opslib.osm.interfaces.kafka import KafkaClient
35from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
36from opslib.osm.pod import (
37 ContainerV3Builder,
38 IngressResourceV3Builder,
39 PodSpecV3Builder,
40)
41from opslib.osm.validator import ModelValidator, validator
sousaedub025f302020-11-16 14:40:14 +000042
sousaedub025f302020-11-16 14:40:14 +000043
# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)

# Port the exporter container listens on; exposed via add_port() and used
# as the scrape/ingress target port below.
PORT = 9308
sousaedub025f302020-11-16 14:40:14 +000047
48
class ConfigModel(ModelValidator):
    """Validated view of the charm's configuration options.

    All options are optional strings; the validators below reject
    malformed values at pod-spec build time.
    """

    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]

    @validator("site_url")
    def validate_site_url(cls, v):
        """Require an http(s) scheme when a site URL is given."""
        if not v:
            return v
        scheme = urlparse(v).scheme
        if not scheme.startswith("http"):
            raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        """Require a parseable IP network when a whitelist range is given."""
        if v:
            # ip_network() raises ValueError on malformed input.
            ip_network(v)
        return v
sousaedub025f302020-11-16 14:40:14 +000068
69
class KafkaExporterCharm(CharmedOsmBase):
    """Kubernetes charm deploying the kafka-exporter Prometheus exporter.

    Relates to Kafka (to know which broker to scrape), offers itself as a
    Prometheus scrape target, and publishes a Grafana dashboard.
    """

    def __init__(self, *args) -> None:
        """Initialize the charm and register relation observers.

        Return annotations were previously ``NoReturn``; ``None`` is correct
        since these methods return normally.
        """
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> None:
        """Publish scraping information for Prometheus.

        Only the leader publishes. When ``site_url`` is configured, its
        hostname (and the standard port implied by its scheme) is advertised
        instead of the application name and exporter port.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            # "or ''" also normalizes an explicit None value; a plain
            # .get("site_url", "") would raise AttributeError on None
            # because the default only applies when the key is absent.
            site_url = self.model.config.get("site_url") or ""
            hostname = urlparse(site_url).hostname if site_url else self.model.app.name
            port = str(PORT)
            if site_url.startswith("https://"):
                port = "443"
            elif site_url.startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> None:
        """Publish dashboards for Grafana.

        Only the leader publishes. The dashboard JSON is shipped with the
        charm under ``files/``.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                name="osm-kafka",
                dashboard=Path("files/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel) -> None:
        """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.
                (Currently unused; kept for signature consistency with
                sibling charms.)

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if self.kafka_client.is_missing_data_in_unit():
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.

        Raises:
            ValueError: if the charm configuration is invalid.
            RelationsMissing: if the kafka relation is missing.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder()

        # Build container
        container_builder = ContainerV3Builder(self.app.name, image_info)
        container_builder.add_port(name=self.app.name, port=PORT)
        # NOTE(review): confirm the exporter actually serves /api/health;
        # path kept as-is from the original revision.
        container_builder.add_http_readiness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
        # Point the exporter at the broker obtained from the kafka relation.
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )

        # Add container to PodSpec
        pod_spec_builder.add_container(container_builder.build())

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            annotations = {"kubernetes.io/ingress.class": "public"}

            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme != "https":
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            # Finish mutating `annotations` before handing it to the builder;
            # the previous revision relied on the builder keeping a live
            # reference to the dict while it was still being updated.
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            pod_spec_builder.add_ingress_resource(ingress_resource_builder.build())

        # Build once: the previous revision built the spec a second time
        # just to log it.
        pod_spec = pod_spec_builder.build()
        logger.debug(pod_spec)

        return pod_spec
sousaedub025f302020-11-16 14:40:14 +0000224
225
# Script entry point: hand control to the Operator framework's event loop.
if __name__ == "__main__":
    main(KafkaExporterCharm)