blob: 2147781617f99303a33d45caa0773f636340e29a [file] [log] [blame]
sousaedub025f302020-11-16 14:40:14 +00001#!/usr/bin/env python3
2# Copyright 2021 Canonical Ltd.
3#
4# Licensed under the Apache License, Version 2.0 (the "License"); you may
5# not use this file except in compliance with the License. You may obtain
6# a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13# License for the specific language governing permissions and limitations
14# under the License.
15#
16# For those usages not covered by the Apache License, Version 2.0 please
17# contact: legal@canonical.com
18#
19# To get in touch with the maintainers, please contact:
20# osm-charmers@lists.launchpad.net
21##
22
sousaedu10721602021-05-18 17:28:17 +020023# pylint: disable=E0213
24
25from ipaddress import ip_network
sousaedub025f302020-11-16 14:40:14 +000026import logging
27from pathlib import Path
sousaedu10721602021-05-18 17:28:17 +020028from typing import NoReturn, Optional
sousaedu6332d382021-02-25 23:24:47 +010029from urllib.parse import urlparse
sousaedub025f302020-11-16 14:40:14 +000030
sousaedub025f302020-11-16 14:40:14 +000031from ops.main import main
sousaedu10721602021-05-18 17:28:17 +020032from opslib.osm.charm import CharmedOsmBase, RelationsMissing
33from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
34from opslib.osm.interfaces.kafka import KafkaClient
35from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
36from opslib.osm.pod import (
37 ContainerV3Builder,
38 IngressResourceV3Builder,
39 PodSpecV3Builder,
40)
41from opslib.osm.validator import ModelValidator, validator
sousaedub025f302020-11-16 14:40:14 +000042
sousaedub025f302020-11-16 14:40:14 +000043
# Module-level logger for this charm.
logger = logging.getLogger(__name__)

# TCP port the kafka-exporter container listens on and the service exposes.
PORT = 9308
sousaedub025f302020-11-16 14:40:14 +000047
48
sousaedu10721602021-05-18 17:28:17 +020049class ConfigModel(ModelValidator):
50 site_url: Optional[str]
51 cluster_issuer: Optional[str]
David Garciad68e0b42021-06-28 16:50:42 +020052 ingress_class: Optional[str]
sousaedu10721602021-05-18 17:28:17 +020053 ingress_whitelist_source_range: Optional[str]
54 tls_secret_name: Optional[str]
sousaedu3ddbbd12021-08-24 19:57:24 +010055 image_pull_policy: Optional[str]
sousaedu10721602021-05-18 17:28:17 +020056
57 @validator("site_url")
58 def validate_site_url(cls, v):
59 if v:
60 parsed = urlparse(v)
61 if not parsed.scheme.startswith("http"):
62 raise ValueError("value must start with http")
63 return v
64
65 @validator("ingress_whitelist_source_range")
66 def validate_ingress_whitelist_source_range(cls, v):
67 if v:
68 ip_network(v)
69 return v
sousaedub025f302020-11-16 14:40:14 +000070
sousaedu3ddbbd12021-08-24 19:57:24 +010071 @validator("image_pull_policy")
72 def validate_image_pull_policy(cls, v):
73 values = {
74 "always": "Always",
75 "ifnotpresent": "IfNotPresent",
76 "never": "Never",
77 }
78 v = v.lower()
79 if v not in values.keys():
80 raise ValueError("value must be always, ifnotpresent or never")
81 return values[v]
82
sousaedub025f302020-11-16 14:40:14 +000083
sousaedu10721602021-05-18 17:28:17 +020084class KafkaExporterCharm(CharmedOsmBase):
sousaedub025f302020-11-16 14:40:14 +000085 def __init__(self, *args) -> NoReturn:
sousaedu10721602021-05-18 17:28:17 +020086 super().__init__(*args, oci_image="image")
sousaedub025f302020-11-16 14:40:14 +000087
sousaedu10721602021-05-18 17:28:17 +020088 # Provision Kafka relation to exchange information
89 self.kafka_client = KafkaClient(self, "kafka")
90 self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
91 self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)
sousaedub025f302020-11-16 14:40:14 +000092
sousaedu10721602021-05-18 17:28:17 +020093 # Register relation to provide a Scraping Target
94 self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
sousaedub025f302020-11-16 14:40:14 +000095 self.framework.observe(
sousaedu10721602021-05-18 17:28:17 +020096 self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
sousaedub025f302020-11-16 14:40:14 +000097 )
98
sousaedu10721602021-05-18 17:28:17 +020099 # Register relation to provide a Dasboard Target
100 self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
101 self.framework.observe(
102 self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
103 )
104
105 def _publish_scrape_info(self, event) -> NoReturn:
106 """Publishes scraping information for Prometheus.
sousaedub025f302020-11-16 14:40:14 +0000107
108 Args:
sousaedu10721602021-05-18 17:28:17 +0200109 event (EventBase): Prometheus relation event.
sousaedub025f302020-11-16 14:40:14 +0000110 """
sousaedu10721602021-05-18 17:28:17 +0200111 if self.unit.is_leader():
112 hostname = (
113 urlparse(self.model.config["site_url"]).hostname
114 if self.model.config["site_url"]
115 else self.model.app.name
sousaedub025f302020-11-16 14:40:14 +0000116 )
sousaedu10721602021-05-18 17:28:17 +0200117 port = str(PORT)
118 if self.model.config.get("site_url", "").startswith("https://"):
119 port = "443"
120 elif self.model.config.get("site_url", "").startswith("http://"):
121 port = "80"
sousaedub025f302020-11-16 14:40:14 +0000122
sousaedu10721602021-05-18 17:28:17 +0200123 self.scrape_target.publish_info(
124 hostname=hostname,
125 port=port,
126 metrics_path="/metrics",
127 scrape_interval="30s",
128 scrape_timeout="15s",
129 )
sousaedub025f302020-11-16 14:40:14 +0000130
sousaedu10721602021-05-18 17:28:17 +0200131 def _publish_dashboard_info(self, event) -> NoReturn:
132 """Publish dashboards for Grafana.
133
134 Args:
135 event (EventBase): Grafana relation event.
136 """
137 if self.unit.is_leader():
138 self.dashboard_target.publish_info(
139 name="osm-kafka",
David Garciad680be42021-08-17 11:03:55 +0200140 dashboard=Path("templates/kafka_exporter_dashboard.json").read_text(),
sousaedu10721602021-05-18 17:28:17 +0200141 )
142
143 def _check_missing_dependencies(self, config: ConfigModel):
144 """Check if there is any relation missing.
145
146 Args:
147 config (ConfigModel): object with configuration information.
148
149 Raises:
150 RelationsMissing: if kafka is missing.
151 """
152 missing_relations = []
153
154 if self.kafka_client.is_missing_data_in_unit():
155 missing_relations.append("kafka")
156
157 if missing_relations:
158 raise RelationsMissing(missing_relations)
159
160 def build_pod_spec(self, image_info):
161 """Build the PodSpec to be used.
162
163 Args:
164 image_info (str): container image information.
165
166 Returns:
167 Dict: PodSpec information.
168 """
169 # Validate config
170 config = ConfigModel(**dict(self.config))
171
172 # Check relations
173 self._check_missing_dependencies(config)
174
175 # Create Builder for the PodSpec
176 pod_spec_builder = PodSpecV3Builder()
177
178 # Build container
sousaedu3ddbbd12021-08-24 19:57:24 +0100179 container_builder = ContainerV3Builder(
180 self.app.name, image_info, config.image_pull_policy
181 )
sousaedu10721602021-05-18 17:28:17 +0200182 container_builder.add_port(name=self.app.name, port=PORT)
183 container_builder.add_http_readiness_probe(
184 path="/api/health",
185 port=PORT,
186 initial_delay_seconds=10,
187 period_seconds=10,
188 timeout_seconds=5,
189 success_threshold=1,
190 failure_threshold=3,
191 )
192 container_builder.add_http_liveness_probe(
193 path="/api/health",
194 port=PORT,
195 initial_delay_seconds=60,
196 timeout_seconds=30,
197 failure_threshold=10,
198 )
199 container_builder.add_command(
200 [
201 "kafka_exporter",
202 f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
203 ]
204 )
205 container = container_builder.build()
206
207 # Add container to PodSpec
208 pod_spec_builder.add_container(container)
209
210 # Add ingress resources to PodSpec if site url exists
211 if config.site_url:
212 parsed = urlparse(config.site_url)
David Garciad68e0b42021-06-28 16:50:42 +0200213 annotations = {}
214 if config.ingress_class:
215 annotations["kubernetes.io/ingress.class"] = config.ingress_class
sousaedu10721602021-05-18 17:28:17 +0200216 ingress_resource_builder = IngressResourceV3Builder(
217 f"{self.app.name}-ingress", annotations
218 )
219
220 if config.ingress_whitelist_source_range:
221 annotations[
222 "nginx.ingress.kubernetes.io/whitelist-source-range"
223 ] = config.ingress_whitelist_source_range
224
225 if config.cluster_issuer:
226 annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer
227
228 if parsed.scheme == "https":
229 ingress_resource_builder.add_tls(
230 [parsed.hostname], config.tls_secret_name
231 )
232 else:
233 annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"
234
235 ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
236 ingress_resource = ingress_resource_builder.build()
237 pod_spec_builder.add_ingress_resource(ingress_resource)
238
239 logger.debug(pod_spec_builder.build())
240
241 return pod_spec_builder.build()
sousaedub025f302020-11-16 14:40:14 +0000242
243
if __name__ == "__main__":
    # Hand control to the Operator Framework event loop with this charm class.
    main(KafkaExporterCharm)