blob: a15abc841b57356cdbaa507a92d13fd6431c23e6 [file] [log] [blame]
sousaedub025f302020-11-16 14:40:14 +00001#!/usr/bin/env python3
2# Copyright 2021 Canonical Ltd.
3#
4# Licensed under the Apache License, Version 2.0 (the "License"); you may
5# not use this file except in compliance with the License. You may obtain
6# a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13# License for the specific language governing permissions and limitations
14# under the License.
15#
16# For those usages not covered by the Apache License, Version 2.0 please
17# contact: legal@canonical.com
18#
19# To get in touch with the maintainers, please contact:
20# osm-charmers@lists.launchpad.net
21##
22
sousaedu10721602021-05-18 17:28:17 +020023# pylint: disable=E0213
24
25from ipaddress import ip_network
sousaedub025f302020-11-16 14:40:14 +000026import logging
27from pathlib import Path
sousaedu10721602021-05-18 17:28:17 +020028from typing import NoReturn, Optional
sousaedu6332d382021-02-25 23:24:47 +010029from urllib.parse import urlparse
sousaedub025f302020-11-16 14:40:14 +000030
sousaedub025f302020-11-16 14:40:14 +000031from ops.main import main
sousaedu10721602021-05-18 17:28:17 +020032from opslib.osm.charm import CharmedOsmBase, RelationsMissing
33from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
34from opslib.osm.interfaces.kafka import KafkaClient
35from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
36from opslib.osm.pod import (
37 ContainerV3Builder,
38 IngressResourceV3Builder,
39 PodSpecV3Builder,
40)
41from opslib.osm.validator import ModelValidator, validator
sousaedub025f302020-11-16 14:40:14 +000042
sousaedub025f302020-11-16 14:40:14 +000043
logger = logging.getLogger(__name__)

# TCP port the kafka-exporter container listens on for /metrics scrapes.
PORT = 9308
sousaedub025f302020-11-16 14:40:14 +000047
48
class ConfigModel(ModelValidator):
    """Validated view of the charm's configuration options."""

    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_class: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]

    @validator("site_url")
    def validate_site_url(cls, v):
        # An unset/empty URL is acceptable; otherwise require http(s).
        if not v:
            return v
        if not urlparse(v).scheme.startswith("http"):
            raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        # ip_network raises ValueError for a malformed CIDR range,
        # which surfaces as a validation error.
        if v:
            ip_network(v)
        return v
sousaedub025f302020-11-16 14:40:14 +000069
70
class KafkaExporterCharm(CharmedOsmBase):
    """Charm deploying kafka-exporter, a Prometheus exporter for Kafka."""

    def __init__(self, *args) -> None:
        """Set up relations and event observers.

        Args:
            *args: positional arguments forwarded to CharmedOsmBase.
        """
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> None:
        """Publish scraping information for Prometheus.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            # `or ""` guards against None: an unset config option may be
            # reported as None, and the original
            # `config.get("site_url", "").startswith(...)` would then
            # raise AttributeError.
            site_url = self.model.config.get("site_url") or ""
            hostname = urlparse(site_url).hostname if site_url else self.model.app.name
            # When exposed through an ingress, scrape through the standard
            # HTTP(S) port instead of the exporter's own port.
            port = str(PORT)
            if site_url.startswith("https://"):
                port = "443"
            elif site_url.startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> None:
        """Publish dashboards for Grafana.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                name="osm-kafka",
                dashboard=Path("files/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel) -> None:
        """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if self.kafka_client.is_missing_data_in_unit():
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder()

        # Build container
        container_builder = ContainerV3Builder(self.app.name, image_info)
        container_builder.add_port(name=self.app.name, port=PORT)
        # NOTE(review): confirm the exporter image really serves
        # /api/health — upstream kafka_exporter builds expose /healthz.
        container_builder.add_http_readiness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )

        # Add container to PodSpec
        pod_spec_builder.add_container(container_builder.build())

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            # Assemble the complete annotation set BEFORE constructing the
            # ingress builder: the original mutated the dict afterwards and
            # silently relied on the builder keeping a live reference.
            annotations = {}
            if config.ingress_class:
                annotations["kubernetes.io/ingress.class"] = config.ingress_class
            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range
            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer
            if parsed.scheme != "https":
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )
            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            pod_spec_builder.add_ingress_resource(ingress_resource_builder.build())

        # Build once; log and return the same spec (original built twice).
        pod_spec = pod_spec_builder.build()
        logger.debug(pod_spec)
        return pod_spec
sousaedub025f302020-11-16 14:40:14 +0000227
228
if __name__ == "__main__":
    # Hand control of the charm's event lifecycle to the ops framework.
    main(KafkaExporterCharm)