Adding security_context flag to charms
[osm/devops.git] / installers / charm / kafka-exporter / src / charm.py
#!/usr/bin/env python3
# Copyright 2021 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# For those usages not covered by the Apache License, Version 2.0 please
# contact: legal@canonical.com
#
# To get in touch with the maintainers, please contact:
# osm-charmers@lists.launchpad.net
##

# pylint: disable=E0213

from ipaddress import ip_network
import logging
from pathlib import Path
from typing import NoReturn, Optional
from urllib.parse import urlparse

from ops.main import main
from opslib.osm.charm import CharmedOsmBase, RelationsMissing
from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
from opslib.osm.interfaces.kafka import KafkaClient
from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
from opslib.osm.pod import (
    ContainerV3Builder,
    IngressResourceV3Builder,
    PodSpecV3Builder,
)
from opslib.osm.validator import ModelValidator, validator


logger = logging.getLogger(__name__)

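# Port where kafka-exporter exposes its Prometheus metrics endpoint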
PORT = 9308


class ConfigModel(ModelValidator):
    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_class: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]
    image_pull_policy: str
    security_context: bool
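    # When security_context is true, the pod spec is built with a security
    # context enabled and the container is run as non-root (see build_pod_spec).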

    @validator("site_url")
    def validate_site_url(cls, v):
        if v:
            parsed = urlparse(v)
            if not parsed.scheme.startswith("http"):
                raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        if v:
            ip_network(v)
        return v

    @validator("image_pull_policy")
    def validate_image_pull_policy(cls, v):
        values = {
            "always": "Always",
            "ifnotpresent": "IfNotPresent",
            "never": "Never",
        }
        v = v.lower()
        if v not in values.keys():
            raise ValueError("value must be always, ifnotpresent or never")
        return values[v]


class KafkaExporterCharm(CharmedOsmBase):
    def __init__(self, *args) -> NoReturn:
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )
        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> NoReturn:
        """Publishes scraping information for Prometheus.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            hostname = (
                urlparse(self.model.config["site_url"]).hostname
                if self.model.config["site_url"]
                else self.model.app.name
            )
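            # When an ingress site_url is configured, Prometheus is told to
            # scrape through the ingress (443 for https, 80 for http) instead
            # of the pod port.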
            port = str(PORT)
            if self.model.config.get("site_url", "").startswith("https://"):
                port = "443"
            elif self.model.config.get("site_url", "").startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> NoReturn:
        """Publish dashboards for Grafana.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                name="osm-kafka",
                dashboard=Path("templates/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel):
145 """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if self.kafka_client.is_missing_data_in_unit():
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
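        # enable_security_context asks the builder to include a pod security
        # context in the generated spec when the security_context option is set.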
        pod_spec_builder = PodSpecV3Builder(
            enable_security_context=config.security_context
        )

        # Build container
        container_builder = ContainerV3Builder(
            self.app.name,
            image_info,
            config.image_pull_policy,
            run_as_non_root=config.security_context,
        )
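        # The same flag drives run_as_non_root, so the exporter process does
        # not run as root when security_context is enabled.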
        container_builder.add_port(name=self.app.name, port=PORT)
        container_builder.add_http_readiness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/api/health",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
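        # Point the exporter at the Kafka broker address advertised over the
        # "kafka" relation.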
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )
        container = container_builder.build()

        # Add container to PodSpec
        pod_spec_builder.add_container(container)

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            annotations = {}
            if config.ingress_class:
                annotations["kubernetes.io/ingress.class"] = config.ingress_class
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )
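            # The builder keeps a reference to the annotations dict, so the
            # entries added below still end up on the ingress resource.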

            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            else:
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            ingress_resource = ingress_resource_builder.build()
            pod_spec_builder.add_ingress_resource(ingress_resource)

        return pod_spec_builder.build()


if __name__ == "__main__":
    main(KafkaExporterCharm)