2 # Copyright 2021 Canonical Ltd.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
16 # For those usages not covered by the Apache License, Version 2.0 please
17 # contact: legal@canonical.com
19 # To get in touch with the maintainers, please contact:
20 # osm-charmers@lists.launchpad.net
23 # pylint: disable=E0213
import logging
from ipaddress import ip_network
from pathlib import Path
from typing import NoReturn, Optional
from urllib.parse import urlparse

from ops.main import main
from opslib.osm.charm import CharmedOsmBase, RelationsMissing
from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
from opslib.osm.interfaces.kafka import KafkaClient
from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
from opslib.osm.pod import (
    ContainerV3Builder,
    IngressResourceV3Builder,
    PodSpecV3Builder,
)
from opslib.osm.validator import ModelValidator, validator
logger = logging.getLogger(__name__)

# Default port exposed by the Prometheus kafka_exporter binary.
# NOTE(review): PORT is referenced below (container port, ingress rule) but its
# definition was lost in the visible source; 9308 is the kafka_exporter
# upstream default — TODO confirm against the original charm.
PORT = 9308
class ConfigModel(ModelValidator):
    """Validated charm configuration.

    Attributes:
        site_url: external URL of the exporter; must use an http(s) scheme.
        cluster_issuer: cert-manager cluster issuer used to obtain TLS certs.
        ingress_class: value for the kubernetes.io/ingress.class annotation.
        ingress_whitelist_source_range: CIDR allowed to reach the ingress.
        tls_secret_name: name of the TLS secret used by the ingress.
    """

    # pylint E0213 is disabled file-wide: opslib validators take `cls` first.
    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_class: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]

    @validator("site_url")
    def validate_site_url(cls, v):
        """Validate that site_url, when set, starts with http or https.

        Raises:
            ValueError: if the URL scheme is not http(s).
        """
        if v:
            # Fix: the original referenced `parsed` without ever assigning it.
            parsed = urlparse(v)
            if not parsed.scheme.startswith("http"):
                raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        """Validate that the whitelist source range is a parseable network.

        Raises:
            ValueError: if the value is not a valid CIDR network.
        """
        if v:
            # ip_network raises ValueError on malformed input, which the
            # validator machinery surfaces as a config error.
            ip_network(v)
        return v
class KafkaExporterCharm(CharmedOsmBase):
    """Charm that deploys the Prometheus kafka exporter on Kubernetes."""

    def __init__(self, *args) -> NoReturn:
        """Initialize the charm and register all relation observers."""
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> NoReturn:
        """Publishes scraping information for Prometheus.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            # With a site_url configured, Prometheus must scrape through the
            # ingress hostname; otherwise it reaches the app service directly.
            hostname = (
                urlparse(self.model.config["site_url"]).hostname
                if self.model.config["site_url"]
                else self.model.app.name
            )
            port = str(PORT)
            if self.model.config.get("site_url", "").startswith("https://"):
                port = "443"
            elif self.model.config.get("site_url", "").startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> NoReturn:
        """Publish dashboards for Grafana.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                # NOTE(review): the dashboard `name` kwarg was lost in the
                # visible source — "osm-kafka" reconstructed; TODO confirm.
                name="osm-kafka",
                dashboard=Path("files/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel):
        """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if self.kafka_client.is_missing_data_in_unit():
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder()

        # Build container
        container_builder = ContainerV3Builder(self.app.name, image_info)
        container_builder.add_port(name=self.app.name, port=PORT)
        # NOTE(review): probe path and several kwargs were lost in the visible
        # source — reconstructed from the surviving kwargs; TODO confirm.
        container_builder.add_http_readiness_probe(
            "/healthz",
            PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            "/healthz",
            PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
        container_builder.add_command(
            [
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )
        container = container_builder.build()

        # Add container to PodSpec
        pod_spec_builder.add_container(container)

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            annotations = {}
            if config.ingress_class:
                annotations["kubernetes.io/ingress.class"] = config.ingress_class
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )

            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            else:
                # Plain-http ingress: disable the nginx https redirect.
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            ingress_resource = ingress_resource_builder.build()
            pod_spec_builder.add_ingress_resource(ingress_resource)

        logger.debug(pod_spec_builder.build())

        return pod_spec_builder.build()
# Entry point: let the Operator Framework dispatch Juju events to the charm.
if __name__ == "__main__":
    main(KafkaExporterCharm)