# Copyright 2021 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# For those usages not covered by the Apache License, Version 2.0 please
# contact: legal@canonical.com
#
# To get in touch with the maintainers, please contact:
# osm-charmers@lists.launchpad.net
#
# pylint: disable=E0213
import logging
from ipaddress import ip_network
from pathlib import Path
from typing import NoReturn, Optional
from urllib.parse import urlparse

from ops.main import main
from opslib.osm.charm import CharmedOsmBase, RelationsMissing
from opslib.osm.interfaces.grafana import GrafanaDashboardTarget
from opslib.osm.interfaces.kafka import KafkaClient
from opslib.osm.interfaces.prometheus import PrometheusScrapeTarget
from opslib.osm.pod import (
    ContainerV3Builder,
    IngressResourceV3Builder,
    PodSpecV3Builder,
)
from opslib.osm.validator import ModelValidator, validator
logger = logging.getLogger(__name__)

# Port the kafka-exporter container listens on for metrics scraping.
# NOTE(review): 9308 is the upstream kafka_exporter default web listen port —
# confirm against the charm's metadata/config. This constant is referenced by
# build_pod_spec() but was not defined anywhere in the visible file.
PORT = 9308
class ConfigModel(ModelValidator):
    """Validated view of the charm configuration options.

    All options are optional strings; validators reject malformed values.
    """

    site_url: Optional[str]
    cluster_issuer: Optional[str]
    ingress_whitelist_source_range: Optional[str]
    tls_secret_name: Optional[str]

    @validator("site_url")
    def validate_site_url(cls, v):
        """Ensure site_url, when set, is an http(s) URL.

        Fix: the original referenced ``parsed`` without ever assigning it
        (``parsed = urlparse(v)`` was missing), and never returned the value.
        """
        if v:
            parsed = urlparse(v)
            if not parsed.scheme.startswith("http"):
                raise ValueError("value must start with http")
        return v

    @validator("ingress_whitelist_source_range")
    def validate_ingress_whitelist_source_range(cls, v):
        """Ensure the whitelist source range, when set, is a valid CIDR.

        ``ip_network`` raises ValueError for malformed networks, which is the
        validator's failure signal.
        """
        if v:
            ip_network(v)
        return v
class KafkaExporterCharm(CharmedOsmBase):
    """Charm that deploys kafka-exporter and publishes its metrics endpoint.

    Provides scraping information to Prometheus and a dashboard to Grafana,
    and requires a Kafka relation to know which broker to export metrics for.
    """

    def __init__(self, *args) -> NoReturn:
        """Initialize the charm and register all relation observers."""
        super().__init__(*args, oci_image="image")

        # Provision Kafka relation to exchange information
        self.kafka_client = KafkaClient(self, "kafka")
        self.framework.observe(self.on["kafka"].relation_changed, self.configure_pod)
        self.framework.observe(self.on["kafka"].relation_broken, self.configure_pod)

        # Register relation to provide a Scraping Target
        self.scrape_target = PrometheusScrapeTarget(self, "prometheus-scrape")
        self.framework.observe(
            self.on["prometheus-scrape"].relation_joined, self._publish_scrape_info
        )

        # Register relation to provide a Dashboard Target
        self.dashboard_target = GrafanaDashboardTarget(self, "grafana-dashboard")
        self.framework.observe(
            self.on["grafana-dashboard"].relation_joined, self._publish_dashboard_info
        )

    def _publish_scrape_info(self, event) -> NoReturn:
        """Publishes scraping information for Prometheus.

        Args:
            event (EventBase): Prometheus relation event.
        """
        if self.unit.is_leader():
            # Scrape via the ingress hostname when site_url is set, otherwise
            # via the application's in-cluster service name.
            hostname = (
                urlparse(self.model.config["site_url"]).hostname
                if self.model.config["site_url"]
                else self.model.app.name
            )
            # Ingress terminates on the standard ports; fall back to the
            # exporter's own port for in-cluster scraping.
            port = str(PORT)
            if self.model.config.get("site_url", "").startswith("https://"):
                port = "443"
            elif self.model.config.get("site_url", "").startswith("http://"):
                port = "80"

            self.scrape_target.publish_info(
                hostname=hostname,
                port=port,
                metrics_path="/metrics",
                scrape_interval="30s",
                scrape_timeout="15s",
            )

    def _publish_dashboard_info(self, event) -> NoReturn:
        """Publish dashboards for Grafana.

        Args:
            event (EventBase): Grafana relation event.
        """
        if self.unit.is_leader():
            self.dashboard_target.publish_info(
                # NOTE(review): dashboard name reconstructed from a missing
                # source line — confirm against the Grafana relation consumer.
                name="osm-kafka",
                dashboard=Path("files/kafka_exporter_dashboard.json").read_text(),
            )

    def _check_missing_dependencies(self, config: ConfigModel):
        """Check if there is any relation missing.

        Args:
            config (ConfigModel): object with configuration information.

        Raises:
            RelationsMissing: if kafka is missing.
        """
        missing_relations = []

        if self.kafka_client.is_missing_data_in_unit():
            missing_relations.append("kafka")

        if missing_relations:
            raise RelationsMissing(missing_relations)

    def build_pod_spec(self, image_info):
        """Build the PodSpec to be used.

        Args:
            image_info (str): container image information.

        Returns:
            Dict: PodSpec information.

        Raises:
            RelationsMissing: if the kafka relation is missing.
            ValueError: if the charm configuration fails validation.
        """
        # Validate config
        config = ConfigModel(**dict(self.config))

        # Check relations
        self._check_missing_dependencies(config)

        # Create Builder for the PodSpec
        pod_spec_builder = PodSpecV3Builder()

        # Build container
        container_builder = ContainerV3Builder(self.app.name, image_info)
        container_builder.add_port(name=self.app.name, port=PORT)
        container_builder.add_http_readiness_probe(
            # NOTE(review): probe path/kwargs reconstructed from missing source
            # lines — /healthz is the upstream kafka_exporter health endpoint;
            # confirm against the deployed image.
            path="/healthz",
            port=PORT,
            initial_delay_seconds=10,
            period_seconds=10,
            timeout_seconds=5,
            success_threshold=1,
            failure_threshold=3,
        )
        container_builder.add_http_liveness_probe(
            path="/healthz",
            port=PORT,
            initial_delay_seconds=60,
            timeout_seconds=30,
            failure_threshold=10,
        )
        container_builder.add_command(
            [
                # NOTE(review): binary name reconstructed — confirm it matches
                # the entrypoint shipped in the oci image.
                "kafka_exporter",
                f"--kafka.server={self.kafka_client.host}:{self.kafka_client.port}",
            ]
        )
        container = container_builder.build()

        # Add container to PodSpec
        pod_spec_builder.add_container(container)

        # Add ingress resources to PodSpec if site url exists
        if config.site_url:
            parsed = urlparse(config.site_url)
            # The builder keeps a live reference to this dict, so annotations
            # added after construction are still picked up.
            annotations = {}
            ingress_resource_builder = IngressResourceV3Builder(
                f"{self.app.name}-ingress", annotations
            )

            if config.ingress_whitelist_source_range:
                annotations[
                    "nginx.ingress.kubernetes.io/whitelist-source-range"
                ] = config.ingress_whitelist_source_range

            if config.cluster_issuer:
                annotations["cert-manager.io/cluster-issuer"] = config.cluster_issuer

            if parsed.scheme == "https":
                ingress_resource_builder.add_tls(
                    [parsed.hostname], config.tls_secret_name
                )
            else:
                # Plain-http ingress: disable the default https redirect.
                annotations["nginx.ingress.kubernetes.io/ssl-redirect"] = "false"

            ingress_resource_builder.add_rule(parsed.hostname, self.app.name, PORT)
            ingress_resource = ingress_resource_builder.build()
            pod_spec_builder.add_ingress_resource(ingress_resource)

        # Fix: the original called pod_spec_builder.build() twice — once for
        # the debug log and again for the return. Build once and reuse.
        pod_spec = pod_spec_builder.build()
        logger.debug(pod_spec)
        return pod_spec
# Script entry point: hand the charm class to the ops framework dispatcher.
if __name__ == "__main__":
    main(KafkaExporterCharm)