Add mon-exporter as scrape target in Prometheus
[osm/devops.git] docker/Prometheus/src/app.py
#!/usr/bin/env python

# Copyright 2021 Whitestack, LLC
# *************************************************************

# This file is part of OSM Monitoring module
# All Rights Reserved to Whitestack, LLC

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# For those usages not covered by the Apache License, Version 2.0 please
# contact: fbravo@whitestack.com
##

# Standard library imports
import asyncio
import copy
import json
import os
import time

# Third-party imports
import aiohttp
import pymongo
import yaml
from bson import ObjectId
from bson.json_util import dumps

# Env variables (all required; the process fails fast with a KeyError if unset)
mongodb_url = os.environ["MONGODB_URL"]
target_database = os.environ["TARGET_DATABASE"]
prometheus_config_file = os.environ["PROMETHEUS_CONFIG_FILE"]
prometheus_base_config_file = os.environ["PROMETHEUS_BASE_CONFIG_FILE"]
prometheus_url = os.environ["PROMETHEUS_URL"]
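
# Illustrative values only (hypothetical, not taken from this repo):
#   MONGODB_URL=mongodb://mongo:27017
#   TARGET_DATABASE=osm
#   PROMETHEUS_CONFIG_FILE=/etc/prometheus/prometheus.yml
#   PROMETHEUS_BASE_CONFIG_FILE=/etc/prometheus/prometheus-base.yml
#   PROMETHEUS_URL=http://prometheus:9090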


def get_jobs(client):
    # Serialize with bson.json_util.dumps and parse back so that BSON types
    # (e.g. ObjectId) become plain JSON-compatible dicts
    return json.loads(dumps(client[target_database].prometheus_jobs.find({})))
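
# Example: a document whose _id is ObjectId("...") comes back as
# {"_id": {"$oid": "..."}}, which is why save_successful_jobs() below
# reads job["_id"]["$oid"].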


def save_successful_jobs(client, jobs):
    # Mark each job as active once Prometheus has accepted the new config
    for job in jobs:
        client[target_database].prometheus_jobs.update_one(
            {"_id": ObjectId(job["_id"]["$oid"])}, {"$set": {"is_active": True}}
        )


def clean_up_job(prometheus_job):
    cleaned_prometheus_job = copy.deepcopy(prometheus_job)
    # Take out _id and OSM-internal keys so only valid Prometheus
    # scrape_config fields remain
    cleaned_prometheus_job.pop("_id", None)
    cleaned_prometheus_job.pop("is_active", None)
    cleaned_prometheus_job.pop("vnfr_id", None)
    cleaned_prometheus_job.pop("nsr_id", None)
    return cleaned_prometheus_job
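
# Hypothetical example: a stored job such as
#   {"_id": {...}, "is_active": False, "nsr_id": "...", "vnfr_id": "...",
#    "job_name": "mon_exporter", "static_configs": [...]}
# is reduced to
#   {"job_name": "mon_exporter", "static_configs": [...]}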


def generate_prometheus_config(prometheus_jobs, config_file_path):
    # Start from the base config shipped with the container
    with open(config_file_path, encoding="utf-8", mode="r") as config_file:
        config_file_yaml = yaml.safe_load(config_file)
    if config_file_yaml is None:
        config_file_yaml = {}
    if "scrape_configs" not in config_file_yaml:
        config_file_yaml["scrape_configs"] = []

    # Append each stored job unless a scrape config with the same job_name
    # is already present (duplicate job names would make Prometheus reject
    # the configuration)
    existing_job_names = {
        sc.get("job_name") for sc in config_file_yaml["scrape_configs"]
    }
    for prometheus_job in prometheus_jobs:
        cleaned_up_job = clean_up_job(prometheus_job)
        job_name = cleaned_up_job.get("job_name")
        if job_name not in existing_job_names:
            config_file_yaml["scrape_configs"].append(cleaned_up_job)
            existing_job_names.add(job_name)

    return config_file_yaml
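
# Illustrative result (assuming a single stored job named "mon_exporter"
# scraping a hypothetical target):
#   scrape_configs:
#   - job_name: mon_exporter
#     static_configs:
#     - targets: ['mon:8000']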


async def reload_prometheus_config(prom_url):
    async with aiohttp.ClientSession() as session:
        # Ask Prometheus to hot-reload its configuration
        async with session.post(prom_url + "/-/reload") as resp:
            if resp.status > 204:
                # resp.text() is a coroutine and must be awaited
                error_text = await resp.text()
                print(f"Error while reloading prometheus config: {error_text}")
                return False
        # Give Prometheus a moment to apply the new configuration
        await asyncio.sleep(5)
        return True
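
# Note: POST /-/reload is only enabled when Prometheus runs with the
# --web.enable-lifecycle flag; without it the endpoint returns an error.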


def check_configuration_equal(a_config, b_config):
    # Configs are considered equal when they declare the same job_name
    # list (order-sensitive) under scrape_configs
    if a_config is None and b_config is None:
        return True
    if a_config is None or b_config is None:
        return False
    if "scrape_configs" not in a_config and "scrape_configs" not in b_config:
        return True
    if "scrape_configs" not in a_config or "scrape_configs" not in b_config:
        return False
    a_jobs = [j["job_name"] for j in a_config["scrape_configs"]]
    b_jobs = [j["job_name"] for j in b_config["scrape_configs"]]

    return a_jobs == b_jobs
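
# Example:
#   ["prometheus"] vs ["prometheus"]                  -> True
#   ["prometheus", "mon_exporter"] vs ["prometheus"]  -> False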


async def validate_configuration(prom_url, new_config):
    async with aiohttp.ClientSession() as session:
        # Get the running configuration from prometheus and compare it
        # with the one we just wrote. If prometheus did not admit the new
        # configuration, the old one remains active.
        async with session.get(prom_url + "/api/v1/status/config") as resp:
            if resp.status > 204:
                error_text = await resp.text()
                print(f"Error while fetching prometheus config: {error_text}")
                return False
            current_config = await resp.json()
            return check_configuration_equal(
                yaml.safe_load(current_config["data"]["yaml"]), new_config
            )
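
# /api/v1/status/config responds with JSON of the form
#   {"status": "success", "data": {"yaml": "<running config as a YAML string>"}}
# hence the yaml.safe_load() on current_config["data"]["yaml"] above.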


async def main_task(client):
    stored_jobs = get_jobs(client)
    print(f"Jobs detected: {len(stored_jobs):d}")
    generated_prometheus_config = generate_prometheus_config(
        stored_jobs, prometheus_base_config_file
    )
    print(f"Writing new config file to {prometheus_config_file}")
    new_config_yaml = yaml.safe_dump(generated_prometheus_config)
    print(new_config_yaml)
    # Mode "w" truncates the file, so no explicit truncate() is needed
    with open(prometheus_config_file, mode="w", encoding="utf-8") as config_file:
        config_file.write(new_config_yaml)
    print("New config written, updating prometheus")
    update_resp = await reload_prometheus_config(prometheus_url)
    is_valid = await validate_configuration(prometheus_url, generated_prometheus_config)
    if update_resp and is_valid:
        print("Prometheus config update successful")
        save_successful_jobs(client, stored_jobs)
    else:
        print(
            "Error while updating prometheus config: "
            "current config doesn't match the updated values"
        )
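
# Note: validation only compares job_name lists (see
# check_configuration_equal), so it detects rejected or missing jobs but
# not field-level changes inside an existing job.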


async def main():
    client = pymongo.MongoClient(mongodb_url)
    print("Created MongoClient to connect to MongoDB!")

    # Initial loop: refresh the prometheus config file for the first time,
    # with up to three attempts
    first_refresh_completed = False
    tries = 1
    while tries <= 3 and not first_refresh_completed:
        try:
            print("Refreshing prometheus config file for first time")
            await main_task(client)
            first_refresh_completed = True
        except Exception as error:
            print(f"Error in configuration attempt! Number of tries: {tries}/3")
            print(error)
            # asyncio.sleep instead of time.sleep, so the event loop
            # is not blocked
            await asyncio.sleep(5)
            tries += 1
    if not first_refresh_completed:
        print("Not possible to refresh prometheus config file for first time")
        return

    # Main loop
    while True:
        try:
            # Needs MongoDB in replica set mode, as change streams rely on
            # the oplog
            change_stream = client[target_database].prometheus_jobs.watch(
                [
                    {
                        "$match": {
                            # To modify a particular job,
                            # delete it and insert it again
                            "operationType": {"$in": ["insert", "delete"]}
                        }
                    }
                ]
            )

            # Single thread, so no race conditions; operations are queued
            # up in order
            print("Listening to changes in prometheus jobs collection")
            for change in change_stream:
                print("Change detected, updating prometheus config")
                print(f"{change}")
                await main_task(client)
                print()
        except Exception as error:
            print(error)
            print(
                "Detected failure while listening to prometheus jobs collection, "
                "retrying..."
            )
            await asyncio.sleep(5)
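
# A change event from watch() looks roughly like this (illustrative):
#   {"operationType": "insert",
#    "ns": {"db": "osm", "coll": "prometheus_jobs"},
#    "fullDocument": {...}, ...}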


asyncio.run(main())