Add mon-exporter as scrape target in Prometheus
[osm/devops.git] / docker / Prometheus / src / app.py
index 157413d..b06f448 100755 (executable)
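This commit hardens the helper script that keeps the Prometheus scrape configuration in sync with the prometheus_jobs collection in MongoDB: jobs whose job_name is already present are no longer appended again, the first refresh of the config file is retried up to three times, and each change-stream event is printed before the config is regenerated. The mon-exporter target itself is expected to arrive as a document in that collection; a hypothetical cleaned-up job, written as the Python dict the script appends under scrape_configs, might look like this (the job name, target host:port and metrics path are illustrative, not taken from this commit):

# Hypothetical prometheus_jobs entry after clean-up; name and target are assumptions.
mon_exporter_job = {
    "job_name": "mon_exporter",
    "static_configs": [{"targets": ["mon:8000"]}],
    "metrics_path": "/metrics",
}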
@@ -48,8 +48,7 @@ def get_jobs(client):
 def save_successful_jobs(client, jobs):
     for job in jobs:
         client[target_database].prometheus_jobs.update_one(
-            {"_id": ObjectId(job["_id"]["$oid"])},
-            {"$set": {"is_active": True}}
+            {"_id": ObjectId(job["_id"]["$oid"])}, {"$set": {"is_active": True}}
         )
 
 
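The hunk above only reflows the update_one() call onto a single line; the behavior is unchanged. The "$oid" lookup suggests the stored jobs are kept in MongoDB Extended JSON form, so the id string has to be wrapped back into a bson ObjectId before it can be used as the filter. A minimal sketch of that pattern, with a placeholder connection URL, database name and document (only the collection and field names follow the script):

from bson import ObjectId
import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")   # placeholder URL
job = {"_id": {"$oid": "64b7f0c2e4b0a1a2b3c4d5e6"}}         # illustrative Extended JSON id

# Flag the job as active, matching on the real ObjectId.
client["osm"].prometheus_jobs.update_one(                   # "osm" database name is an assumption
    {"_id": ObjectId(job["_id"]["$oid"])},
    {"$set": {"is_active": True}},
)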
@@ -71,9 +70,20 @@ def generate_prometheus_config(prometheus_jobs, config_file_path):
     if "scrape_configs" not in config_file_yaml:
         config_file_yaml["scrape_configs"] = []
 
+    prometheus_jobs_to_be_added = []
+
     for prometheus_job in prometheus_jobs:
         cleaned_up_job = clean_up_job(prometheus_job)
-        config_file_yaml["scrape_configs"].append(cleaned_up_job)
+        job_to_be_added = True
+        for sc in config_file_yaml["scrape_configs"]:
+            if sc.get("job_name") == cleaned_up_job.get("job_name"):
+                job_to_be_added = False
+                break
+        if job_to_be_added:
+            prometheus_jobs_to_be_added.append(cleaned_up_job)
+
+    for job in prometheus_jobs_to_be_added:
+        config_file_yaml["scrape_configs"].append(job)
 
     return config_file_yaml
 
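With the change above, generate_prometheus_config() skips any stored job whose job_name already exists in scrape_configs, so repeated refreshes no longer pile up duplicate targets. The same idea as a standalone sketch (function and variable names are mine, not the script's):

def merge_scrape_configs(existing, new_jobs):
    # Append only jobs whose job_name is not configured yet.
    known_names = {sc.get("job_name") for sc in existing}
    for job in new_jobs:
        if job.get("job_name") not in known_names:
            existing.append(job)
            known_names.add(job.get("job_name"))
    return existing

# Merging the same job twice leaves a single scrape config behind:
configs = merge_scrape_configs([], [{"job_name": "mon_exporter"}])
configs = merge_scrape_configs(configs, [{"job_name": "mon_exporter"}])
assert len(configs) == 1

A set keeps the name lookup constant-time; with only a handful of scrape configs, the nested loop used in the diff is just as adequate.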
@@ -128,13 +138,12 @@ async def main_task(client):
     print(f"Writing new config file to {prometheus_config_file}")
     config_file = open(prometheus_config_file, "w")
     config_file.truncate(0)
+    print(yaml.safe_dump(generated_prometheus_config))
     config_file.write(yaml.safe_dump(generated_prometheus_config))
     config_file.close()
     print("New config written, updating prometheus")
     update_resp = await reload_prometheus_config(prometheus_url)
-    is_valid = await validate_configuration(
-        prometheus_url, generated_prometheus_config
-    )
+    is_valid = await validate_configuration(prometheus_url, generated_prometheus_config)
     if update_resp and is_valid:
         print("Prometheus config update successful")
         save_successful_jobs(client, stored_jobs)
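reload_prometheus_config() and validate_configuration() live elsewhere in app.py and are not part of this diff. For orientation, a reload helper along these lines is typically built on Prometheus' lifecycle API, which only responds when the server runs with --web.enable-lifecycle; the sketch below assumes aiohttp and is not the implementation used by the script:

import aiohttp

async def reload_prometheus_config(prometheus_url):
    # Ask Prometheus to re-read its configuration file.
    async with aiohttp.ClientSession() as session:
        async with session.post(f"{prometheus_url}/-/reload") as resp:
            # 200 means the reload succeeded; anything else is a failure.
            return resp.status == 200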
@@ -147,15 +156,26 @@ async def main_task(client):
 
 async def main():
     client = pymongo.MongoClient(mongodb_url)
-    print("Connected to MongoDB!")
+    print("Created MongoClient to connect to MongoDB!")
 
-    try:
-        print("Refreshing prometheus config file for first time")
-        await main_task(client)
-    except Exception as error:
-        print("Error in first configuration attempt!")
-        print(error)
+    # Initial loop: retry the first refresh of the prometheus config file up to 3 times
+    first_refresh_completed = False
+    tries = 1
+    while tries <= 3 and not first_refresh_completed:
+        try:
+            print("Refreshing prometheus config file for first time")
+            await main_task(client)
+            first_refresh_completed = True
+        except Exception as error:
+            print(f"Error in configuration attempt! Number of tries: {tries}/3")
+            print(error)
+        time.sleep(5)
+        tries += 1
+    if not first_refresh_completed:
+        print("Not possible to refresh prometheus config file for first time")
+        return
 
+    # Main loop
     while True:
         try:
             # Needs MongoDB in replica set mode, as this feature relies on the OpLog
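The initial loop above gives the script three attempts, five seconds apart, before giving up and returning. Note that time.sleep() blocks the event loop, which is harmless at this point because nothing else is scheduled yet; an equivalent asyncio-friendly helper (names and structure are mine, not part of the commit) could look like:

import asyncio

async def retry(coro_factory, attempts=3, delay=5):
    # Run an awaitable up to `attempts` times, pausing `delay` seconds between tries.
    for attempt in range(1, attempts + 1):
        try:
            await coro_factory()
            return True
        except Exception as error:
            print(f"Error in configuration attempt! Number of tries: {attempt}/{attempts}")
            print(error)
            await asyncio.sleep(delay)
    return False

# Usage, e.g.:  if not await retry(lambda: main_task(client)): return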
@@ -175,6 +195,7 @@ async def main():
             print("Listening to changes in prometheus jobs collection")
             for change in change_stream:
                 print("Change detected, updating prometheus config")
+                print(f"{change}")
                 await main_task(client)
                 print()
         except Exception as error:
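The main loop relies on MongoDB change streams, which is why the in-code comment requires a replica set: change streams are fed from the oplog and are not available on a standalone server. A minimal, self-contained sketch of watching the prometheus_jobs collection with pymongo (connection URL and database name are placeholders):

import pymongo

client = pymongo.MongoClient("mongodb://mongo:27017/?replicaSet=rs0")   # placeholder URL

# watch() raises OperationFailure on a standalone MongoDB instance.
with client["osm"].prometheus_jobs.watch() as change_stream:
    for change in change_stream:
        print(f"Change detected: {change['operationType']}")
        # ...regenerate and reload the Prometheus config here...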