Merge "Updated requirements.txt file"
author hoban <adrian.hoban@intel.com>
Tue, 19 Sep 2017 10:58:12 +0000 (12:58 +0200)
committer Gerrit Code Review <root@osm.etsi.org>
Tue, 19 Sep 2017 10:58:12 +0000 (12:58 +0200)
26 files changed:
core/__init__.py [new file with mode: 0644]
core/message_bus/__init__.py [new file with mode: 0644]
plugins/CloudWatch/connection.py
plugins/CloudWatch/metric_alarms.py
plugins/CloudWatch/metrics.py [new file with mode: 0644]
plugins/CloudWatch/plugin.py [deleted file]
plugins/CloudWatch/plugin_alarm.py [new file with mode: 0644]
plugins/CloudWatch/plugin_metrics.py [new file with mode: 0644]
plugins/OpenStack/Aodh/alarming.py
plugins/OpenStack/Aodh/notifier.py [new file with mode: 0644]
plugins/OpenStack/Aodh/plugin_instance.py
plugins/OpenStack/Gnocchi/metrics.py
plugins/OpenStack/Gnocchi/plugin_instance.py
plugins/OpenStack/common.py
plugins/OpenStack/response.py [new file with mode: 0644]
plugins/OpenStack/settings.py
plugins/OpenStack/singleton.py
plugins/__init__.py [new file with mode: 0644]
plugins/vRealiseOps/__init__.py [new file with mode: 0644]
plugins/vRealiseOps/kafka_consumer_vrops.py
plugins/vRealiseOps/mon_plugin_vrops.py
plugins/vRealiseOps/plugin_receiver.py
plugins/vRealiseOps/vROPs_Webservice/__init__.py [new file with mode: 0644]
plugins/vRealiseOps/vROPs_Webservice/install.sh [new file with mode: 0755]
plugins/vRealiseOps/vROPs_Webservice/vrops_webservice.py [new file with mode: 0755]
plugins/vRealiseOps/vrops_config.xml

diff --git a/core/__init__.py b/core/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/core/message_bus/__init__.py b/core/message_bus/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/plugins/CloudWatch/connection.py b/plugins/CloudWatch/connection.py
index 666879a..547c454 100644 (file)
 ##
 
 '''
-AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+Connecting with AWS services -- CloudWatch/EC2 -- using the required access keys
 '''
 
 __author__ = "Wajeeha Hamid"
-__date__   = "31-August-2017"
+__date__   = "18-September-2017"
 
 import sys
 import os
@@ -48,7 +48,7 @@ except:
 class Connection():
     """Connection Establishement with AWS -- VPC/EC2/CloudWatch"""
 #-----------------------------------------------------------------------------------------------------------------------------
-    def setEnvironment(self):   
+    def setEnvironment(self):  
 
         """Credentials for connecting to AWS-CloudWatch""" 
         self.AWS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
@@ -62,13 +62,13 @@ class Connection():
                 self.vpc_conn = boto.vpc.connect_to_region(self.AWS_REGION,
                     aws_access_key_id=self.AWS_KEY,
                     aws_secret_access_key=self.AWS_SECRET)
-                print self.vpc_conn
+               
                 
                 #EC2 Connection
                 self.ec2_conn = boto.ec2.connect_to_region(self.AWS_REGION,
                     aws_access_key_id=self.AWS_KEY,
                     aws_secret_access_key=self.AWS_SECRET)
-                print self.ec2_conn
+               
                 
                 """ TODO : Required to add actions against alarms when needed """
                 #self.sns = connect_to_region(self.AWS_REGION)
@@ -80,10 +80,10 @@ class Connection():
                     self.AWS_REGION,
                     aws_access_key_id=self.AWS_KEY,
                     aws_secret_access_key=self.AWS_SECRET) 
-
-                return self.cloudwatch_conn
-                print "--- Connection Established with AWS ---"
-                print "\n"
+                connection_dict = dict()
+                connection_dict['ec2_connection'] = self.ec2_conn
+                connection_dict['cloudwatch_connection'] = self.cloudwatch_conn
+                return connection_dict
                 
             except Exception as e:
                 log.error("Failed to Connect with AWS %s: ",str(e)) 
diff --git a/plugins/CloudWatch/metric_alarms.py b/plugins/CloudWatch/metric_alarms.py
index c58987d..bd76c81 100644 (file)
@@ -1,13 +1,41 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+''' Handling of alarm requests via BOTO 2.48 '''
+
+__author__ = "Wajeeha Hamid"
+__date__   = "18-September-2017"
+
 import sys
 import os
 import re
 import datetime
 import random
+import json
 import logging as log
 from random import randint
 from operator import itemgetter
 from connection import Connection
 
+
 try:
     import boto
     import boto.ec2
@@ -19,31 +47,35 @@ except:
 
 
 class MetricAlarm():
-    """Alarms Functionality Handler -- Cloudwatch """
-    def config_alarm(self,cloudwatch_conn,alarm_info):
-       """Configure or Create a new alarm"""
+    """Alarms Functionality Handler -- Carries out alarming requests and responses via BOTO.Cloudwatch """
+    def __init__(self):
+        self.alarm_resp = dict()
+        self.del_resp = dict()
 
+    def config_alarm(self,cloudwatch_conn,create_info):
+       """Configure or Create a new alarm"""
+        inner_dict = dict()
         """ Alarm Name to ID Mapping """
-        alarm_id = alarm_info['alarm_name'] + "_" + alarm_info['resource_id']
-        if self.is_present(cloudwatch_conn,alarm_id) == True: 
+        alarm_info = create_info['alarm_create_request']
+        alarm_id = alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid']
+        if self.is_present(cloudwatch_conn,alarm_id)['status'] == True: 
             alarm_id = None
             log.debug ("Alarm already exists, Try updating the alarm using 'update_alarm_configuration()'")   
         else:              
             try:
                 alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
                     connection = cloudwatch_conn,
-                    name = alarm_info['alarm_name'] + "_" + alarm_info['resource_id'],
-                    metric = alarm_info['alarm_metric'],
-                    namespace = alarm_info['instance_type'],
-                    statistic = alarm_info['alarm_statistics'],
-                    comparison = alarm_info['alarm_comparison'],
-                    threshold = alarm_info['alarm_threshold'],
-                    period = alarm_info['alarm_period'],
-                    evaluation_periods = alarm_info['alarm_evaluation_period'],
-                    unit=alarm_info['alarm_unit'],
-                    description = alarm_info['alarm_severity'] + ";" + alarm_info['alarm_description'],
-                    dimensions = {'InstanceId':alarm_info['resource_id']},
+                    name = alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid'],
+                    metric = alarm_info['metric_name'],
+                    namespace = "AWS/EC2",
+                    statistic = alarm_info['statistic'],
+                    comparison = alarm_info['operation'],
+                    threshold = alarm_info['threshold_value'],
+                    period = 60,
+                    evaluation_periods = 1,
+                    unit=alarm_info['unit'],
+                    description = alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
+                    dimensions = {'InstanceId':alarm_info['resource_uuid']},
                     alarm_actions = None,
                     ok_actions = None,
                     insufficient_data_actions = None)
@@ -51,126 +83,181 @@ class MetricAlarm():
                 """Setting Alarm Actions : 
                 alarm_actions = ['arn:aws:swf:us-west-2:465479087178:action/actions/AWS_EC2.InstanceId.Stop/1.0']"""
 
-                cloudwatch_conn.put_metric_alarm(alarm)
+                status=cloudwatch_conn.put_metric_alarm(alarm)
+
                 log.debug ("Alarm Configured Succesfully")
-                print "created"
-                print "\n"    
+                self.alarm_resp['schema_version'] = str(create_info['schema_version'])
+                self.alarm_resp['schema_type'] = 'create_alarm_response'
+
+                inner_dict['correlation_id'] = str(alarm_info['correlation_id'])
+                inner_dict['alarm_uuid'] = str(alarm_id) 
+                inner_dict['status'] = status
+
+                self.alarm_resp['alarm_create_response'] = inner_dict
+                if status == True:
+                       return self.alarm_resp
+                else:
+                       return None     
+
             except Exception as e:
                 log.error("Alarm Configuration Failed: " + str(e))
-        return alarm_id    
+            
 #-----------------------------------------------------------------------------------------------------------------------------
-    def update_alarm(self,cloudwatch_conn,alarm_info):
+    def update_alarm(self,cloudwatch_conn,update_info):
 
        """Update or reconfigure an alarm"""
-        
+        inner_dict = dict()
+        alarm_info = update_info['alarm_update_request']
+
         """Alarm Name to ID Mapping"""
-        alarm_id = alarm_info['alarm_name'] + "_" + alarm_info['resource_id']
+        alarm_id = alarm_info['alarm_uuid']
+        status = self.is_present(cloudwatch_conn,alarm_id)
 
         """Verifying : Alarm exists already"""
-        if self.is_present(cloudwatch_conn,alarm_id) == False: 
+        if status['status'] == False: 
             alarm_id = None
-            log.debug("Alarm not found, Try creating the alarm using 'configure_alarm()'")   
+            log.debug("Alarm not found, Try creating the alarm using 'configure_alarm()'")
+            return alarm_id   
         else:            
             try:
                 alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
-                        connection = cloudwatch_conn,
-                        name = alarm_info['alarm_name'] + "_" + alarm_info['resource_id'],
-                        metric = alarm_info['alarm_metric'],
-                        namespace = alarm_info['instance_type'],
-                        statistic = alarm_info['alarm_statistics'],
-                        comparison = alarm_info['alarm_comparison'],
-                        threshold = alarm_info['alarm_threshold'],
-                        period = alarm_info['alarm_period'],
-                        evaluation_periods = alarm_info['alarm_evaluation_period'],
-                        unit=alarm_info['alarm_unit'],
-                        description = alarm_info['alarm_severity'] + ";" + alarm_info['alarm_description'],
-                        dimensions = {'InstanceId':alarm_info['resource_id']},
-                        alarm_actions = None,
-                        ok_actions = None,
-                        insufficient_data_actions = None)
-                cloudwatch_conn.put_metric_alarm(alarm)
+                    connection = cloudwatch_conn,
+                    name = status['info'].name ,
+                    metric = alarm_info['metric_name'],
+                    namespace = "AWS/EC2",
+                    statistic = alarm_info['statistic'],
+                    comparison = alarm_info['operation'],
+                    threshold = alarm_info['threshold_value'],
+                    period = 60,
+                    evaluation_periods = 1,
+                    unit=alarm_info['unit'],
+                    description = alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
+                    dimensions = {'InstanceId':str(status['info'].dimensions['InstanceId']).split("'")[1]},
+                    alarm_actions = None,
+                    ok_actions = None,
+                    insufficient_data_actions = None)
+
+                """Setting Alarm Actions : 
+                alarm_actions = ['arn:aws:swf:us-west-2:465479087178:action/actions/AWS_EC2.InstanceId.Stop/1.0']"""
+
+                status=cloudwatch_conn.put_metric_alarm(alarm)
                 log.debug("Alarm %s Updated ",alarm.name)
-                print "updated"
+                self.alarm_resp['schema_version'] = str(update_info['schema_version'])
+                self.alarm_resp['schema_type'] = 'update_alarm_response'
+
+                inner_dict['correlation_id'] = str(alarm_info['correlation_id'])
+                inner_dict['alarm_uuid'] = str(alarm_id) 
+                inner_dict['status'] = status
+
+                self.alarm_resp['alarm_update_response'] = inner_dict
+                return self.alarm_resp
             except Exception as e:
                 log.error ("Error in Updating Alarm " + str(e))
-        return alarm_id
+        
 #-----------------------------------------------------------------------------------------------------------------------------
-    def delete_Alarm(self,cloudwatch_conn,alarm_id):
+    def delete_Alarm(self,cloudwatch_conn,del_info_all):
+
        """Deletes an Alarm with specified alarm_id"""
+        inner_dict = dict()
+        del_info = del_info_all['alarm_delete_request']
+        status = self.is_present(cloudwatch_conn,del_info['alarm_uuid'])
         try:
-            if self.is_present(cloudwatch_conn,alarm_id) == True:
-                deleted_alarm=cloudwatch_conn.delete_alarms(alarm_id)
-                return alarm_id
+            if status['status'] == True:                
+                del_status=cloudwatch_conn.delete_alarms(status['info'].name)
+                self.del_resp['schema_version'] = str(del_info_all['schema_version'])
+                self.del_resp['schema_type'] = 'delete_alarm_response'
+                inner_dict['correlation_id'] = str(del_info['correlation_id'])
+                inner_dict['alarm_id'] = str(del_info['alarm_uuid'])
+                inner_dict['status'] = del_status
+                self.del_resp['alarm_deletion_response'] = inner_dict
+                return self.del_resp
             return None 
         except Exception as e:
                 log.error("Alarm Not Deleted: " + str(e))      
 #-----------------------------------------------------------------------------------------------------------------------------
-    def alarms_list(self,cloudwatch_conn,instance_id):
+    def alarms_list(self,cloudwatch_conn,list_info):
 
-       """Get a list of alarms that are present on a particular VM instance"""
-        try:
-            log.debug("Getting Alarm list for %s",instance_id)
-            alarm_dict = dict()
-            alarm_list = []
+        """Get a list of alarms that are present on a particular VIM type"""
+        alarm_list = []
+        alarm_info = dict()
+        try: #id vim 
             alarms = cloudwatch_conn.describe_alarms()
             itr = 0
             for alarm in alarms:
-                if str(alarm.dimensions['InstanceId']).split("'")[1] == instance_id:
-                    alarm_list.insert(itr,str(alarm.name))
-                    itr += 1
-            alarm_dict['alarm_names'] = alarm_list
-            alarm_dict['resource_id'] = instance_id   
-            return alarm_dict
+                list_info['alarm_list_request']['alarm_uuid'] = str(alarm.description).split(';')[1]
+                alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                itr += 1
+            
+            alarm_info['schema_version'] = str(list_info['schema_version'])
+            alarm_info['schema_type'] = 'list_alarm_response'    
+            alarm_info['list_alarm_resp'] = json.dumps(alarm_list)
+
+            return alarm_info                  
         except Exception as e:
                 log.error("Error in Getting List : %s",str(e))    
 #-----------------------------------------------------------------------------------------------------------------------------
-    def alarm_details(self,cloudwatch_conn,alarm_name):
+    def alarm_details(self,cloudwatch_conn,ack_info):
 
        """Get an individual alarm details specified by alarm_name"""
         try:
-            alarms_details=cloudwatch_conn.describe_alarm_history()       
+            alarms_details=cloudwatch_conn.describe_alarm_history()  
+            alarm_details_all = dict()     
             alarm_details_dict = dict()
+            ack_info_all = ack_info
+
+
+            if 'ack_details' in ack_info:
+                ack_info = ack_info['ack_details']
+            elif 'alarm_list_request' in ack_info:
+                ack_info = ack_info['alarm_list_request']    
             
+            is_present = self.is_present(cloudwatch_conn,ack_info['alarm_uuid'])
+
             for itr in range (len(alarms_details)):
-                if alarms_details[itr].name == alarm_name and 'created' in alarms_details[itr].summary :#name, timestamp, summary
-                    status = alarms_details[itr].summary.split()                   
+                if alarms_details[itr].name == is_present['info'].name :#name, timestamp, summary
+                    if 'created' in alarms_details[itr].summary:
+                        alarm_details_dict['status'] = "New"
+                    elif 'updated' in alarms_details[itr].summary:
+                        alarm_details_dict['status'] = "Update"
+                    elif 'deleted' in alarms_details[itr].summary:   
+                        alarm_details_dict['status'] = "Canceled"
+
+                    status = alarms_details[itr].summary.split()                  
                     alarms = cloudwatch_conn.describe_alarms()
                     for alarm in alarms:
-                        if alarm.name == alarm_name:
-                            alarm_details_dict['alarm_id'] = alarm_name
-                            alarm_details_dict['resource_id'] = str(alarm.dimensions['InstanceId']).split("'")[1]
-                            alarm_details_dict['severity'] = str(alarm.description)
-                            alarm_details_dict['start_date_time'] = str(alarms_details[x].timestamp) 
+                        if str(alarm.description).split(';')[1] == ack_info['alarm_uuid']:
+                            alarm_details_dict['alarm_uuid'] = str(ack_info['alarm_uuid'])
+                            alarm_details_dict['resource_uuid'] = str(alarm.dimensions['InstanceId']).split("'")[1]
+                            alarm_details_dict['description'] = str(alarm.description).split(';')[2]
+                            alarm_details_dict['severity'] = str(alarm.description).split(';')[0]
+                            alarm_details_dict['start_date_time'] = str(alarms_details[itr].timestamp) 
+                            alarm_details_dict['vim_type'] = str(ack_info_all['vim_type'])
+                            #TODO : tenant id
+                            if 'ack_details' in ack_info_all:
+                                alarm_details_all['schema_version'] = str(ack_info_all['schema_version'])
+                                alarm_details_all['schema_type'] = 'notify_alarm'
+                                alarm_details_all['notify_details'] = alarm_details_dict
+                                return alarm_details_all
 
-                            return alarm_details_dict             
+                            elif 'alarm_list_request' in ack_info_all:
+                                return alarm_details_dict                     
                   
         except Exception as e:
                log.error("Error getting alarm details: %s",str(e))           
 #-----------------------------------------------------------------------------------------------------------------------------
-    def metrics_data(self,cloudwatch_conn,metric_name,instance_id,period,metric_unit):
-
-       """Getting Metrics Stats for an Hour. Time interval can be modified using Timedelta value"""
-        metric_data= dict()
-        metric_stats=cloudwatch_conn.get_metric_statistics(period, datetime.datetime.utcnow() - datetime.timedelta(seconds=3600),
-                            datetime.datetime.utcnow(),metric_name,'AWS/EC2', 'Maximum',
-                            dimensions={'InstanceId':instance_id}, unit=metric_unit)
-
-        for itr in range (len(metric_stats)):
-            metric_data['metric_name'] = metric_name
-            metric_data['Resource_id'] = instance_id
-            metric_data['Unit']                   = metric_stats[itr]['Unit']
-            metric_data['Timestamp']   = metric_stats[itr]['Timestamp']  
-        return metric_data
-
-#-----------------------------------------------------------------------------------------------------------------------------
-    def is_present(self,cloudwatch_conn,alarm_name):
-       """Finding Alarm exists or not"""
+    def is_present(self,cloudwatch_conn,alarm_id):
+       """Finding alarm from already configured alarms"""
+        alarm_info = dict()
         try:
             alarms = cloudwatch_conn.describe_alarms()
             for alarm in alarms:
-                if alarm.name == alarm_name:
-                    return True
-            return False
+                if str(alarm.description).split(';')[1] == alarm_id:
+                    alarm_info['status'] = True
+                    alarm_info['info'] = alarm
+                    return alarm_info
+            alarm_info['status'] = False        
+            return alarm_info
         except Exception as e:
                 log.error("Error Finding Alarm",str(e))             
 #-----------------------------------------------------------------------------------------------------------------------------
+    
\ No newline at end of file
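
For reference, a small sketch (not part of the commit) of the alarm-identity convention the rewritten handlers rely on: the alarm UUID is the alarm name joined to the resource UUID, and it is embedded in the CloudWatch alarm description as "severity;alarm_uuid;description", which is what is_present() and alarm_details() later split on:

    # Illustrative values only; the convention comes from config_alarm()/is_present() above.
    alarm_name    = 'cpu_high'                   # hypothetical alarm name
    resource_uuid = 'i-098da78cbd8304e17'        # example instance id reused from the old test stub
    severity      = 'Critical'
    description   = 'CPU above threshold'

    alarm_uuid = alarm_name + "_" + resource_uuid
    aws_description = severity + ";" + alarm_uuid + ";" + description

    # is_present() recovers the UUID and alarm_details() the severity like this:
    assert aws_description.split(';')[1] == alarm_uuid
    assert aws_description.split(';')[0] == severity
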
diff --git a/plugins/CloudWatch/metrics.py b/plugins/CloudWatch/metrics.py
new file mode 100644 (file)
index 0000000..ddda7e4
--- /dev/null
@@ -0,0 +1,236 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+'''
+AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+'''
+
+__author__ = "Wajeeha Hamid"
+__date__   = "18-Sept-2017"
+
+import sys
+import datetime
+import json
+import logging as log
+
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except:
+    exit("Boto not avialable. Try activating your virtualenv OR `pip install boto`")
+
+
+
+class Metrics():
+
+    def createMetrics(self,cloudwatch_conn,metric_info):
+        try:
+            
+            '''createMetrics returns metric_uuid=0 and
+             status=True when the metric is supported by AWS'''
+
+            supported=self.check_metric(metric_info['metric_name'])
+            metric_resp = dict()
+            if supported['status'] == True:
+                metric_resp['status'] = True
+                metric_resp['metric_uuid'] = 0
+            else:
+                metric_resp['status'] = False
+                metric_resp['metric_uuid'] = None
+
+            metric_resp['resource_uuid'] = metric_info['resource_uuid'] 
+            log.debug("Metrics Configured Successfully : %s" , metric_resp)
+            return metric_resp         
+
+        except Exception as e:
+            log.error("Metric Configuration Failed: " + str(e))
+#-----------------------------------------------------------------------------------------------------------------------------
+    
+    def metricsData(self,cloudwatch_conn,data_info):
+
+       """Getting Metrics Stats for an Hour. The datapoints are
+        received after every one minute.
+        Time interval can be modified using Timedelta value"""
+
+        try:
+            metric_info = dict()
+            metric_info_dict = dict()
+            timestamp_arr = {}
+            value_arr = {}
+
+            supported=self.check_metric(data_info['metric_name'])
+
+            metric_stats=cloudwatch_conn.get_metric_statistics(60, datetime.datetime.utcnow() - datetime.timedelta(seconds=int(data_info['collection_period'])),
+                                datetime.datetime.utcnow(),supported['metric_name'],'AWS/EC2', 'Maximum',
+                                dimensions={'InstanceId':data_info['resource_uuid']}, unit='Percent')  
+
+            index = 0
+            for itr in range (len(metric_stats)):
+                timestamp_arr[index] = str(metric_stats[itr]['Timestamp'])
+                value_arr[index] = metric_stats[itr]['Maximum']
+                index +=1
+
+            metric_info_dict['time_series'] = timestamp_arr
+            metric_info_dict['metrics_series'] = value_arr
+            log.debug("Metrics Data : %s", metric_info_dict)
+            return metric_info_dict
+        
+        except Exception as e:
+            log.error("Error returning Metrics Data" + str(e))
+
+#-----------------------------------------------------------------------------------------------------------------------------
+    def updateMetrics(self,cloudwatch_conn,metric_info):
+        
+        '''updateMetrics returns metric_uuid=0 and
+         status=True when the metric is supported by AWS'''
+        try:
+            supported=self.check_metric(metric_info['metric_name'])
+            update_resp = dict()
+            if supported['status'] == True:
+                update_resp['status'] = True
+                update_resp['metric_uuid'] = 0
+            else:
+                update_resp['status'] = False
+                update_resp['metric_uuid'] = None
+
+            update_resp['resource_uuid'] = metric_info['resource_uuid']
+            log.debug("Metric Updated : %s", update_resp) 
+            return update_resp  
+        
+        except Exception as e:
+            log.error("Error in Update Metrics" + str(e))
+#-----------------------------------------------------------------------------------------------------------------------------
+    def deleteMetrics(self,cloudwatch_conn,del_info):
+        
+        '''Metric deletion is not supported in AWS;
+        returns the required parameters with status = False'''
+        try:
+
+            del_resp = dict()
+            del_resp['schema_version'] = del_info['schema_version']
+            del_resp['schema_type'] = "delete_metric_response"
+            del_resp['metric_name'] = del_info['metric_name']
+            del_resp['metric_uuid'] = del_info['metric_uuid']
+            del_resp['resource_uuid'] = del_info['resource_uuid']
+            # TODO : yet to finalize
+            del_resp['tenant_uuid'] = del_info['tenant_uuid']
+            del_resp['correlation_id'] = del_info['correlation_uuid']
+            del_resp['status'] = False
+            log.info("Metric Deletion Not supported in AWS : %s",del_resp)
+            return del_resp
+
+        except Exception as e:
+                log.error(" Metric Deletion Not supported in AWS : " + str(e))
+#------------------------------------------------------------------------------------------------------------------------------------
+    
+    def listMetrics(self,cloudwatch_conn ,list_info):
+
+        '''Returns the list of available AWS/EC2 metrics on which
+        alarms have been configured and the metrics are being monitored'''
+        try:
+            supported = self.check_metric(list_info['metric_name'])
+
+            metrics_list = []
+            metrics_data = dict()
+            metrics_info = dict()    
+
+            #To get the list of associated metrics with the alarms
+            alarms = cloudwatch_conn.describe_alarms()
+            itr = 0
+            if list_info['metric_name'] == None:
+                for alarm in alarms:
+                    instance_id = str(alarm.dimensions['InstanceId']).split("'")[1] 
+                    metrics_info['metric_name'] = str(alarm.metric)
+                    metrics_info['metric_uuid'] = 0     
+                    metrics_info['metric_unit'] = str(alarm.unit)    
+                    metrics_info['resource_uuid'] = instance_id 
+                    metrics_list.insert(itr,metrics_info)
+                    itr += 1
+            else: 
+                for alarm in alarms:
+                    print supported['metric_name']
+                    if alarm.metric == supported['metric_name']:
+                        instance_id = str(alarm.dimensions['InstanceId']).split("'")[1] 
+                        metrics_info['metric_name'] = str(alarm.metric)
+                        metrics_info['metric_uuid'] = 0     
+                        metrics_info['metric_unit'] = str(alarm.unit)    
+                        metrics_info['resource_uuid'] = instance_id
+                        metrics_list.insert(itr,metrics_info)
+                        itr += 1
+                        
+            log.debug("Metrics List : %s",metrics_list)
+            return metrics_list
+
+        except Exception as e:
+            log.error("Error in Getting Metric List " + str(e))
+
+#------------------------------------------------------------------------------------------------------------------------------------
+
+    def check_metric(self,metric_name):
+    
+        ''' Checking whether the metric is supported by AWS '''
+        try:
+            check_resp = dict()
+            #metric_name
+            if metric_name == 'CPU_UTILIZATION':
+                metric_name = 'CPUUtilization'
+                metric_status = True
+            elif metric_name == 'DISK_READ_OPS':
+                metric_name = 'DiskReadOps'
+                metric_status = True
+            elif metric_name == 'DISK_WRITE_OPS':
+                metric_name = 'DiskWriteOps'
+                metric_status = True
+            elif metric_name == 'DISK_READ_BYTES':
+                metric_name = 'DiskReadBytes'
+                metric_status = True
+            elif metric_name == 'DISK_WRITE_BYTES':
+                metric_name = 'DiskWriteBytes'
+                metric_status = True
+            elif metric_name == 'PACKETS_RECEIVED':
+                metric_name = 'NetworkPacketsIn'
+                metric_status = True
+            elif metric_name == 'PACKETS_SENT':
+                metric_name = 'NetworkPacketsOut'
+                metric_status = True
+            else:
+                metric_name = None
+                log.info("Metric Not Supported by AWS plugin ")
+                metric_status = False
+            check_resp['metric_name'] = metric_name
+            #status is False when the metric is not supported
+            check_resp['status'] = metric_status
+            return check_resp
+        except Exception as e: 
+            log.error("Error in Plugin Inputs %s",str(e))     
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+      
+
+
+
+
+
+
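
The new metrics.py (and plugin_alarm.py further below) normalizes metric names through the if/elif chain in check_metric(). As a compact summary of what that method does, the same mapping written as a lookup table (illustration only, not part of the commit):

    METRIC_NAME_MAP = {
        'CPU_UTILIZATION':  'CPUUtilization',
        'DISK_READ_OPS':    'DiskReadOps',
        'DISK_WRITE_OPS':   'DiskWriteOps',
        'DISK_READ_BYTES':  'DiskReadBytes',
        'DISK_WRITE_BYTES': 'DiskWriteBytes',
        'PACKETS_RECEIVED': 'NetworkPacketsIn',
        'PACKETS_SENT':     'NetworkPacketsOut',
    }

    def check_metric(metric_name):
        """Return the CloudWatch name plus a supported flag, as the plugin method does."""
        aws_name = METRIC_NAME_MAP.get(metric_name)
        return {'metric_name': aws_name, 'status': aws_name is not None}
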
diff --git a/plugins/CloudWatch/plugin.py b/plugins/CloudWatch/plugin.py
deleted file mode 100644 (file)
index 9f917ae..0000000
+++ /dev/null
@@ -1,134 +0,0 @@
-##
-# Copyright 2017 xFlow Research Pvt. Ltd
-# This file is part of MON module
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact with: wajeeha.hamid@xflowresearch.com
-##
-
-'''
-AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
-'''
-
-__author__ = "Wajeeha Hamid"
-__date__   = "31-August-2017"
-
-import sys
-from connection import Connection
-from metric_alarms import MetricAlarm
-try:
-    from kafka import KafkaConsumer
-    from kafka.errors import KafkaError
-except:
-    exit("Kafka Error. Try activating your Kafka Services")
-
-class Plugin():
-    """Receives Alarm info from MetricAlarm and connects with the consumer/producer """
-    def __init__ (self): 
-        self.conn = Connection()
-        self.metricAlarm = MetricAlarm()
-
-        server = {'server': 'localhost:9092', 'topic': 'alarms'}
-    #Initialize a Consumer object to consume message from the SO    
-        self._consumer = KafkaConsumer(server['topic'],
-                                       group_id='my-group',
-                                       bootstrap_servers=server['server'])
-#---------------------------------------------------------------------------------------------------------------------------      
-    def connection(self):
-        """Connecting instances with CloudWatch"""
-        self.conn.setEnvironment()
-        self.cloudwatch_conn = self.conn.connection_instance()
-#---------------------------------------------------------------------------------------------------------------------------   
-    def configure_alarm(self,alarm_info):
-
-        alarm_id = self.metricAlarm.config_alarm(self.cloudwatch_conn,alarm_info)
-        return alarm_id
-#---------------------------------------------------------------------------------------------------------------------------          
-    def update_alarm_configuration(self,test):
-        alarm_id = self.metricAlarm.update_alarm(self.cloudwatch_conn,test)
-        return alarm_id
-#---------------------------------------------------------------------------------------------------------------------------            
-    def delete_alarm(self,alarm_id):
-        return self.metricAlarm.delete_Alarm(self.cloudwatch_conn,alarm_id)
-#---------------------------------------------------------------------------------------------------------------------------  
-    def get_alarms_list(self,instance_id):
-        return self.metricAlarm.alarms_list(self.cloudwatch_conn,instance_id) 
-#---------------------------------------------------------------------------------------------------------------------------            
-    def get_alarm_details(self,alarm_id):
-        return self.metricAlarm.alarm_details(self.cloudwatch_conn,alarm_id)
-#---------------------------------------------------------------------------------------------------------------------------            
-    def get_metrics_data(self,metric_name,instance_id,period,metric_unit):
-        return self.metricAlarm.metrics_data(self.cloudwatch_conn,metric_name,instance_id,period,metric_unit)
-#---------------------------------------------------------------------------------------------------------------------------   
-    def consumer(self,alarm_info):
-        try:
-            for message in self._consumer:
-
-                # Check the Functionlity that needs to be performed: topic = 'alarms'/'metrics'/'Access_Credentials'
-                if message.topic == "alarms":
-                    log.info("Action required against: %s" % (message.topic))
-
-                    if message.key == "Configure_Alarm":
-                        #alarm_info = json.loads(message.value)
-                        alarm_id = self.configure_alarm(alarm_info) #alarm_info = message.value
-                        log.info("New alarm created with alarmID: %s", alarm_id)
-                #Keys other than Configure_Alarm and Notify_Alarm are already handled here which are not yet finalized
-                    elif message.key == "Notify_Alarm":
-                        alarm_details = self.get_alarm_details(alarm_info['alarm_name'])#['alarm_id']
-
-                    elif message.key == "Update_Alarm":
-                        alarm_id = self.update_alarm_configuration(alarm_info)
-                        log.info("Alarm Updated with alarmID: %s", alarm_id)
-                    
-                    elif message.key == "Delete_Alarm":    
-                        alarm_id = self.delete_alarm(alarm_info['alarm_name'])
-                        log.info("Alarm Deleted with alarmID: %s", alarm_id)
-                   
-                    elif message.key == "Alarms_List":    
-                        self.get_alarms_list(alarm_info['resource_id'])#['alarm_names']
-                    else:
-                        log.debug("Unknown key, no action will be performed")
-                else:
-                    log.info("Message topic not relevant to this plugin: %s",
-                         message.topic)    
-        except Exception as e:
-                log.error("Consumer exception: %s", str(e))             
-#---------------------------------------------------------------------------------------------------------------------------   
-"""For Testing Purpose: Required calls to Trigger defined functions """
-'''obj = Plugin()
-obj.connection() 
-obj.consumer()
-
-alarm_info = dict()
-alarm_info['resource_id'] = 'i-098da78cbd8304e17'
-alarm_info['alarm_name'] = 'alarm-6'
-alarm_info['alarm_metric'] = 'CPUUtilization'
-alarm_info['alarm_severity'] = 'Critical'
-alarm_info['instance_type'] = 'AWS/EC2'
-alarm_info['alarm_statistics'] = 'Maximum'
-alarm_info['alarm_comparison'] = '>='
-alarm_info['alarm_threshold'] = 1.5
-alarm_info['alarm_period'] = 60
-alarm_info['alarm_evaluation_period'] = 1
-alarm_info['alarm_unit'] = None
-alarm_info['alarm_description'] = ''
-
-#obj.configure_alarm(alarm_info)
-#obj.update_alarm_configuration(alarm_info)
-#obj.delete_alarm('alarm-5|i-098da78cbd8304e17')
-#obj.get_alarms_list('i-098da78cbd8304e17')#['alarm_names']
-#obj.get_alarm_details('alarm-5|i-098da78cbd8304e17')#['alarm_id']
-#print obj.get_metrics_data('CPUUtilization','i-09462760703837b26','60',None)      '''
\ No newline at end of file
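
The deleted plugin.py handled both alarms and metrics on the single 'alarms' topic. This commit replaces it with plugin_alarm.py and plugin_metrics.py below, which consume the 'alarm_request' and 'metric_request' topics. A summary sketch (not in the commit) of how the old message keys correspond to the new ones:

    # Old keys (topic 'alarms')  ->  new keys (topic 'alarm_request')
    OLD_TO_NEW_ALARM_KEYS = {
        'Configure_Alarm': 'create_alarm_request',
        'Notify_Alarm':    'acknowledge_alarm',
        'Update_Alarm':    'update_alarm_request',
        'Delete_Alarm':    'delete_alarm_request',
        'Alarms_List':     'alarm_list_request',
    }
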
diff --git a/plugins/CloudWatch/plugin_alarm.py b/plugins/CloudWatch/plugin_alarm.py
new file mode 100644 (file)
index 0000000..b3446bd
--- /dev/null
@@ -0,0 +1,257 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+'''
+AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+'''
+
+__author__ = "Wajeeha Hamid"
+__date__   = "18-September-2017"
+
+import sys
+import json
+import logging as log
+from jsmin import jsmin
+from connection import Connection
+from metric_alarms import MetricAlarm
+from metrics import Metrics
+from kafka import KafkaConsumer
+sys.path.append("../../core/message-bus")
+from producer import KafkaProducer
+
+class Plugin():
+    """Receives Alarm info from MetricAlarm and connects with the consumer/producer"""
+    def __init__ (self): 
+        self.conn = Connection()
+        self.metricAlarm = MetricAlarm()
+        self.metric = Metrics()
+
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        self._consumer = KafkaConsumer(server['topic'], bootstrap_servers=server['server'])
+        self._consumer.subscribe(['alarm_request'])
+
+        #self.producer = KafkaProducer('create_alarm_request')
+        self.producer = KafkaProducer('')
+        
+
+#---------------------------------------------------------------------------------------------------------------------------      
+    def connection(self):
+        """Connecting instances with CloudWatch"""
+        self.conn.setEnvironment()
+        self.conn = self.conn.connection_instance()
+        self.cloudwatch_conn = self.conn['cloudwatch_connection']
+        self.ec2_conn = self.conn['ec2_connection']
+#---------------------------------------------------------------------------------------------------------------------------   
+    def configure_alarm(self,alarm_info):
+        alarm_id = self.metricAlarm.config_alarm(self.cloudwatch_conn,alarm_info)
+        return alarm_id
+#---------------------------------------------------------------------------------------------------------------------------          
+    def update_alarm_configuration(self,test):
+        alarm_id = self.metricAlarm.update_alarm(self.cloudwatch_conn,test)
+        return alarm_id
+#---------------------------------------------------------------------------------------------------------------------------            
+    def delete_alarm(self,alarm_id):
+        return self.metricAlarm.delete_Alarm(self.cloudwatch_conn,alarm_id)
+#---------------------------------------------------------------------------------------------------------------------------  
+    def get_alarms_list(self,instance_id):
+        return self.metricAlarm.alarms_list(self.cloudwatch_conn,instance_id) 
+#---------------------------------------------------------------------------------------------------------------------------            
+    def get_ack_details(self,ack_info):
+        return self.metricAlarm.alarm_details(self.cloudwatch_conn,ack_info)
+#---------------------------------------------------------------------------------------------------------------------------            
+    def get_metrics_data(self,data_info):
+        return self.metric.metricsData(self.cloudwatch_conn,data_info)
+#--------------------------------------------------------------------------------------------------------------------------- 
+
+    def consumer(self):
+        """Consume info from the message bus to manage alarms."""
+        try: 
+            for message in self._consumer:
+                # Check the Functionality that needs to be performed: topic = 'alarms'/'metrics'/'Access_Credentials'
+                if message.topic == "alarm_request":
+                    log.info("Action required against: %s" % (message.topic))
+                    alarm_info = json.loads(message.value)
+
+                    if message.key == "create_alarm_request":  
+                        if alarm_info['vim_type'] == 'AWS':                        
+                            alarm_inner_dict = alarm_info['alarm_create_request']
+                            metric_status = self.check_metric(alarm_inner_dict['metric_name'])
+                            
+                            if self.check_resource(alarm_inner_dict['resource_uuid']) == True and metric_status['status'] == True:
+                                log.debug ("Resource and metric exist")
+                            
+                                alarm_info['alarm_create_request']['metric_name'] = metric_status['metric_name']
+                                #Generate a valid response message, send via producer
+                                config_resp = self.configure_alarm(alarm_info) #alarm_info = message.value
+                                if config_resp == None:
+                                    log.debug("Alarm Already exists")
+                                    payload = json.dumps(config_resp)                                   
+                                    file = open('../../core/models/create_alarm_resp.json','wb').write((payload))
+                                    self.producer.create_alarm_response(key='create_alarm_response',message=payload,topic = 'alarm_response')
+                                else: 
+                                    payload = json.dumps(config_resp)                                   
+                                    file = open('../../core/models/create_alarm_resp.json','wb').write((payload))
+                                    
+                                    self.producer.create_alarm_response(key='create_alarm_response',message=payload,topic = 'alarm_response')
+                                    log.info("New alarm created with alarm info: %s", config_resp)                           
+                            else:
+                                log.error("Resource ID doesn't exist")
+                        else:
+                            log.error("Plugin inputs are incorrect")
+                        
+
+                    elif message.key == "acknowledge_alarm":
+                        alarm_inner_dict = alarm_info['ack_details']
+                        if alarm_info['vim_type'] == 'AWS':
+                            if self.check_resource(alarm_inner_dict['resource_uuid']) == True: 
+                                alarm_info = json.loads(message.value)
+                                #Generate a valid response message, send via producer
+                                ack_details = self.get_ack_details(alarm_info)
+                                payload = json.dumps(ack_details)                                  
+                                file = open('../../core/models/notify_alarm.json','wb').write((payload))
+                                self.producer.update_alarm_response(key='notify_alarm',message=payload,topic = 'alarm_response')
+                                log.info("Acknowledge sent: %s", ack_details)
+                            else:
+                                log.error("Resource ID is Incorrect")    
+                        else:
+                            log.error(" VIM type incorrect ")     
+
+
+                    elif message.key == "update_alarm_request":
+                        if alarm_info['vim_type'] == 'AWS':                         
+                            alarm_inner_dict = alarm_info['alarm_update_request']
+                            metric_status = self.check_metric(alarm_inner_dict['metric_name'])
+                            
+                            if metric_status['status'] == True:
+                                log.debug ("Resource and metric exist")
+                                alarm_info['alarm_update_request']['metric_name'] = metric_status['metric_name']
+                                #Generate a valid response message, send via producer
+                                update_resp = self.update_alarm_configuration(alarm_info)
+                                if update_resp == None:                                    
+                                    payload = json.dumps(update_resp)                                   
+                                    file = open('../../core/models/update_alarm_resp.json','wb').write((payload))
+                                    self.producer.update_alarm_response(key='update_alarm_response',message=payload,topic = 'alarm_response')
+                                    log.debug("Alarm Already exists")
+                                else: 
+                                    payload = json.dumps(update_resp)                                   
+                                    file = open('../../core/models/update_alarm_resp.json','wb').write((payload))
+                                    self.producer.update_alarm_response(key='update_alarm_response',message=payload,topic = 'alarm_response')
+                                    log.info("Alarm Updated with alarm info: %s", update_resp)                           
+                            else:
+                                log.info ("Metric Not Supported")
+                        else:
+                            log.error(" VIM type Incorrect ")  
+                    
+                    elif message.key == "delete_alarm_request":  
+                        if alarm_info['vim_type'] == 'AWS':
+                            del_info = json.loads(message.value)
+                            #Generate a valid response message, send via producer
+                            del_resp = self.delete_alarm(del_info)
+                            payload = json.dumps(del_resp)                                   
+                            file = open('../../core/models/delete_alarm_resp.json','wb').write((payload))
+                            self.producer.update_alarm_response(key='delete_alarm_response',message=payload,topic = 'alarm_response')
+                            log.info("Alarm Deleted with alarm info: %s", del_resp)
+                        else: 
+                            log.error(" VIM type Incorrect ")     
+               
+                    elif message.key == "alarm_list_request":
+                        alarm_inner_dict = alarm_info['alarm_list_request']
+                        if alarm_info['vim_type'] == 'AWS': 
+                            if self.check_resource(alarm_inner_dict['resource_uuid']) == True: 
+                                #Generate a valid response message, send via producer
+                                list_resp = self.get_alarms_list(alarm_info)#['alarm_names']
+                                payload = json.dumps(list_resp)                                                                 
+                                file = open('../../core/models/list_alarm_resp.json','wb').write((payload))
+                                self.producer.update_alarm_response(key='list_alarm_response',message=payload,topic = 'alarm_response')
+                            else:
+                                log.error("Resource ID is Incorrect")    
+                        else:
+                            log.error(" VIM type Incorrect ") 
+                                
+                    else:
+                        log.debug("Unknown key, no action will be performed")
+                           
+                else:
+                    log.info("Message topic not relevant to this plugin: %s",
+                         message.topic)    
+        except Exception as e:
+                log.error("Consumer exception: %s", str(e))             
+#--------------------------------------------------------------------------------------------------------------------------- 
+    def check_resource(self,resource_uuid):
+        '''Finding Resource with the resource_uuid'''
+        try:
+            check_resp = dict()
+            instances = self.ec2_conn.get_all_instance_status()
+
+            #resource_id
+            for instance_id in instances:
+                instance_id = str(instance_id).split(':')[1]
+                if instance_id == resource_uuid:
+                    check_resp['resource_uuid'] = resource_uuid
+                    return True 
+            return False
+
+        except Exception as e: 
+            log.error("Error in Plugin Inputs %s",str(e)) 
+#--------------------------------------------------------------------------------------------------------------------------- 
+    def check_metric(self,metric_name):
+        ''' Checking whether the metric is supported by AWS '''
+        try:
+            check_resp = dict()
+            
+            #metric_name
+            if metric_name == 'CPU_UTILIZATION':
+                metric_name = 'CPUUtilization'
+                metric_status = True
+            elif metric_name == 'DISK_READ_OPS':
+                metric_name = 'DiskReadOps'
+                metric_status = True
+            elif metric_name == 'DISK_WRITE_OPS':
+                metric_name = 'DiskWriteOps'
+                metric_status = True
+            elif metric_name == 'DISK_READ_BYTES':
+                metric_name = 'DiskReadBytes'
+                metric_status = True
+            elif metric_name == 'DISK_WRITE_BYTES':
+                metric_name = 'DiskWriteBytes'
+                metric_status = True
+            elif metric_name == 'PACKETS_RECEIVED':
+                metric_name = 'NetworkPacketsIn'
+                metric_status = True
+            elif metric_name == 'PACKETS_SENT':
+                metric_name = 'NetworkPacketsOut'
+                metric_status = True
+            else:
+                metric_name = None
+                metric_status = False
+            check_resp['metric_name'] = metric_name
+            #status is False when the metric is not supported
+            check_resp['status'] = metric_status
+            return check_resp
+        except Exception as e: 
+            log.error("Error in Plugin Inputs %s",str(e)) 
+#--------------------------------------------------------------------------------------------------------------------------- 
+
+obj = Plugin()
+obj.connection()
+obj.consumer()
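
For context, a sketch of the kind of message the consumer above expects on the 'alarm_request' topic with key 'create_alarm_request'. The field names come from the handler code; the values are placeholders:

    import json

    # Hypothetical payload; field names mirror what consumer() and config_alarm() read.
    message_value = json.dumps({
        'schema_version': '1.0',
        'vim_type': 'AWS',
        'alarm_create_request': {
            'correlation_id': 123,
            'alarm_name': 'cpu_high',                 # hypothetical
            'resource_uuid': 'i-098da78cbd8304e17',   # must exist in EC2 (checked by check_resource)
            'metric_name': 'CPU_UTILIZATION',         # normalized name, mapped by check_metric()
            'statistic': 'Maximum',
            'operation': '>=',
            'threshold_value': 80,
            'unit': 'Percent',
            'severity': 'Critical',
            'description': 'CPU above 80 percent',
        },
    })
    # The plugin replies on topic 'alarm_response' with key 'create_alarm_response'.
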
diff --git a/plugins/CloudWatch/plugin_metrics.py b/plugins/CloudWatch/plugin_metrics.py
new file mode 100644 (file)
index 0000000..cb04a65
--- /dev/null
@@ -0,0 +1,224 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+'''
+AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+'''
+
+__author__ = "Wajeeha Hamid"
+__date__   = "18-September-2017"
+
+import sys
+import json
+from connection import Connection
+from metric_alarms import MetricAlarm
+from metrics import Metrics
+# Need to import the producer message bus,not working yet
+#from core.message_bus.producerfunct import KafkaProducer
+sys.path.append("../../core/message-bus")
+from producer import KafkaProducer
+from kafka import KafkaConsumer
+import logging as log
+
+class plugin_metrics():
+    """Receives Alarm info from MetricAlarm and connects with the consumer/producer """
+    def __init__ (self): 
+        self.conn = Connection()
+        self.metric = Metrics()
+
+        #server = {'server': 'localhost:9092', 'topic': 'metrics_request'}
+    #Initialize a Consumer object to consume message from the SO    
+        self._consumer = KafkaConsumer(bootstrap_servers='localhost:9092')
+        self._consumer.subscribe(['metric_request'])
+
+        #producer = KafkaProducer('create_metric_request')
+
+        self.producer = KafkaProducer('')
+#---------------------------------------------------------------------------------------------------------------------------      
+    def connection(self):
+        try:
+            """Connecting instances with CloudWatch"""
+            self.conn.setEnvironment()
+            self.conn = self.conn.connection_instance()
+            self.cloudwatch_conn = self.conn['cloudwatch_connection']
+            self.ec2_conn = self.conn['ec2_connection']
+
+        except Exception as e:
+            log.error("Failed to Connect with AWS %s: " + str(e))
+#---------------------------------------------------------------------------------------------------------------------------   
+    def create_metric_request(self,metric_info):
+        '''Compatible API using normalized parameters'''
+        metric_resp = self.metric.createMetrics(self.cloudwatch_conn,metric_info)
+        return metric_resp
+#---------------------------------------------------------------------------------------------------------------------------          
+    def update_metric_request(self,updated_info):
+        '''Compatible API using normalized parameters'''
+        update_resp = self.metric.updateMetrics(self.cloudwatch_conn,updated_info)
+        return update_resp
+#---------------------------------------------------------------------------------------------------------------------------            
+    def delete_metric_request(self,delete_info):
+        '''Compatible API using normalized parameters'''
+        del_resp = self.metric.deleteMetrics(self.cloudwatch_conn,delete_info)
+        return del_resp
+#---------------------------------------------------------------------------------------------------------------------------  
+    def list_metrics_request(self,list_info):
+        '''Compatible API using normalized parameters'''
+        list_resp = self.metric.listMetrics(self.cloudwatch_conn,list_info)
+        return list_resp
+#---------------------------------------------------------------------------------------------------------------------------                        
+    def read_metrics_data(self,list_info):
+        '''Compatible API using normalized parameters
+        Read all metric data related to a specified metric'''
+        data_resp=self.metric.metricsData(self.cloudwatch_conn,list_info)
+        return data_resp
+#--------------------------------------------------------------------------------------------------------------------------- 
+
+    def consumer(self):
+        '''Consume metric request messages from the SO:
+        1) parse the message and trigger the method that
+        matches the key and topic provided in the request,
+
+        2) save the response from the plugin in JSON format,
+
+        3) call the producer response methods to send the
+        response back to the message bus.
+        '''
+        
+        try:
+            for message in self._consumer:
+                
+                metric_info = json.loads(message.value)
+                metric_response = dict()
+  
+                if metric_info['vim_type'] == 'AWS':
+                    log.debug ("VIM support : AWS")
+
+                # Check the Functionlity that needs to be performed: topic = 'alarms'/'metrics'/'Access_Credentials'
+                    if message.topic == "metric_request":
+                        log.info("Action required against: %s" % (message.topic))
+
+                        if message.key == "create_metric_request":                            
+                            if self.check_resource(metric_info['metric_create']['resource_uuid']) == True:
+                                metric_resp = self.create_metric_request(metric_info['metric_create']) #alarm_info = message.value
+                                metric_response['schema_version'] = metric_info['schema_version']
+                                metric_response['schema_type']    = "create_metric_response"
+                                metric_response['metric_create_response'] = metric_resp
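+                                # Serialize the response, record it under core/models and publish it on the metric_response topic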
+                                payload = json.dumps(metric_response)                                                                  
+                                file = open('../../core/models/create_metric_resp.json','wb').write((payload))
+                                self.producer.create_metrics_resp(key='create_metric_response',message=payload,topic = 'metric_response')
+
+                                log.info("Metric configured: %s", metric_resp)
+                                return metric_response
+                                
+                        elif message.key == "update_metric_request":
+                            if self.check_resource(metric_info['metric_create']['resource_uuid']) == True:
+                                update_resp = self.update_metric_request(metric_info['metric_create'])
+                                metric_response['schema_version'] = metric_info['schema_version']
+                                metric_response['schema_type'] = "update_metric_response"
+                                metric_response['metric_update_response'] = update_resp
+                                payload = json.dumps(metric_response)                                                                                                 
+                                file = open('../../core/models/update_metric_resp.json','wb').write((payload))
+                                self.producer.create_metrics_resp(key='update_metric_response',message=payload,topic = 'metric_response')
+
+                                log.info("Metric Updates: %s",metric_response)
+                                return metric_response
+                                
+                        elif message.key == "delete_metric_request":
+                            if self.check_resource(metric_info['resource_uuid']) == True:
+                                del_resp=self.delete_metric_request(metric_info)
+                                payload = json.dumps(del_resp)                                                                                                
+                                file = open('../../core/models/delete_metric_resp.json','wb').write((payload))
+                                self.producer.create_metrics_resp(key='delete_metric_response',message=payload,topic = 'metric_response')
+
+                                log.info("Metric Deletion Not supported in AWS : %s",del_resp)
+                                return del_resp
+
+                        elif message.key == "list_metric_request": 
+                            if self.check_resource(metric_info['metrics_list_request']['resource_uuid']) == True:
+                                list_resp = self.list_metrics_request(metric_info['metrics_list_request'])
+                                metric_response['schema_version'] = metric_info['schema_version']
+                                metric_response['schema_type'] = "list_metric_response"
+                                metric_response['correlation_id'] = metric_info['metrics_list_request']['correlation_id']
+                                metric_response['vim_type'] = metric_info['vim_type']
+                                metric_response['metrics_list'] = list_resp
+                                payload = json.dumps(metric_response)                                                                                                
+                                file = open('../../core/models/list_metric_resp.json','wb').write((payload))
+                                self.producer.create_metrics_resp(key='list_metrics_response',message=payload,topic = 'metric_response')
+
+                                log.info("Metric List: %s",metric_response)
+                                return metric_response
+
+                        elif message.key == "read_metric_data_request":
+                            if self.check_resource(metric_info['resource_uuid']) == True:
+                                data_resp = self.read_metrics_data(metric_info)
+                                metric_response['schema_version'] = metric_info['schema_version']
+                                metric_response['schema_type'] = "list_metric_response"
+                                metric_response['metric_name'] = metric_info['metric_name']
+                                metric_response['metric_uuid'] = metric_info['metric_uuid']
+                                metric_response['correlation_id'] = metric_info['correlation_uuid']
+                                metric_response['resource_uuid'] = metric_info['resource_uuid']
+                                metric_response['tenant_uuid'] = metric_info['tenant_uuid']
+                                metric_response['metrics_data'] = data_resp
+                                payload = json.dumps(metric_response)
+                                                                                                
+                                file = open('../../core/models/read_metric_data_resp.json','wb').write((payload))
+                                self.producer.create_metrics_resp(key='read_metric_data_response',message=payload,topic = 'metric_response')
+                                log.info("Metric Data Response: %s",metric_response)
+                                return metric_response 
+
+                        else:
+                            log.debug("Unknown key, no action will be performed")
+                    else:
+                        log.info("Message topic not relevant to this plugin: %s",
+                             message.topic)
+                else:
+                    log.info("Bad VIM request: %s", metric_info['vim_type'])
+        except Exception as e:
+            log.error("Consumer exception: %s", str(e))
+
+#---------------------------------------------------------------------------------------------------------------------------
+    def check_resource(self,resource_uuid):
+        '''Check whether the resource_uuid matches one of the EC2 instances'''
+
+        try:
+            check_resp = dict()
+            instances = self.ec2_conn.get_all_instance_status()
+            status_resource = False
+
+            #resource_id
+            for instance_id in instances:
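+                # Each status entry prints as 'InstanceStatus:<id>', so take the id after the colon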
+                instance_id = str(instance_id).split(':')[1]
+                if instance_id == resource_uuid:
+                    check_resp['resource_uuid'] = resource_uuid
+                    status_resource = True
+                else:
+                    status_resource = False
+
+            #status
+            return status_resource
+
+        except Exception as e: 
+            log.error("Error in Plugin Inputs %s",str(e))          
+#---------------------------------------------------------------------------------------------------------------------------   
+
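+# Instantiate the plugin, connect to AWS and start consuming metric requests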
+obj = plugin_metrics()
+obj.connection() 
+obj.consumer()
index 0f4a2da..b5e4a3e 100644 (file)
@@ -1,14 +1,50 @@
-"""Send alarm info from Aodh to SO via MON."""
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Carry out alarming requests via Aodh API."""
 
 import json
 import logging as log
 
-from collections import OrderedDict
+from core.message_bus.producer import KafkaProducer
 
 from kafka import KafkaConsumer
 
 from plugins.OpenStack.common import Common
-
+from plugins.OpenStack.response import OpenStack_Response
+
+__author__ = "Helena McGough"
+
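+# Alarm names supported by this plugin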
+ALARM_NAMES = [
+    "Average_Memory_Usage_Above_Threshold",
+    "Read_Latency_Above_Threshold",
+    "Write_Latency_Above_Threshold",
+    "DISK_READ_OPS",
+    "DISK_WRITE_OPS",
+    "DISK_READ_BYTES",
+    "DISK_WRITE_BYTES",
+    "Net_Packets_Dropped",
+    "Packets_in_Above_Threshold",
+    "Packets_out_Above_Threshold",
+    "CPU_Utilization_Above_Threshold"]
 
 SEVERITIES = {
     "WARNING": "low",
@@ -17,16 +53,20 @@ SEVERITIES = {
     "CRITICAL": "critical",
     "INDETERMINATE": "critical"}
 
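+# Map normalized statistic names to Aodh aggregation methods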
+STATISTICS = {
+    "AVERAGE": "avg",
+    "MINIMUM": "min",
+    "MAXIMUM": "max",
+    "COUNT": "count",
+    "SUM": "sum"}
+
 
 class Alarming(object):
-    """Receives alarm info from Aodh."""
+    """Carries out alarming requests and responses via Aodh API."""
 
     def __init__(self):
-        """Create the aodh_receiver instance."""
+        """Create the OpenStack alarming instance."""
         self._common = Common()
-        self.auth_token = None
-        self.endpoint = None
-        self.resp_status = None
 
         # TODO(mcgoughh): Remove hardcoded kafkaconsumer
         # Initialize a generic consumer object to consume message from the SO
@@ -35,7 +75,11 @@ class Alarming(object):
                                        group_id='osm_mon',
                                        bootstrap_servers=server['server'])
 
-        # TODO(mcgoughh): Initialize a producer to send messages bask to the SO
+        # Use the Response class to generate valid json response messages
+        self._response = OpenStack_Response()
+
+        # Initialize a producer to send responses back to the SO
+        self._producer = KafkaProducer("alarm_response")
 
     def alarming(self):
         """Consume info from the message bus to manage alarms."""
@@ -48,94 +92,116 @@ class Alarming(object):
             if vim_type == "openstack":
                 log.info("Alarm action required: %s" % (message.topic))
 
+                # Generate and auth_token and endpoint for request
+                auth_token, endpoint = self.authenticate(values)
+
                 if message.key == "create_alarm_request":
                     # Configure/Update an alarm
                     alarm_details = values['alarm_create_request']
 
-                    # Generate an auth_token and endpoint
-                    auth_token = self._common._authenticate(
-                        tenant_id=alarm_details['tenant_uuid'])
-                    endpoint = self._common.get_endpoint("alarming")
-
-                    alarm_id = self.configure_alarm(
+                    alarm_id, alarm_status = self.configure_alarm(
                         endpoint, auth_token, alarm_details)
 
-                    # TODO(mcgoughh): will send an acknowledge message back on
-                    # the bus via the producer
-                    if alarm_id is not None:
-                        self.resp_status = True
-                        log.debug("A valid alarm was found/created: %s",
-                                  self.resp_status)
-                    else:
-                        self.resp_status = False
-                        log.debug("Failed to create desired alarm: %s",
-                                  self.resp_status)
+                    # Generate a valid response message, send via producer
+                    try:
+                        resp_message = self._response.generate_response(
+                            'create_alarm_response', status=alarm_status,
+                            alarm_id=alarm_id,
+                            cor_id=alarm_details['correlation_id'])
+                        self._producer.create_alarm_response(
+                            'create_alarm_response', resp_message,
+                            'alarm_response')
+                    except Exception as exc:
+                        log.warn("Response creation failed: %s", exc)
 
                 elif message.key == "list_alarm_request":
-                    auth_token = self._common._authenticate()
-                    endpoint = self._common.get_endpoint("alarming")
-
-                    # List all of the alarms
-                    alarm_list = self.list_alarms(endpoint, auth_token)
-
-                    # TODO(mcgoughh): send a repsonse back to SO
-                    if alarm_list is not None:
-                        self.resp_status = True
-                        log.info("A list of alarms was generated: %s",
-                                 alarm_list)
-                    else:
-                        self.resp_status = False
-                        log.warn("Failed to generae an alarm list")
+                    # Check for a specified alarm_name, resource_uuid or severity
+                    # and generate the appropriate list
+                    list_details = values['alarm_list_request']
+                    try:
+                        name = list_details['alarm_name']
+                        alarm_list = self.list_alarms(
+                            endpoint, auth_token, alarm_name=name)
+                    except Exception as a_name:
+                        log.debug("No name specified for list:%s", a_name)
+                        try:
+                            resource = list_details['resource_uuid']
+                            alarm_list = self.list_alarms(
+                                endpoint, auth_token, resource_id=resource)
+                        except Exception as r_id:
+                            log.debug("No resource id specified for this list:\
+                                       %s", r_id)
+                            try:
+                                severe = list_details['severity']
+                                alarm_list = self.list_alarms(
+                                    endpoint, auth_token, severity=severe)
+                            except Exception as exc:
+                                log.warn("No severity specified for list: %s.\
+                                           will return full list.", exc)
+                                alarm_list = self.list_alarms(
+                                    endpoint, auth_token)
+
+                    try:
+                        # Generate and send a list response back
+                        resp_message = self._response.generate_response(
+                            'list_alarm_response', alarm_list=alarm_list,
+                            cor_id=list_details['correlation_id'])
+                        self._producer.list_alarm_response(
+                            'list_alarm_response', resp_message,
+                            'alarm_response')
+                    except Exception as exc:
+                        log.warn("Failed to send a valid response back.")
 
                 elif message.key == "delete_alarm_request":
-                    # Delete the specified alarm
-                    auth_token = self._common._authenticate()
-                    endpoint = self._common.get_endpoint("alarming")
+                    request_details = values['alarm_delete_request']
+                    alarm_id = request_details['alarm_uuid']
 
-                    alarm_id = values['alarm_delete_request']['alarm_uuid']
-
-                    response = self.delete_alarm(
+                    resp_status = self.delete_alarm(
                         endpoint, auth_token, alarm_id)
 
-                    # TODO(mcgoughh): send a response back on the bus
-                    if response is True:
-                        log.info("Requested alarm has been deleted: %s",
-                                 alarm_id)
-                    else:
-                        log.warn("Failed to delete requested alarm.")
+                    # Generate and send a response message
+                    try:
+                        resp_message = self._response.generate_response(
+                            'delete_alarm_response', alarm_id=alarm_id,
+                            status=resp_status,
+                            cor_id=request_details['correlation_id'])
+                        self._producer.delete_alarm_response(
+                            'delete_alarm_response', resp_message,
+                            'alarm_response')
+                    except Exception as exc:
+                        log.warn("Failed to create delete reponse:%s", exc)
 
                 elif message.key == "acknowledge_alarm":
                     # Acknowledge that an alarm has been dealt with by the SO
-                    # Set its state to ok
-                    auth_token = self._common._authenticate()
-                    endpoint = self._common.get_endpoint("alarming")
-
                     alarm_id = values['ack_details']['alarm_uuid']
 
                     response = self.update_alarm_state(
                         endpoint, auth_token, alarm_id)
 
+                    # Log if an alarm was reset
                     if response is True:
-                        log.info("Status has been updated for alarm, %s.",
-                                 alarm_id)
+                        log.info("Acknowledged the alarm and cleared it.")
                     else:
-                        log.warn("Failed update the state of requested alarm.")
+                        log.warn("Failed to acknowledge/clear the alarm.")
 
                 elif message.key == "update_alarm_request":
                     # Update alarm configurations
-                    auth_token = self._common._authenticate()
-                    endpoint = self._common.get_endpoint("alarming")
-
                     alarm_details = values['alarm_update_request']
 
-                    alarm_id = self.update_alarm(
+                    alarm_id, status = self.update_alarm(
                         endpoint, auth_token, alarm_details)
 
-                    # TODO(mcgoughh): send a response message to the SO
-                    if alarm_id is not None:
-                        log.info("Alarm configuration was update correctly.")
-                    else:
-                        log.warn("Unable to update the specified alarm")
+                    # Generate a response for an update request
+                    try:
+                        resp_message = self._response.generate_response(
+                            'update_alarm_response', alarm_id=alarm_id,
+                            cor_id=alarm_details['correlation_id'],
+                            status=status)
+                        self._producer.update_alarm_response(
+                            'update_alarm_response', resp_message,
+                            'alarm_response')
+                    except Exception as exc:
+                        log.warn("Failed to send an update response:%s", exc)
 
                 else:
                     log.debug("Unknown key, no action will be performed")
@@ -145,108 +211,95 @@ class Alarming(object):
 
         return
 
-    def get_alarm_id(self, endpoint, auth_token, alarm_name):
-        """Get a list of alarms that exist in Aodh."""
-        alarm_id = None
-        url = "{}/v2/alarms/".format(endpoint)
-
-        # TODO(mcgoughh): will query on resource_id once it has been
-        # implemented need to create the query field when creating
-        # the alarm
-        query = OrderedDict([("q.field", 'name'), ("q.op", "eq"),
-                            ("q.value", alarm_name)])
-
-        result = self._common._perform_request(
-            url, auth_token, req_type="get", params=query)
-
-        try:
-            alarm_id = json.loads(result.text)[0]['alarm_id']
-            log.info("An existing alarm was found: %s", alarm_id)
-            return alarm_id
-        except Exception:
-            log.debug("Alarm doesn't exist, needs to be created.")
-        return alarm_id
-
     def configure_alarm(self, endpoint, auth_token, values):
         """Create requested alarm in Aodh."""
         url = "{}/v2/alarms/".format(endpoint)
 
+        # Check if the desired alarm is supported
         alarm_name = values['alarm_name']
+        if alarm_name not in ALARM_NAMES:
+            log.warn("This alarm is not supported, by a valid metric.")
+            return None, False
 
-        # Confirm alarm doesn't exist
-        alarm_id = self.get_alarm_id(endpoint, auth_token, alarm_name)
-        if alarm_id is None:
-            # Try to create the alarm
-            try:
-                metric_name = values['metric_name']
-                resource_id = values['resource_uuid']
-                payload = self.check_payload(values, metric_name, resource_id,
-                                             alarm_name)
-                new_alarm = self._common._perform_request(
-                    url, auth_token, req_type="post", payload=payload)
-
-                return json.loads(new_alarm.text)['alarm_id']
-            except Exception as exc:
-                log.warn("Alarm creation could not be performed: %s", exc)
-                return alarm_id
-        else:
-            log.warn("This alarm already exists. Try an update instead.")
-        return None
+        try:
+            metric_name = values['metric_name']
+            resource_id = values['resource_uuid']
+            # Check the payload for the desired alarm
+            payload = self.check_payload(values, metric_name, resource_id,
+                                         alarm_name)
+            new_alarm = self._common._perform_request(
+                url, auth_token, req_type="post", payload=payload)
+
+            return json.loads(new_alarm.text)['alarm_id'], True
+        except Exception as exc:
+            log.warn("Alarm creation could not be performed: %s", exc)
+        return None, False
 
     def delete_alarm(self, endpoint, auth_token, alarm_id):
         """Delete alarm function."""
         url = "{}/v2/alarms/%s".format(endpoint) % (alarm_id)
 
-        result = False
         try:
-            self._common._perform_request(url, auth_token, req_type="delete")
-            return True
+            result = self._common._perform_request(
+                url, auth_token, req_type="delete")
+            if str(result.status_code) == "404":
+                # If status code is 404 alarm did not exist
+                return False
+            else:
+                return True
+
         except Exception as exc:
             log.warn("Failed to delete alarm: %s because %s.", alarm_id, exc)
-        return result
+        return False
 
     def list_alarms(self, endpoint, auth_token,
                     alarm_name=None, resource_id=None, severity=None):
         """Generate the requested list of alarms."""
-        result = None
-        if (alarm_name and resource_id and severity) is None:
-            # List all alarms
-            url = "{}/v2/alarms/".format(endpoint)
-
-            try:
-                result = self._common._perform_request(
-                    url, auth_token, req_type="get")
-                return json.loads(result.text)
-            except Exception as exc:
-                log.warn("Unable to generate alarm list: %s", exc)
+        url = "{}/v2/alarms/".format(endpoint)
+        alarm_list = []
 
-            return result
+        result = self._common._perform_request(
+            url, auth_token, req_type="get")
+        if result is not None:
+            # Check for a specified list based on:
+            # alarm_name, severity, resource_id
+            if alarm_name is not None:
+                for alarm in json.loads(result.text):
+                    if alarm_name in str(alarm):
+                        alarm_list.append(str(alarm))
+            elif resource_id is not None:
+                for alarm in json.loads(result.text):
+                    if resource_id in str(alarm):
+                        alarm_list.append(str(alarm))
+            elif severity is not None:
+                for alarm in json.loads(result.text):
+                    if severity in str(alarm):
+                        alarm_list.append(str(alarm))
+            else:
+                alarm_list = result.text
         else:
-            # TODO(mcgoughh): support more specific lists
-            log.debug("Requested list is unavailable")
-
-        return result
+            return None
+        return alarm_list
 
     def update_alarm_state(self, endpoint, auth_token, alarm_id):
         """Set the state of an alarm to ok when ack message is received."""
-        result = False
-
         url = "{}/v2/alarms/%s/state".format(endpoint) % alarm_id
         payload = json.dumps("ok")
 
         try:
-            result = self._common._perform_request(
+            self._common._perform_request(
                 url, auth_token, req_type="put", payload=payload)
             return True
         except Exception as exc:
             log.warn("Unable to update alarm state: %s", exc)
-        return result
+        return False
 
     def update_alarm(self, endpoint, auth_token, values):
         """Get alarm name for an alarm configuration update."""
         # Get already existing alarm details
         url = "{}/v2/alarms/%s".format(endpoint) % values['alarm_uuid']
 
+        # Get the current configuration of the alarm
         try:
             result = self._common._perform_request(
                 url, auth_token, req_type="get")
@@ -258,22 +311,23 @@ class Alarming(object):
         except Exception as exc:
             log.warn("Failed to retreive existing alarm info: %s.\
                      Can only update OSM created alarms.", exc)
-            return None
+            return None, False
 
-        # Genate and check payload configuration for alarm update
+        # Generate and check the payload configuration for the alarm update
         payload = self.check_payload(values, metric_name, resource_id,
                                      alarm_name, alarm_state=alarm_state)
 
+        # Updates the alarm configurations with the valid payload
         if payload is not None:
             try:
                 update_alarm = self._common._perform_request(
                     url, auth_token, req_type="put", payload=payload)
 
-                return json.loads(update_alarm.text)['alarm_id']
+                return json.loads(update_alarm.text)['alarm_id'], True
             except Exception as exc:
                 log.warn("Alarm update could not be performed: %s", exc)
-                return None
-        return None
+                return None, False
+        return None, False
 
     def check_payload(self, values, metric_name, resource_id,
                       alarm_name, alarm_state=None):
@@ -283,17 +337,19 @@ class Alarming(object):
             severity = values['severity']
             if severity == "INDETERMINATE":
                 alarm_state = "insufficient data"
-
             if alarm_state is None:
                 alarm_state = "ok"
 
+            statistic = values['statistic']
             # Try to configure the payload for the update/create request
+            # Can only update: threshold, operation, statistic and
+            # the severity of the alarm
             rule = {'threshold': values['threshold_value'],
                     'comparison_operator': values['operation'].lower(),
                     'metric': metric_name,
                     'resource_id': resource_id,
                     'resource_type': 'generic',
-                    'aggregation_method': values['statistic'].lower()}
+                    'aggregation_method': STATISTICS[statistic]}
             payload = json.dumps({'state': alarm_state,
                                   'name': alarm_name,
                                   'severity': SEVERITIES[severity],
@@ -303,3 +359,30 @@ class Alarming(object):
         except KeyError as exc:
             log.warn("Alarm is not configured correctly: %s", exc)
         return None
+
+    def authenticate(self, values):
+        """Generate an authentication token and endpoint for alarm request."""
+        try:
+            # Check for a tenant_id
+            auth_token = self._common._authenticate(
+                tenant_id=values['tenant_uuid'])
+            endpoint = self._common.get_endpoint("alarming")
+        except Exception as exc:
+            log.warn("Tenant ID is not specified. Will use a generic\
+                      authentication: %s", exc)
+            auth_token = self._common._authenticate()
+            endpoint = self._common.get_endpoint("alarming")
+
+        return auth_token, endpoint
+
+    def get_alarm_state(self, endpoint, auth_token, alarm_id):
+        """Get the state of the alarm."""
+        url = "{}/v2/alarms/%s/state".format(endpoint) % alarm_id
+
+        try:
+            alarm_state = self._common._perform_request(
+                url, auth_token, req_type="get")
+            return json.loads(alarm_state.text)
+        except Exception as exc:
+            log.warn("Failed to get the state of the alarm:%s", exc)
+        return None
diff --git a/plugins/OpenStack/Aodh/notifier.py b/plugins/OpenStack/Aodh/notifier.py
new file mode 100644 (file)
index 0000000..2068f03
--- /dev/null
@@ -0,0 +1,84 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Notifier class for alarm notification response."""
+
+import json
+import logging as log
+
+from core.message_bus.producer import KafkaProducer
+
+from plugins.OpenStack.response import OpenStack_Response
+from plugins.OpenStack.singleton import Singleton
+
+__author__ = "Helena McGough"
+
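+# Alarms for which notification responses will be sent to the SO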
+ALARM_NAMES = [
+    "Average_Memory_Usage_Above_Threshold",
+    "Read_Latency_Above_Threshold",
+    "Write_Latency_Above_Threshold",
+    "DISK_READ_OPS",
+    "DISK_WRITE_OPS",
+    "DISK_READ_BYTES",
+    "DISK_WRITE_BYTES",
+    "Net_Packets_Dropped",
+    "Packets_in_Above_Threshold",
+    "Packets_out_Above_Threshold",
+    "CPU_Utilization_Above_Threshold"]
+
+
+@Singleton
+class Notifier(object):
+    """Alarm Notification class."""
+
+    def __init__(self):
+        """Initialize alarm notifier."""
+        self._response = OpenStack_Response()
+
+        self._producer = KafkaProducer("alarm_response")
+
+    def notify(self, alarming):
+        """Send alarm notifications responses to the SO."""
+        auth_token, endpoint = alarming.authenticate(None)
+
+        while True:
+            alarm_list = json.loads(alarming.list_alarms(endpoint, auth_token))
+            for alarm in alarm_list:
+                alarm_id = alarm['alarm_id']
+                alarm_name = alarm['name']
+                # Send a notification response to the SO on alarm trigger
+                if alarm_name in ALARM_NAMES:
+                    alarm_state = alarming.get_alarm_state(
+                        endpoint, auth_token, alarm_id)
+                    if alarm_state == "alarm":
+                        # Generate and send an alarm notification response
+                        try:
+                            a_date = alarm['state_timestamp'].replace("T", " ")
+                            rule = alarm['gnocchi_resources_threshold_rule']
+                            resp_message = self._response.generate_response(
+                                'notify_alarm', a_id=alarm_id,
+                                r_id=rule['resource_id'],
+                                sev=alarm['severity'], date=a_date,
+                                state=alarm_state, vim_type="OpenStack")
+                            self._producer.notify_alarm(
+                                'notify_alarm', resp_message, 'alarm_response')
+                        except Exception as exc:
+                            log.warn("Failed to send notify response:%s", exc)
index 364a12e..847d44b 100644 (file)
@@ -1,20 +1,43 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
 """Aodh plugin for the OSM monitoring module."""
 
 import logging as log
-#import sys
-
-#path = "/home/stack/MON"
-#if path not in sys.path:
-#    sys.path.append(path)
 
 from plugins.OpenStack.Aodh.alarming import Alarming
+from plugins.OpenStack.Aodh.notifier import Notifier
 from plugins.OpenStack.settings import Config
 
+__author__ = "Helena McGough"
+
 
 def register_plugin():
     """Register the plugin."""
+    # Initialize configuration and notifications
     config = Config.instance()
-    instance = Plugin(config=config)
+    notifier = Notifier.instance()
+
+    # Initialize the plugin
+    instance = Plugin(config=config, notifier=notifier)
     instance.config()
     instance.alarm()
 
@@ -22,11 +45,12 @@ def register_plugin():
 class Plugin(object):
     """Aodh plugin for OSM MON."""
 
-    def __init__(self, config):
+    def __init__(self, config, notifier):
         """Plugin instance."""
         log.info("Initialze the plugin instance.")
         self._config = config
-        self._alarm = Alarming()
+        self._alarming = Alarming()
+        self._notifier = notifier
 
     def config(self):
         """Configure plugin."""
@@ -36,6 +60,13 @@ class Plugin(object):
     def alarm(self):
         """Allow alarm info to be received from Aodh."""
         log.info("Begin alarm functionality.")
-        self._alarm.alarming()
+        self._alarming.alarming()
+
+    def notify(self):
+        """Send notifications to the SO."""
+        # TODO(mcgoughh): Run simultaneously so that notifications
+        # can be sent while messages are being consumed
+        log.info("Sending Openstack notifications to the SO.")
+        self._notifier.notify(self._alarming)
 
 register_plugin()
index bc1a729..ca6f47a 100644 (file)
@@ -1,15 +1,63 @@
-"""Gnocchi acts on a metric message received from the SO via MON."""
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
 
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Carry out OpenStack metric requests via Gnocchi API."""
+
+import datetime
 import json
 import logging as log
+import time
+
+from core.message_bus.producer import KafkaProducer
 
 from kafka import KafkaConsumer
 
 from plugins.OpenStack.common import Common
+from plugins.OpenStack.response import OpenStack_Response
+
+__author__ = "Helena McGough"
+
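+# Map OSM normalized metric names to the corresponding Gnocchi metric names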
+METRIC_MAPPINGS = {
+    "AVERAGE_MEMORY_UTILIZATION": "memory.percent",
+    "DISK_READ_OPS": "disk.disk_ops",
+    "DISK_WRITE_OPS": "disk.disk_ops",
+    "DISK_READ_BYTES": "disk.disk_octets",
+    "DISK_WRITE_BYTES": "disk.disk_octets",
+    "PACKETS_DROPPED": "interface.if_dropped",
+    "PACKETS_RECEIVED": "interface.if_packets",
+    "PACKETS_SENT": "interface.if_packets",
+    "CPU_UTILIZATION": "cpu.percent",
+}
+
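+# Collection period units expressed in milliseconds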
+PERIOD_MS = {
+    "HR": 3600000,
+    "DAY": 86400000,
+    "WEEK": 604800000,
+    "MONTH": 2629746000,
+    "YEAR": 31556952000
+}
 
 
 class Metrics(object):
-    """Gnocchi based metric actions performed on info from MON."""
+    """OpenStack metric requests performed via the Gnocchi API."""
 
     def __init__(self):
         """Initialize the metric actions."""
@@ -17,100 +65,349 @@ class Metrics(object):
 
         # TODO(mcgoughh): Initialize a generic consumer object to consume
         # message from the SO. This is hardcoded for now
-        server = {'server': 'localhost:9092', 'topic': 'metrics'}
+        server = {'server': 'localhost:9092', 'topic': 'metric_request'}
         self._consumer = KafkaConsumer(server['topic'],
-                                       group_id='my-group',
+                                       group_id='osm_mon',
                                        bootstrap_servers=server['server'])
 
-        # TODO(mcgoughh): Initialize a producer to send messages bask to the SO
+        # Use the Response class to generate valid json response messages
+        self._response = OpenStack_Response()
+
+        # Initialize a producer to send responses back to the SO
+        self._producer = KafkaProducer("metric_response")
 
     def metric_calls(self):
-        """Consume info from the message bus to manage metrics."""
-        # Concumer check for metric messages
+        """Consume info from the message bus to manage metric requests."""
+        # Consumer check for metric messages
         for message in self._consumer:
+            # Check if this plugin should carry out this request
+            values = json.loads(message.value)
+            vim_type = values['vim_type'].lower()
+
+            if vim_type == "openstack":
+                # Generate auth_token and endpoint
+                auth_token, endpoint = self.authenticate(values)
 
-            if message.topic == "metrics":
-                log.info("Metric action required on this topic: %s",
-                         (message.topic))
+                if message.key == "create_metric_request":
+                    # Configure metric
+                    metric_details = values['metric_create']
+                    metric_id, resource_id, status = self.configure_metric(
+                        endpoint, auth_token, metric_details)
 
-                if message.key == "configure_metric":
-                    # Configure/Update a resource and its metric
-                    values = json.loads(message.value)
-                    schema = values['configure_metrics']
-                    metric_details = schema['metrics_configuration']
+                    # Generate and send a create metric response
+                    try:
+                        resp_message = self._response.generate_response(
+                            'create_metric_response', status=status,
+                            cor_id=values['correlation_id'],
+                            metric_id=metric_id, r_id=resource_id)
+                        self._producer.create_metrics_resp(
+                            'create_metric_response', resp_message,
+                            'metric_response')
+                    except Exception as exc:
+                        log.warn("Failed to create response: %s", exc)
 
-                    # Generate authentication credentials via keystone:
-                    # auth_token, endpoint
-                    auth_token = self._common._authenticate(
-                        schema['tenant_uuid'])
-                    endpoint = self._common.get_endpoint("metric")
+                elif message.key == "read_metric_data_request":
+                    # Read all metric data related to a specified metric
+                    timestamps, metric_data = self.read_metric_data(
+                        endpoint, auth_token, values)
 
-                    metric_id = self.configure_metric(
-                        endpoint, auth_token, metric_details)
-                    log.info("New metric created with metricID: %s", metric_id)
+                    # Generate and send a response message
+                    try:
+                        resp_message = self._response.generate_response(
+                            'read_metric_data_response',
+                            m_id=values['metric_uuid'],
+                            m_name=values['metric_name'],
+                            r_id=values['resource_uuid'],
+                            cor_id=values['correlation_id'],
+                            times=timestamps, metrics=metric_data)
+                        self._producer.read_metric_data_response(
+                            'read_metric_data_response', resp_message,
+                            'metric_response')
+                    except Exception as exc:
+                        log.warn("Failed to send read metric response:%s", exc)
 
-                    # TODO(mcgoughh): will send an acknowledge message back on
-                    # the bus via the producer
+                elif message.key == "delete_metric_request":
+                    # delete the specified metric in the request
+                    metric_id = values['metric_uuid']
+                    status = self.delete_metric(
+                        endpoint, auth_token, metric_id)
 
-                # TODO(mcoughh): Key alternatives are "metric_data_request" and
-                # "metric_data_response" will be accomodated later
-                # Will also need a producer for this functionality
-                elif message.key == "metric_data_request":
-                    log.debug("Key used to request a metrics data")
+                    # Generate and send a response message
+                    try:
+                        resp_message = self._response.generate_response(
+                            'delete_metric_response', m_id=metric_id,
+                            m_name=values['metric_name'],
+                            status=status, r_id=values['resource_uuid'],
+                            cor_id=values['correlation_id'])
+                        self._producer.delete_metric_response(
+                            'delete_metric_response', resp_message,
+                            'metric_response')
+                    except Exception as exc:
+                        log.warn("Failed to send delete response:%s", exc)
 
-                elif message.key == "metric_data_response":
-                    log.debug("Key used for a metrics data response")
+                elif message.key == "update_metric_request":
+                    # Gnocchi doesn't support configuration updates
+                    # Log and send a response back to this effect
+                    log.warn("Gnocchi doesn't support metric configuration\
+                              updates.")
+                    req_details = values['metric_create']
+                    metric_name = req_details['metric_name']
+                    resource_id = req_details['resource_uuid']
+                    metric_id = self.get_metric_id(
+                        endpoint, auth_token, metric_name, resource_id)
 
-                else:
-                    log.debug("Unknown key, no action will be performed")
+                    # Generate and send a response message
+                    try:
+                        resp_message = self._response.generate_response(
+                            'update_metric_response', status=False,
+                            cor_id=values['correlation_id'],
+                            r_id=resource_id, m_id=metric_id)
+                        self._producer.update_metric_response(
+                            'update_metric_response', resp_message,
+                            'metric_response')
+                    except Exception as exc:
+                        log.warn("Failed to send an update response:%s", exc)
+
+                elif message.key == "list_metric_request":
+                    list_details = values['metrics_list_request']
+
+                    metric_list = self.list_metrics(
+                        endpoint, auth_token, list_details)
+
+                    # Generate and send a response message
+                    try:
+                        resp_message = self._response.generate_response(
+                            'list_metric_response', m_list=metric_list,
+                            cor_id=list_details['correlation_id'])
+                        self._producer.list_metric_response(
+                            'list_metric_response', resp_message,
+                            'metric_response')
+                    except Exception as exc:
+                        log.warn("Failed to send a list response:%s", exc)
 
+                else:
+                    log.warn("Unknown key, no action will be performed.")
             else:
-                log.info("Message topic not relevant to this plugin: %s",
-                         message.topic)
+                log.debug("Message is not for this OpenStack.")
 
         return
 
     def configure_metric(self, endpoint, auth_token, values):
-        """Create the new SO desired metric in Gnocchi."""
-        metric_id = None
+        """Create the new metric in Gnocchi."""
+        try:
+            resource_id = values['resource_uuid']
+        except KeyError:
+            log.warn("Resource is not defined correctly.")
+            return None, None, False
 
-        # TODO(mcgoughh): error check the values sent in the message
-        # will query the database for the request resource and then
-        # check that resource for the desired metric
-        metric_name = values['metric_name']
+        # Check/Normalize metric name
+        metric_name, norm_name = self.get_metric_name(values)
+        if norm_name is None:
+            log.warn("This metric is not supported by this plugin.")
+            return None, resource_id, False
 
-        if metric_id is None:
+        # Check for an existing metric for this resource
+        metric_id = self.get_metric_id(
+            endpoint, auth_token, metric_name, resource_id)
 
+        if metric_id is None:
             # Need to create a new version of the resource for gnocchi to
-            # the new metric
-            resource_url = "{}/v1/resource/generic".format(endpoint)
+            # create the new metric based on that resource
+            url = "{}/v1/resource/generic".format(endpoint)
+            try:
+                # Try to create a new resource for the new metric
+                metric = {'name': metric_name,
+                          'archive_policy_name': 'high',
+                          'unit': values['metric_unit'], }
 
-            metric = {'name': metric_name,
-                      'unit': values['metric_unit'], }
+                resource_payload = json.dumps({'id': resource_id,
+                                               'metrics': {
+                                                   metric_name: metric}})
 
-            resource_payload = json.dumps({'id': values['resource_uuid'],
-                                           'metrics': {metric_name: metric}})
+                new_resource = self._common._perform_request(
+                    url, auth_token, req_type="post", payload=resource_payload)
 
-            new_resource = self._common._perform_request(
-                resource_url, auth_token,
-                req_type="post", payload=resource_payload)
-            new_metric = json.loads(new_resource.text)['metrics']
+                resource_id = json.loads(new_resource.text)['id']
+            except Exception as exc:
+                # Append new metric to existing resource
+                log.debug("This resource already exists:%s, appending metric.",
+                          exc)
+                base_url = "{}/v1/resource/generic/%s/metric"
+                res_url = base_url.format(endpoint) % resource_id
+                payload = {metric_name: {'archive_policy_name': 'high',
+                                         'unit': values['metric_unit']}}
+                self._common._perform_request(
+                    res_url, auth_token, req_type="post",
+                    payload=json.dumps(payload))
+
+            metric_id = self.get_metric_id(
+                endpoint, auth_token, metric_name, resource_id)
+            return metric_id, resource_id, True
 
-            return new_metric[metric_name]
         else:
-            return metric_id
+            log.debug("This metric already exists for this resource.")
+
+        return metric_id, resource_id, False
 
     def delete_metric(self, endpoint, auth_token, metric_id):
         """Delete metric."""
         url = "{}/v1/metric/%s".format(endpoint) % (metric_id)
 
-        self._common._perform_request(url, auth_token, req_type="delete")
-        return None
+        try:
+            result = self._common._perform_request(
+                url, auth_token, req_type="delete")
+            if str(result.status_code) == "404":
+                log.warn("Failed to delete the metric.")
+                return False
+            else:
+                return True
+        except Exception as exc:
+            log.warn("Failed to carry out delete metric request:%s", exc)
+        return False
 
-    def list_metrics(self, endpoint, auth_token):
+    def list_metrics(self, endpoint, auth_token, values):
         """List all metrics."""
         url = "{}/v1/metric/".format(endpoint)
 
-        metric_list = self._common._perform_request(
-            url, auth_token, req_type="get")
-        return json.loads(metric_list.text)
+        try:
+            # Check if the metric_name was specified for the list
+            metric_name = values['metric_name']
+            result = self._common._perform_request(
+                url, auth_token, req_type="get")
+            metric_list = json.loads(result.text)
+
+            # Format the list response
+            metrics = self.response_list(
+                metric_list, metric_name=metric_name)
+            return metrics
+        except KeyError:
+            log.debug("Metric name is not specified for this list.")
+
+        try:
+            # Check if a resource_id was specified
+            resource_id = values['resource_uuid']
+            result = self._common._perform_request(
+                url, auth_token, req_type="get")
+            metric_list = json.loads(result.text)
+            # Format the list response
+            metrics = self.response_list(
+                metric_list, resource=resource_id)
+            return metrics
+        except KeyError:
+            log.debug("Resource id not specificed either, will return a\
+                       complete list.")
+            try:
+                result = self._common._perform_request(
+                    url, auth_token, req_type="get")
+                metric_list = json.loads(result.text)
+                # Format the list response
+                metrics = self.response_list(metric_list)
+                return metrics
+
+            except Exception as exc:
+                log.warn("Failed to generate any metric list. %s", exc)
+        return None
+
+    def get_metric_id(self, endpoint, auth_token, metric_name, resource_id):
+        """Check if the desired metric already exists for the resource."""
+        url = "{}/v1/resource/generic/%s".format(endpoint) % resource_id
+
+        try:
+            # Try return the metric id if it exists
+            result = self._common._perform_request(
+                url, auth_token, req_type="get")
+            return json.loads(result.text)['metrics'][metric_name]
+        except Exception:
+            log.debug("Metric doesn't exist. No metric_id available")
+        return None
+
+    def get_metric_name(self, values):
+        """Check metric name configuration and normalize."""
+        metric_name = None
+        try:
+            # Normalize the metric name against the supported mappings
+            metric_name = values['metric_name']
+            return metric_name, METRIC_MAPPINGS[metric_name]
+        except KeyError:
+            log.warn("Metric name %s is invalid or not supported.", metric_name)
+        return metric_name, None
+
+    def read_metric_data(self, endpoint, auth_token, values):
+        """Collectd metric measures over a specified time period."""
+        timestamps = []
+        data = []
+        try:
+            # Try and collect measures
+            metric_id = values['metric_uuid']
+            collection_unit = values['collection_unit'].upper()
+            collection_period = values['collection_period']
+
+            # Define the start and end time based on configurations
+            stop_time = time.strftime("%Y-%m-%d") + "T" + time.strftime("%X")
+            end_time = int(round(time.time() * 1000))
+            if collection_unit == 'YEAR':
+                diff = PERIOD_MS[collection_unit]
+            else:
+                diff = collection_period * PERIOD_MS[collection_unit]
+            s_time = (end_time - diff)/1000.0
+            start_time = datetime.datetime.fromtimestamp(s_time).strftime(
+                '%Y-%m-%dT%H:%M:%S.%f')
+            base_url = "{}/v1/metric/%(0)s/measures?start=%(1)s&stop=%(2)s"
+            url = base_url.format(endpoint) % {
+                "0": metric_id, "1": start_time, "2": stop_time}
+
+            # Perform metric data request
+            metric_data = self._common._perform_request(
+                url, auth_token, req_type="get")
+
+            # Generate a list of the requested timestamps and data
+            for r in json.loads(metric_data.text):
+                timestamp = r[0].replace("T", " ")
+                timestamps.append(timestamp)
+                data.append(r[2])
+
+            return timestamps, data
+        except Exception as exc:
+            log.warn("Failed to gather specified measures: %s", exc)
+        return timestamps, data
+
+    def authenticate(self, values):
+        """Generate an authentication token and endpoint for metric request."""
+        try:
+            # Check for a tenant_id
+            auth_token = self._common._authenticate(
+                tenant_id=values['tenant_uuid'])
+            endpoint = self._common.get_endpoint("metric")
+        except KeyError:
+            log.warn("Tenant ID is not specified. Will use a generic\
+                      authentication.")
+            auth_token = self._common._authenticate()
+            endpoint = self._common.get_endpoint("metric")
+
+        return auth_token, endpoint
+
+    def response_list(self, metric_list, metric_name=None, resource=None):
+        """Create the appropriate lists for a list response."""
+        resp_list = []
+
+        for row in metric_list:
+            if metric_name is not None:
+                if row['name'] == metric_name:
+                    metric = {"metric_name": row['name'],
+                              "metric_uuid": row['id'],
+                              "metric_unit": row['unit'],
+                              "resource_uuid": row['resource_id']}
+                    resp_list.append(metric)
+            elif resource is not None:
+                if row['resource_id'] == resource:
+                    metric = {"metric_name": row['name'],
+                              "metric_uuid": row['id'],
+                              "metric_unit": row['unit'],
+                              "resource_uuid": row['resource_id']}
+                    resp_list.append(metric)
+            else:
+                metric = {"metric_name": row['name'],
+                          "metric_uuid": row['id'],
+                          "metric_unit": row['unit'],
+                          "resource_uuid": row['resource_id']}
+                resp_list.append(metric)
+        return resp_list
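
For clarity, a small standalone sketch of what the response_list() formatting above produces; the sample rows and identifiers below are hypothetical and the helper simply mirrors the filtering logic added in this hunk:

    def response_list_sketch(metric_list, metric_name=None, resource=None):
        """Mirror Metrics.response_list(): keep rows matching the optional filters."""
        resp_list = []
        for row in metric_list:
            if metric_name is not None and row['name'] != metric_name:
                continue
            if resource is not None and row['resource_id'] != resource:
                continue
            resp_list.append({"metric_name": row['name'],
                              "metric_uuid": row['id'],
                              "metric_unit": row['unit'],
                              "resource_uuid": row['resource_id']})
        return resp_list

    sample = [{"name": "cpu_utilization", "id": "m-0001",
               "unit": "%", "resource_id": "r-abc"}]
    print(response_list_sketch(sample, metric_name="cpu_utilization"))
    # [{'metric_name': 'cpu_utilization', 'metric_uuid': 'm-0001',
    #   'metric_unit': '%', 'resource_uuid': 'r-abc'}]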
index 6f9e306..8e4296f 100644 (file)
@@ -1,3 +1,24 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
 """Gnocchi plugin for the OSM monitoring module."""
 
 import logging as log
@@ -5,6 +26,8 @@ import logging as log
 from plugins.OpenStack.Gnocchi.metrics import Metrics
 from plugins.OpenStack.settings import Config
 
+__author__ = "Helena McGough"
+
 
 def register_plugin():
     """Register the plugin."""
index 68ce4e6..25cca45 100644 (file)
@@ -1,4 +1,25 @@
-"""Common methods for the Aodh Sender/Receiver."""
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Common methods for the OpenStack plugins."""
 
 import logging as log
 
@@ -8,8 +29,7 @@ from plugins.OpenStack.settings import Config
 
 import requests
 
-# from keystoneauth1.identity.v3 import AuthMethod
-# from keystoneclient.service_catalog import ServiceCatalog
+__author__ = "Helena McGough"
 
 
 class Common(object):
@@ -35,8 +55,8 @@ class Common(object):
             self._auth_token = self._ks.auth_token
         except Exception as exc:
 
-            log.warn("Authentication failed with the following exception: %s",
-                     exc)
+            log.warn("Authentication failed: %s", exc)
+
             self._auth_token = None
 
         return self._auth_token
@@ -49,7 +69,7 @@ class Common(object):
                 endpoint_type='internalURL',
                 region_name='RegionOne')
         except Exception as exc:
-            log.warning("Failed to retreive endpoint for Aodh due to: %s",
+            log.warning("Failed to retreive endpoint for service due to: %s",
                         exc)
         return None
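
The Common class is the shared helper the Aodh and Gnocchi plugins use for Keystone authentication, endpoint lookup and raw HTTP requests. A hedged usage sketch, based only on the call patterns visible in metrics.py above; how Common is instantiated is not shown in this hunk, so the no-argument constructor and a configured OpenStack environment are assumptions:

    from plugins.OpenStack.common import Common

    common = Common()
    auth_token = common._authenticate()           # generic authentication (no tenant)
    endpoint = common.get_endpoint("metric")      # internalURL of the metric service
    if auth_token is not None and endpoint is not None:
        url = "{}/v1/metric/".format(endpoint)
        result = common._perform_request(url, auth_token, req_type="get")
        print(result.status_code)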
 
diff --git a/plugins/OpenStack/response.py b/plugins/OpenStack/response.py
new file mode 100644 (file)
index 0000000..e59c7ca
--- /dev/null
@@ -0,0 +1,168 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Generate valid responses to send back to the SO."""
+
+import json
+import logging as log
+
+__author__ = "Helena McGough"
+
+schema_version = "1.0"
+
+
+class OpenStack_Response(object):
+    """Generates responses for SO from OpenStaack plugins."""
+
+    def __init__(self):
+        """Initialize OpenStack Response instance."""
+
+    def generate_response(self, key, **kwargs):
+        """Make call to appropriate response function."""
+        if key == "list_alarm_response":
+            message = self.alarm_list_response(**kwargs)
+        elif key == "create_alarm_response":
+            message = self.create_alarm_response(**kwargs)
+        elif key == "delete_alarm_response":
+            message = self.delete_alarm_response(**kwargs)
+        elif key == "update_alarm_response":
+            message = self.update_alarm_response(**kwargs)
+        elif key == "create_metric_response":
+            message = self.metric_create_response(**kwargs)
+        elif key == "read_metric_data_response":
+            message = self.read_metric_data_response(**kwargs)
+        elif key == "delete_metric_response":
+            message = self.delete_metric_response(**kwargs)
+        elif key == "update_metric_response":
+            message = self.update_metric_response(**kwargs)
+        elif key == "list_metric_response":
+            message = self.list_metric_response(**kwargs)
+        elif key == "notify_alarm":
+            message = self.notify_alarm(**kwargs)
+        else:
+            log.warn("Failed to generate a valid response message.")
+            message = None
+
+        return message
+
+    def alarm_list_response(self, **kwargs):
+        """Generate the response for an alarm list request."""
+        alarm_list_resp = {"schema_version": schema_version,
+                           "schema_type": "list_alarm_response",
+                           "correlation_id": kwargs['cor_id'],
+                           "list_alarm_resp": kwargs['alarm_list']}
+        return json.dumps(alarm_list_resp)
+
+    def create_alarm_response(self, **kwargs):
+        """Generate a response for a create alarm request."""
+        create_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "create_alarm_response",
+                             "alarm_create_response": {
+                                 "correlation_id": kwargs['cor_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "status": kwargs['status']}}
+        return json.dumps(create_alarm_resp)
+
+    def delete_alarm_response(self, **kwargs):
+        """Generate a response for a delete alarm request."""
+        delete_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "alarm_deletion_response",
+                             "alarm_deletion_response": {
+                                 "correlation_id": kwargs['cor_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "status": kwargs['status']}}
+        return json.dumps(delete_alarm_resp)
+
+    def update_alarm_response(self, **kwargs):
+        """Generate a response for an update alarm request."""
+        update_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "update_alarm_response",
+                             "alarm_update_response": {
+                                 "correlation_id": kwargs['cor_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "status": kwargs['status']}}
+        return json.dumps(update_alarm_resp)
+
+    def metric_create_response(self, **kwargs):
+        """Generate a response for a create metric request."""
+        create_metric_resp = {"schema_version": schema_version,
+                              "schema_type": "create_metric_response",
+                              "correlation_id": kwargs['cor_id'],
+                              "metric_create_response": {
+                                  "metric_uuid": kwargs['metric_id'],
+                                  "resource_uuid": kwargs['r_id'],
+                                  "status": kwargs['status']}}
+        return json.dumps(create_metric_resp)
+
+    def read_metric_data_response(self, **kwargs):
+        """Generate a response for a read metric data request."""
+        read_metric_data_resp = {"schema_version": schema_version,
+                                 "schema_type": "read_metric_data_response",
+                                 "metric_name": kwargs['m_name'],
+                                 "metric_uuid": kwargs['m_id'],
+                                 "resource_uuid": kwargs['r_id'],
+                                 "correlation_id": kwargs['cor_id'],
+                                 "metrics_data": {
+                                     "time_series": kwargs['times'],
+                                     "metrics_series": kwargs['metrics']}}
+        return json.dumps(read_metric_data_resp)
+
+    def delete_metric_response(self, **kwargs):
+        """Generate a response for a delete metric request."""
+        delete_metric_resp = {"schema_version": schema_version,
+                              "schema_type": "delete_metric_response",
+                              "metric_name": kwargs['m_name'],
+                              "metric_uuid": kwargs['m_id'],
+                              "resource_uuid": kwargs['r_id'],
+                              "correlation_id": kwargs['cor_id'],
+                              "status": kwargs['status']}
+        return json.dumps(delete_metric_resp)
+
+    def update_metric_response(self, **kwargs):
+        """Generate a repsonse for an update metric request."""
+        update_metric_resp = {"schema_version": schema_version,
+                              "schema_type": "update_metric_response",
+                              "correlation_id": kwargs['cor_id'],
+                              "metric_update_response": {
+                                  "metric_uuid": kwargs['m_id'],
+                                  "status": kwargs['status'],
+                                  "resource_uuid": kwargs['r_id']}}
+        return json.dumps(update_metric_resp)
+
+    def list_metric_response(self, **kwargs):
+        """Generate a response for a list metric request."""
+        list_metric_resp = {"schema_version": schema_version,
+                            "schema_type": "list_metric_response",
+                            "correlation_id": kwargs['cor_id'],
+                            "metrics_list": kwargs['m_list']}
+        return json.dumps(list_metric_resp)
+
+    def notify_alarm(self, **kwargs):
+        """Generate a response to send alarm notifications."""
+        notify_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "notify_alarm",
+                             "notify_details": {
+                                 "alarm_uuid": kwargs['a_id'],
+                                 "resource_uuid": kwargs['r_id'],
+                                 "vim_type": kwargs['vim_type'],
+                                 "severity": kwargs['sev'],
+                                 "status": kwargs['state'],
+                                 "start_date": kwargs['date']}}
+        return json.dumps(notify_alarm_resp)
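
A brief usage sketch of the response generator added above; the correlation id, alarm uuid and status values are hypothetical:

    from plugins.OpenStack.response import OpenStack_Response

    resp_gen = OpenStack_Response()
    message = resp_gen.generate_response(
        "create_alarm_response",
        cor_id=123,
        alarm_id="alarm-uuid-0001",
        status=True)
    # 'message' is a JSON string, e.g. to be published on the 'alarm_response' topic:
    # {"schema_version": "1.0", "schema_type": "create_alarm_response",
    #  "alarm_create_response": {"correlation_id": 123,
    #                            "alarm_uuid": "alarm-uuid-0001", "status": true}}
    print(message)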
index 45620d9..e7b06e2 100644 (file)
@@ -1,4 +1,25 @@
-"""Configurations for the Aodh plugin."""
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Configurations for the OpenStack plugins."""
 
 from __future__ import unicode_literals
 
@@ -11,6 +32,8 @@ from plugins.OpenStack.singleton import Singleton
 
 import six
 
+__author__ = "Helena McGough"
+
 
 class BadConfigError(Exception):
     """Configuration exception."""
@@ -54,8 +77,6 @@ class Config(object):
     def read_environ(self, service):
         """Check the appropriate environment variables and update defaults."""
         for key in self._config_keys:
-            # Default username for a service is it's name
-            setattr(self, 'OS_USERNAME', service)
             if (key == "OS_IDENTITY_API_VERSION" or key == "OS_PASSWORD"):
                 val = str(os.environ[key])
                 setattr(self, key, val)
@@ -63,6 +84,7 @@ class Config(object):
                 val = str(os.environ[key]) + "/v3"
                 setattr(self, key, val)
             else:
-                # TODO(mcgoughh): Log errors and no config updates required
-                log.warn("Configuration doesn't require updating")
+                # Default username for a service is its name
+                setattr(self, 'OS_USERNAME', service)
+                log.info("Configuration complete!")
                 return
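
For reference, a sketch of the environment a deployer would set before read_environ() runs; only the keys handled explicitly in this hunk are shown, and the values are hypothetical:

    import os

    os.environ["OS_AUTH_URL"] = "http://keystone.example:5000"   # "/v3" is appended
    os.environ["OS_IDENTITY_API_VERSION"] = "3"
    os.environ["OS_PASSWORD"] = "secret"
    # OS_USERNAME is not taken from the environment here; read_environ() defaults
    # it to the service name passed in (e.g. "gnocchi" or "aodh").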
index 12cd5a9..abfe4e2 100644 (file)
@@ -1,7 +1,30 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
 """Simple singleton class."""
 
 from __future__ import unicode_literals
 
+__author__ = "Helena McGough"
+
 
 class Singleton(object):
     """Simple singleton class."""
diff --git a/plugins/__init__.py b/plugins/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/plugins/vRealiseOps/__init__.py b/plugins/vRealiseOps/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
index 00d6ada..f5e11e6 100644 (file)
@@ -1,4 +1,26 @@
 # -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
 """
 vROPs Kafka Consumer that consumes the request messages
 """
index 79b1ea7..5831e72 100644 (file)
@@ -1,4 +1,26 @@
 # -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
 """
 Monitoring metrics & creating Alarm definitions in vROPs
 """
@@ -11,11 +33,23 @@ import traceback
 import time
 import json
 from OpenSSL.crypto import load_certificate, FILETYPE_PEM
+import os
+import datetime
 
 OPERATION_MAPPING = {'GE':'GT_EQ', 'LE':'LT_EQ', 'GT':'GT', 'LT':'LT', 'EQ':'EQ'}
-severity_mano2vrops = {'WARNING':'WARNING', 'MINOR':'WARNING', 'MAJOR':"IMMEDIATE", 'CRITICAL':'CRITICAL'}
+severity_mano2vrops = {'WARNING':'WARNING', 'MINOR':'WARNING', 'MAJOR':"IMMEDIATE",\
+                        'CRITICAL':'CRITICAL', 'INDETERMINATE':'UNKNOWN'}
 PERIOD_MSEC = {'HR':3600000,'DAY':86400000,'WEEK':604800000,'MONTH':2678400000,'YEAR':31536000000}
-DEFAULT_CONFIG_FILE = 'vrops_config.xml'
+
+#To Do - Add actual webhook url & certificate
+#SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/www.vrops_webservice.com.cert'
+webhook_url = "https://mano-dev-1:8080/notify/" #for testing
+SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/10.172.137.214.cert' #for testing
+
+MODULE_DIR = os.path.dirname(__file__)
+CONFIG_FILE_NAME = 'vrops_config.xml'
+CONFIG_FILE_PATH = os.path.join(MODULE_DIR, CONFIG_FILE_NAME)
+SSL_CERTIFICATE_FILE_PATH = os.path.join(MODULE_DIR, SSL_CERTIFICATE_FILE_NAME)
 
 class MonPlugin():
     """MON Plugin class for vROPs telemetry plugin
@@ -39,13 +73,22 @@ class MonPlugin():
         """
         access_config = self.get_default_Params('Access_Config')
         self.access_config = access_config
-        self.vrops_site =  access_config['vrops_site']
-        self.vrops_user = access_config['vrops_user']
-        self.vrops_password = access_config['vrops_password']
-        self.vcloud_site = access_config['vcloud-site']
-        self.admin_username = access_config['admin_username']
-        self.admin_password = access_config['admin_password']
-        self.tenant_id = access_config['tenant_id']
+        if not bool(access_config):
+            log.error("Access configuration not provided in vROPs Config file")
+            raise KeyError("Access configuration not provided in vROPs Config file")
+
+        try:
+            self.vrops_site =  access_config['vrops_site']
+            self.vrops_user = access_config['vrops_user']
+            self.vrops_password = access_config['vrops_password']
+            self.vcloud_site = access_config['vcloud-site']
+            self.admin_username = access_config['admin_username']
+            self.admin_password = access_config['admin_password']
+            self.tenant_id = access_config['tenant_id']
+        except KeyError as exp:
+            log.error("Check Access configuration in vROPs Config file: {}".format(exp))
+            raise KeyError("Check Access configuration in vROPs Config file: {}".format(exp))
+
 
     def configure_alarm(self, config_dict = {}):
         """Configures or creates a new alarm using the input parameters in config_dict
@@ -80,7 +123,7 @@ class MonPlugin():
             log.warn("Metric not supported: {}".format(config_dict['metric_name']))
             return None
         #2) create symptom definition
-        vrops_alarm_name = def_a_params['vrops_alarm']+ '-' +config_dict['resource_uuid']
+        vrops_alarm_name = def_a_params['vrops_alarm']+ '-' + config_dict['resource_uuid']
         symptom_params ={'cancel_cycles': (def_a_params['cancel_period']/300)*def_a_params['cancel_cycles'],
                         'wait_cycles': (def_a_params['period']/300)*def_a_params['evaluation'],
                         'resource_kind_key': def_a_params['resource_kind'],
@@ -129,7 +172,7 @@ class MonPlugin():
             return None
 
         #6) Configure alarm notification for a particular VM using it's resource_id
-        notification_id = self.create_alarm_notification(vrops_alarm_name, alarm_def, resource_id)
+        notification_id = self.create_alarm_notification_rule(vrops_alarm_name, alarm_def, resource_id)
         if notification_id is None:
             return None
         else:
@@ -145,9 +188,17 @@ class MonPlugin():
         Params:
             metric_alarm_name: Name of the alarm whose config params are to be read from the config file.
         """
-        tree = XmlElementTree.parse(DEFAULT_CONFIG_FILE)
-        alarms = tree.getroot()
         a_params = {}
+        try:
+            source = open(CONFIG_FILE_PATH, 'r')
+        except IOError as exp:
+            msg = ("Could not read Config file: {}, \nException: {}"\
+                        .format(CONFIG_FILE_PATH, exp))
+            log.error(msg)
+            raise IOError(msg)
+
+        tree = XmlElementTree.parse(source)
+        alarms = tree.getroot()
         for alarm in alarms:
             if alarm.tag == metric_alarm_name:
                 for param in alarm:
@@ -161,7 +212,7 @@ class MonPlugin():
                             a_params[param.tag] = False
                     else:
                         a_params[param.tag] = param.text
-
+        source.close()
         return a_params
 
 
@@ -326,9 +377,12 @@ class MonPlugin():
         if plugin_id is not None:
             return plugin_id
         else:
-            #To Do - Add actual webhook url
-            webhook_url = "https://mano-dev-1:8080/notify/" #for testing
-            cert_file_string = open("10.172.137.214.cert", "rb").read() #for testing
+            try:
+                cert_file_string = open(SSL_CERTIFICATE_FILE_PATH, "rb").read()
+            except IOError as exp:
+                msg = ("Could not read SSL certificate file: {}".format(SSL_CERTIFICATE_FILE_PATH))
+                log.error(msg)
+                raise IOError(msg)
             cert = load_certificate(FILETYPE_PEM, cert_file_string)
             certificate = cert.digest("sha1")
             api_url = '/suite-api/api/alertplugins'
@@ -444,9 +498,9 @@ class MonPlugin():
             log.warn("Error enabling REST plugin for {} plugin: Exception: {}\n{}"\
                     .format(plugin_name, exp, traceback.format_exc()))
 
-    def create_alarm_notification(self, alarm_name, alarm_id, resource_id):
+    def create_alarm_notification_rule(self, alarm_name, alarm_id, resource_id):
         """
-        Create notification for each alarm
+        Create notification rule for each alarm
         Params:
             alarm_name
             alarm_id
@@ -490,7 +544,7 @@ class MonPlugin():
                              data=data)
 
         if resp.status_code is not 201:
-            log.warn("Failed to create Alarm notification {} for {} alarm."\
+            log.warn("Failed to create Alarm notification rule {} for {} alarm."\
                         "\nResponse code: {}\nResponse content: {}"\
                         .format(notification_name, alarm_name, resp.status_code, resp.content))
             return None
@@ -750,8 +804,393 @@ class MonPlugin():
 
         return return_data
 
-    def reconfigure_alarm(self, config_dict):
+    def update_alarm_configuration(self, new_alarm_config):
+        """Update alarm configuration (i.e. Symptom & alarm) as per request
+        """
+        #1) Get Alarm details from its uuid & find the symptom definition
+        alarm_details_json, alarm_details = self.get_alarm_defination_details(new_alarm_config['alarm_uuid'])
+        if alarm_details_json is None:
+            return None
+
+        try:
+            #2) Update the symptom definition
+            if alarm_details['alarm_id'] is not None and alarm_details['symptom_definition_id'] is not None:
+                symptom_defination_id = alarm_details['symptom_definition_id']
+            else:
+                log.info("Symptom Defination ID not found for {}".format(new_alarm_config['alarm_uuid']))
+                return None
+
+            symptom_uuid = self.update_symptom_defination(symptom_defination_id, new_alarm_config)
+
+            #3) Update the alarm definition & return its UUID if the update succeeds
+            if symptom_uuid is None:
+                return None
+            else:
+                alarm_uuid = self.reconfigure_alarm(alarm_details_json, new_alarm_config)
+                if alarm_uuid is None:
+                    return None
+                else:
+                    return alarm_uuid
+        except Exception:
+            log.error("Exception while updating alarm: {}".format(traceback.format_exc()))
+
+    def get_alarm_defination_details(self, alarm_uuid):
+        """Get alarm details based on alarm UUID
+        """
+        if alarm_uuid is None:
+            log.warn("get_alarm_defination_details: Alarm UUID not provided")
+            return None, None
+
+        alarm_details = {}
+        json_data = {}
+        api_url = '/suite-api/api/alertdefinitions/AlertDefinition-'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url + alarm_uuid,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify = False, headers = headers)
+
+        if resp.status_code != 200:
+            log.warn("Failed to get alarm details from vROPs for {}\nResponse code:{}\nResponse Content: {}"\
+                    .format(alarm_uuid, resp.status_code, resp.content))
+            return None, None
+
+        try:
+            json_data = json.loads(resp.content)
+            if json_data['id'] is not None:
+                alarm_details['alarm_id'] = json_data['id']
+                alarm_details['alarm_name'] = json_data['name']
+                alarm_details['adapter_kind'] = json_data['adapterKindKey']
+                alarm_details['resource_kind'] = json_data['resourceKindKey']
+                alarm_details['type'] = json_data['type']
+                alarm_details['sub_type'] = json_data['subType']
+                alarm_details['symptom_definition_id'] = json_data['states'][0]['base-symptom-set']['symptomDefinitionIds'][0]
+        except Exception as exp:
+            log.warn("Exception while retrieving alarm definition details: {}".format(exp))
+
+        return json_data, alarm_details
+
+
+    def update_symptom_defination(self, symptom_uuid, new_alarm_config):
+        """Update symptom defination based on new alarm input configuration
+        """
+        #1) Get symptom defination details
+        symptom_details = self.get_symptom_defination_details(symptom_uuid)
+        #print "\n\nsymptom_details: {}".format(symptom_details)
+        if symptom_details is None:
+            return None
+
+        if new_alarm_config.has_key('severity') and new_alarm_config['severity'] is not None:
+            symptom_details['state']['severity'] = severity_mano2vrops[new_alarm_config['severity']]
+        if new_alarm_config.has_key('operation') and new_alarm_config['operation'] is not None:
+            symptom_details['state']['condition']['operator'] = OPERATION_MAPPING[new_alarm_config['operation']]
+        if new_alarm_config.has_key('threshold_value') and new_alarm_config['threshold_value'] is not None:
+            symptom_details['state']['condition']['value'] = new_alarm_config['threshold_value']
+        #Find vrops metric key from metric_name, if required
+        """
+        if new_alarm_config.has_key('metric_name') and new_alarm_config['metric_name'] is not None:
+            metric_key_params = self.get_default_Params(new_alarm_config['metric_name'])
+            if not metric_key_params:
+                log.warn("Metric not supported: {}".format(config_dict['metric_name']))
+                return None
+            symptom_details['state']['condition']['key'] = metric_key_params['metric_key']
+        """
+        log.info("Fetched Symptom details : {}".format(symptom_details))
+
+        api_url = '/suite-api/api/symptomdefinitions'
+        headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
+        data = json.dumps(symptom_details)
+        resp = requests.put(self.vrops_site + api_url,
+                             auth=(self.vrops_user, self.vrops_password),
+                             headers=headers,
+                             verify = False,
+                             data=data)
+
+        if resp.status_code != 200:
+            log.warn("Failed to update Symptom definition: {}, response {}"\
+                    .format(symptom_uuid, resp.content))
+            return None
+
+
+        if symptom_uuid is not None:
+            log.info("Symptom defination updated {} for alarm: {}"\
+                    .format(symptom_uuid, new_alarm_config['alarm_uuid']))
+            return symptom_uuid
+        else:
+            log.warn("Failed to update Symptom Defination {} for : {}"\
+                    .format(symptom_uuid, new_alarm_config['alarm_uuid']))
+            return None
+
+
+    def get_symptom_defination_details(self, symptom_uuid):
+        """Get symptom defination details
+        """
+        symptom_details = {}
+        if symptom_uuid is None:
+            log.warn("get_symptom_defination_details: Symptom UUID not provided")
+            return None
+
+        api_url = '/suite-api/api/symptomdefinitions/'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url + symptom_uuid,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify = False, headers = headers)
+
+        if resp.status_code != 200:
+            log.warn("Failed to get symptom details for {} \nResponse code:{}\nResponse Content: {}"\
+                    .format(symptom_uuid, resp.status_code, resp.content))
+            return None
+
+        symptom_details = json.loads(resp.content)
+        #print "New symptom Details: {}".format(symptom_details)
+        return symptom_details
+
+
+    def reconfigure_alarm(self, alarm_details_json, new_alarm_config):
+        """Reconfigure alarm defination as per input
+        """
+        if new_alarm_config.has_key('severity') and new_alarm_config['severity'] is not None:
+            alarm_details_json['states'][0]['severity'] = new_alarm_config['severity']
+
+        api_url = '/suite-api/api/alertdefinitions'
+        headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
+        data = json.dumps(alarm_details_json)
+        resp = requests.put(self.vrops_site + api_url,
+                             auth=(self.vrops_user, self.vrops_password),
+                             headers=headers,
+                             verify = False,
+                             data=data)
+
+        if resp.status_code != 200:
+            log.warn("Failed to create Symptom definition: {}, response code {}, response content: {}"\
+                    .format(symptom_uuid, resp.status_code, resp.content))
+            return None
+        else:
+            parsed_alarm_details = json.loads(resp.content)
+            alarm_def_uuid = parsed_alarm_details['id'].split('-', 1)[1]
+            log.info("Successfully updated Alarm defination: {}".format(alarm_def_uuid))
+            return alarm_def_uuid
+
+    def delete_alarm_configuration(self, delete_alarm_req_dict):
+        """Delete complete alarm configuration
+        """
+        if delete_alarm_req_dict['alarm_uuid'] is None:
+            log.info("delete_alarm_configuration: Alarm UUID not provided")
+            return None
+        #1) Get alarm & symptom definition details
+        alarm_details_json, alarm_details = self.get_alarm_defination_details(delete_alarm_req_dict['alarm_uuid'])
+        if alarm_details is None or alarm_details_json is None:
+            return None
+
+        #2) Delete alarm notification
+        rule_id = self.delete_notification_rule(alarm_details['alarm_name'])
+        if rule_id is None:
+            return None
+
+        #3) Delete alarm configuration
+        alarm_id = self.delete_alarm_defination(alarm_details['alarm_id'])
+        if alarm_id is None:
+            return None
+
+        #4) Delete alarm symptom
+        symptom_id = self.delete_symptom_definition(alarm_details['symptom_definition_id'])
+        if symptom_id is None:
+            return None
+        else:
+            log.info("Completed deleting alarm configuration: {}"\
+                    .format(delete_alarm_req_dict['alarm_uuid']))
+            return delete_alarm_req_dict['alarm_uuid']
+
+    def delete_notification_rule(self, alarm_name):
+        """Deleted notification rule defined for a particular alarm
+        """
+        rule_id = self.get_notification_rule_id_by_alarm_name(alarm_name)
+        if rule_id is None:
+            return None
+        else:
+            api_url = '/suite-api/api/notifications/rules/'
+            headers = {'Accept':'application/json'}
+            resp = requests.delete(self.vrops_site + api_url + rule_id,
+                                auth=(self.vrops_user, self.vrops_password),
+                                verify = False, headers = headers)
+            if resp.status_code != 204:
+                log.warn("Failed to delete notification rules for {}".format(alarm_name))
+                return None
+            else:
+                log.info("Deleted notification rules for {}".format(alarm_name))
+                return rule_id
+
+    def get_notification_rule_id_by_alarm_name(self, alarm_name):
+        """Find created Alarm notification rule id by alarm name
+        """
+        alarm_notify_id = 'notify_' + alarm_name
+        api_url = '/suite-api/api/notifications/rules'
+        headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
+        resp = requests.get(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify = False, headers = headers)
+
+        if resp.status_code != 200:
+            log.warn("Failed to get notification rules details for {}"\
+                    .format(alarm_name))
+            return None
+
+        notifications = json.loads(resp.content)
+        if notifications is not None and notifications.has_key('notification-rule'):
+            notifications_list = notifications['notification-rule']
+            for dict in notifications_list:
+                if dict['name'] is not None and dict['name'] == alarm_notify_id:
+                    notification_id = dict['id']
+                    log.info("Found Notification id to be deleted: {} for {}"\
+                            .format(notification_id, alarm_name))
+                    return notification_id
+
+            log.warn("Notification id to be deleted not found for {}"\
+                            .format(notification_id, alarm_name))
+            return None
+
+    def delete_alarm_defination(self, alarm_id):
+        """Delete created Alarm defination
+        """
+        api_url = '/suite-api/api/alertdefinitions/'
+        headers = {'Accept':'application/json'}
+        resp = requests.delete(self.vrops_site + api_url + alarm_id,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify = False, headers = headers)
+        if resp.status_code != 204:
+            log.warn("Failed to delete alarm definition {}".format(alarm_id))
+            return None
+        else:
+            log.info("Deleted alarm definition {}".format(alarm_id))
+            return alarm_id
+
+    def delete_symptom_definition(self, symptom_id):
+        """Delete symptom defination
+        """
+        api_url = '/suite-api/api/symptomdefinitions/'
+        headers = {'Accept':'application/json'}
+        resp = requests.delete(self.vrops_site + api_url + symptom_id,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify = False, headers = headers)
+        if resp.status_code != 204:
+            log.warn("Failed to delete symptom definition {}".format(symptom_id))
+            return None
+        else:
+            log.info("Deleted symptom definition {}".format(symptom_id))
+            return symptom_id
+
+
+    def verify_metric_support(self, metric_info):
+        """Verify, if Metric is supported by vROPs plugin, verify metric unit & return status
+            Returns:
+            status: True if supported, False if not supported
+        """
+        status = False
+        metric_key_params = self.get_default_Params(metric_info['metric_name'])
+        if not metric_key_params:
+            log.warn("Metric not supported: {}".format(metric_info['metric_name']))
+            return status
+        else:
+            #If Metric is supported, verify metric unit & return status
+            if metric_key_params['unit'] == metric_info['metric_unit']:
+                log.info("Metric is supported: {}".format(metric_info['metric_name']))
+                status = True
+            else:
+                log.warn("Metric not supported: {}".format(metric_info['metric_name']))
+                status = False
+            return status
+
+    def get_triggered_alarms_list(self, list_alarm_input):
+        """Get list of triggered alarms on a resource based on alarm input request.
         """
+        #To Do - Need to add filtering of alarms based on Severity & alarm name
+
+        triggered_alarms_list = []
+        if list_alarm_input['resource_uuid'] is None:
+            return triggered_alarms_list
+
+        #1)Find vROPs resource ID using RO resource UUID
+        vrops_resource_id = self.get_vrops_resourceid_from_ro_uuid(list_alarm_input['resource_uuid'])
+        if vrops_resource_id is None:
+            return triggered_alarms_list
+
+        #2)Get triggered alarms on particular resource
+        triggered_alarms_list = self.get_triggered_alarms_on_resource(list_alarm_input['resource_uuid'], vrops_resource_id)
+        return triggered_alarms_list
+
+    def get_vrops_resourceid_from_ro_uuid(self, ro_resource_uuid):
+        """Fetch vROPs resource ID using resource UUID from RO/SO
+        """
+        #1) Find vm_moref_id from vApp uuid in vCD
+        vm_moref_id = self.get_vm_moref_id(ro_resource_uuid)
+        if vm_moref_id is None:
+            log.warn("Failed to find vm morefid for vApp in vCD: {}".format(ro_resource_uuid))
+            return None
+
+        #2) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        vrops_resource_id = self.get_vm_resource_id(vm_moref_id)
+        if vrops_resource_id is None:
+            log.warn("Failed to find resource in vROPs: {}".format(ro_resource_uuid))
+            return None
+        return vrops_resource_id
+
+
+    def get_triggered_alarms_on_resource(self, ro_resource_uuid, vrops_resource_id):
+        """Get triggered alarms on particular resource & return list of dictionary of alarms
+        """
+        resource_alarms = []
+        api_url = '/suite-api/api/alerts?resourceId='
+        headers = {'Accept':'application/json'}
+        resp = requests.get(self.vrops_site + api_url + vrops_resource_id,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify = False, headers = headers)
+
+        if resp.status_code != 200:
+            log.warn("Failed to get triggered alarms for {}"\
+                    .format(ro_resource_uuid))
+            return None
+
+        all_alerts = json.loads(resp.content)
+        if all_alerts.has_key('alerts'):
+            if not all_alerts['alerts']:
+                log.info("No alarms present on resource {}".format(ro_resource_uuid))
+                return resource_alarms
+            all_alerts_list = all_alerts['alerts']
+            for alarm in all_alerts_list:
+                #log.info("Triggered Alarm {}".format(alarm))
+                if alarm['alertDefinitionName'] is not None and\
+                    len(alarm['alertDefinitionName'].split('-', 1)) == 2:
+                        if alarm['alertDefinitionName'].split('-', 1)[1] == ro_resource_uuid:
+                            alarm_instance = {}
+                            alarm_instance['alarm_uuid'] = alarm['alertDefinitionId'].split('-', 1)[1]
+                            alarm_instance['resource_uuid'] = ro_resource_uuid
+                            alarm_instance['alarm_instance_uuid'] = alarm['alertId']
+                            alarm_instance['vim_type'] = 'VMware'
+                            #find severity of alarm
+                            severity = None
+                            for key,value in severity_mano2vrops.iteritems():
+                                if value == alarm['alertLevel']:
+                                    severity = key
+                            if severity is None:
+                                severity = 'INDETERMINATE'
+                            alarm_instance['severity'] = severity
+                            alarm_instance['status'] = alarm['status']
+                            alarm_instance['start_date'] = self.convert_date_time(alarm['startTimeUTC'])
+                            alarm_instance['update_date'] = self.convert_date_time(alarm['updateTimeUTC'])
+                            alarm_instance['cancel_date'] = self.convert_date_time(alarm['cancelTimeUTC'])
+                            log.info("Triggered Alarm on resource {}".format(alarm_instance))
+                            resource_alarms.append(alarm_instance)
+        if not resource_alarms:
+            log.info("No alarms present on resource {}".format(ro_resource_uuid))
+        return resource_alarms
+
+    def convert_date_time(self, date_time):
+        """Convert the input UTC time in msec to OSM date time format
         """
-        return None
+        date_time_formatted = '0000-00-00T00:00:00'
+        if date_time != 0:
+            complete_datetime = datetime.datetime.fromtimestamp(date_time/1000.0).isoformat('T')
+            date_time_formatted = complete_datetime.split('.',1)[0]
+        return date_time_formatted
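
A standalone sketch of the timestamp conversion above, since vROPs reports alert times (startTimeUTC, updateTimeUTC, cancelTimeUTC) as millisecond epochs; note that datetime.fromtimestamp() uses the local timezone, the sample value below is hypothetical:

    import datetime

    def convert_date_time_sketch(date_time_msec):
        """Convert a millisecond epoch to the 'YYYY-MM-DDTHH:MM:SS' format used above."""
        date_time_formatted = '0000-00-00T00:00:00'   # returned when vROPs reports 0
        if date_time_msec != 0:
            complete_datetime = datetime.datetime.fromtimestamp(
                date_time_msec / 1000.0).isoformat('T')
            date_time_formatted = complete_datetime.split('.', 1)[0]
        return date_time_formatted

    print(convert_date_time_sketch(1505817600000))   # e.g. '2017-09-19T...' (local time)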
 
index 6a134bd..5339686 100644 (file)
@@ -1,4 +1,26 @@
 # -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
 """
 Monitoring plugin receiver that consumes the request messages &
 responds using producer for vROPs
@@ -6,10 +28,11 @@ responds using producer for vROPs
 
 from mon_plugin_vrops import MonPlugin
 from kafka_consumer_vrops import vROP_KafkaConsumer
-#To Do- Change producer
-#from core.message_bus.producer import KafkaProducer
+#Core producer
+from core.message_bus.producer import KafkaProducer
 import json
 import logging as log
+import traceback
 
 class PluginReceiver():
     """MON Plugin receiver receiving request messages & responding using producer for vROPs
@@ -24,37 +47,69 @@ class PluginReceiver():
         broker_uri = None
         self.mon_plugin = MonPlugin()
         self.consumer = vROP_KafkaConsumer(topics, broker_uri)
-        #To Do- Change producer
-        #self.producer = KafkaProducer()
+        #Core producer
+        self.producer = KafkaProducer()
 
     def consume(self):
         """Consume the message, act on it & respond
         """
         try:
             for message in self.consumer.vrops_consumer:
-                if message.topic == 'alarm_request':
-                    if message.key == "create_alarm_request":
-                        config_alarm_info = json.loads(message.value)
-                        alarm_uuid = self.create_alarm(config_alarm_info['alarm_creation_request'])
-                        log.info("Alarm created with alarm uuid: {}".format(alarm_uuid))
-                        #To Do - Publish message using producer
-                        #self.publish_create_alarm_status(alarm_uuid, config_alarm_info)
-                    elif message.key == "update_alarm_request":
-                        update_alarm_info = json.loads(message.value)
-                        alarm_uuid = self.update_alarm(update_alarm_info['alarm_creation_request'])
-                        log.info("Alarm defination updated : alarm uuid: {}".format(alarm_uuid))
-                        #To Do - Publish message using producer
-                        #self.publish_update_alarm_status(alarm_uuid, update_alarm_info)
-                elif message.topic == 'metric_request':
-                    if message.key == "read_metric_data_request":
-                        metric_request_info = json.loads(message.value)
-                        metrics_data = self.mon_plugin.get_metrics_data(metric_request_info)
-                        log.info("Collected Metrics Data: {}".format(metrics_data))
-                        #To Do - Publish message using producer
-                        #self.publish_metrics_data_status(metrics_data)
+                vim_type = None
+                message_values = json.loads(message.value)
+                if message_values.has_key('vim_type'):
+                    vim_type = message_values['vim_type'].lower()
+                if vim_type == 'vmware':
+                    log.info("Action required for: {}".format(message.topic))
+                    if message.topic == 'alarm_request':
+                        if message.key == "create_alarm_request":
+                            config_alarm_info = json.loads(message.value)
+                            alarm_uuid = self.create_alarm(config_alarm_info['alarm_creation_request'])
+                            log.info("Alarm created with alarm uuid: {}".format(alarm_uuid))
+                            #Publish message using producer
+                            self.publish_create_alarm_status(alarm_uuid, config_alarm_info)
+                        elif message.key == "update_alarm_request":
+                            update_alarm_info = json.loads(message.value)
+                            alarm_uuid = self.update_alarm(update_alarm_info['alarm_update_request'])
+                            log.info("In plugin_receiver: Alarm defination updated : alarm uuid: {}".format(alarm_uuid))
+                            #Publish message using producer
+                            self.publish_update_alarm_status(alarm_uuid, update_alarm_info)
+                        elif message.key == "delete_alarm_request":
+                            delete_alarm_info = json.loads(message.value)
+                            alarm_uuid = self.delete_alarm(delete_alarm_info['alarm_deletion_request'])
+                            log.info("In plugin_receiver: Alarm defination deleted : alarm uuid: {}".format(alarm_uuid))
+                            #Publish message using producer
+                            self.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
+                        elif message.key == "list_alarm_request":
+                            request_input = json.loads(message.value)
+                            triggered_alarm_list = self.list_alarms(request_input['alarm_list_request'])
+                            #Publish message using producer
+                            self.publish_list_alarm_response(triggered_alarm_list, request_input)
+                    elif message.topic == 'metric_request':
+                        if message.key == "read_metric_data_request":
+                            metric_request_info = json.loads(message.value)
+                            metrics_data = self.mon_plugin.get_metrics_data(metric_request_info)
+                            log.info("Collected Metrics Data: {}".format(metrics_data))
+                            #Publish message using producer
+                            self.publish_metrics_data_status(metrics_data)
+                        elif message.key == "create_metric_request":
+                            metric_info = json.loads(message.value)
+                            metric_status = self.verify_metric(metric_info['metric_create'])
+                            #Publish message using producer
+                            self.publish_create_metric_response(metric_info, metric_status)
+                        elif message.key == "update_metric_request":
+                            metric_info = json.loads(message.value)
+                            metric_status = self.verify_metric(metric_info['metric_create'])
+                            #Publish message using producer
+                            self.publish_update_metric_response(metric_info, metric_status)
+                        elif message.key == "delete_metric_request":
+                            metric_info = json.loads(message.value)
+                            #Deleting Metric Data is not allowed. Publish status as False
+                            log.warn("Deleting Metric is not allowed: {}".format(metric_info['metric_name']))
+                            #Publish message using producer
+                            self.publish_delete_metric_response(metric_info)
 
         except Exception as exp:
-            log.error("Exception in receiver: {}".format(exp))
+            log.error("Exception in receiver: {} {}".format(exp, traceback.format_exc()))
 
     def create_alarm(self, config_alarm_info):
         """Create alarm using vROPs plugin
@@ -76,13 +131,13 @@ class PluginReceiver():
                              "status": True if alarm_uuid else False
                             }
                        }
-        #To Do - Add producer
-        #self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
 
     def update_alarm(self, update_alarm_info):
         """Updare already created alarm
         """
-        alarm_uuid = self.mon_plugin.reconfigure_alarm(update_alarm_info)
+        alarm_uuid = self.mon_plugin.update_alarm_configuration(update_alarm_info)
         return alarm_uuid
 
     def publish_update_alarm_status(self, alarm_uuid, update_alarm_info):
@@ -93,24 +148,132 @@ class PluginReceiver():
         response_msg = {"schema_version":1.0,
                          "schema_type":"update_alarm_response",
                          "alarm_update_response":
-                            {"correlation_id":update_alarm_info["alarm_creation_request"]["correlation_id"],
+                            {"correlation_id":update_alarm_info["alarm_update_request"]["correlation_id"],
+                             "alarm_uuid":alarm_uuid,
+                             "status": True if alarm_uuid else False
+                            }
+                       }
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+
+    def delete_alarm(self, delete_alarm_info):
+        """Delete alarm configuration
+        """
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_info)
+        return alarm_uuid
+
+    def publish_delete_alarm_status(self, alarm_uuid, delete_alarm_info):
+        """Publish update alarm status requests using producer
+        """
+        topic = 'alarm_response'
+        msg_key = 'delete_alarm_response'
+        response_msg = {"schema_version":1.0,
+                         "schema_type":"delete_alarm_response",
+                         "alarm_deletion_response":
+                            {"correlation_id":delete_alarm_info["alarm_deletion_request"]["correlation_id"],
                              "alarm_uuid":alarm_uuid,
                              "status": True if alarm_uuid else False
                             }
                        }
-        #To Do - Add producer
-        #self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+
 
     def publish_metrics_data_status(self, metrics_data):
         """Publish the requested metric data using producer
         """
         topic = 'metric_response'
         msg_key = 'read_metric_data_response'
-        #To Do - Add producer
-        #self.producer.publish(key=msg_key, value=json.dumps(metrics_data), topic=topic)
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(metrics_data), topic=topic)
+
+
+    def verify_metric(self, metric_info):
+        """Verify if metric is supported or not
+        """
+        metric_key_status = self.mon_plugin.verify_metric_support(metric_info)
+        return metric_key_status
+
+    def publish_create_metric_response(self, metric_info, metric_status):
+        """Publish create metric response
+        """
+        topic = 'metric_response'
+        msg_key = 'create_metric_response'
+        response_msg = {"schema_version":1.0,
+                         "schema_type":"create_metric_response",
+                         "correlation_id":metric_info['correlation_id'],
+                         "metric_create_response":
+                            {
+                             "metric_uuid":0,
+                             "resource_uuid":metric_info['metric_create']['resource_uuid'],
+                             "status":metric_status
+                            }
+                       }
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+
+    def publish_update_metric_response(self, metric_info, metric_status):
+        """Publish update metric response
+        """
+        topic = 'metric_response'
+        msg_key = 'update_metric_response'
+        response_msg = {"schema_version":1.0,
+                        "schema_type":"metric_update_response",
+                        "correlation_id":metric_info['correlation_id'],
+                        "metric_update_response":
+                            {
+                             "metric_uuid":0,
+                             "resource_uuid":metric_info['metric_create']['resource_uuid'],
+                             "status":metric_status
+                            }
+                       }
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+
+    def publish_delete_metric_response(self, metric_info):
+        """
+        """
+        topic = 'metric_response'
+        msg_key = 'delete_metric_response'
+        response_msg = {"schema_version":1.0,
+                        "schema_type":"delete_metric_response",
+                        "correlation_id":metric_info['correlation_id'],
+                        "metric_name":metric_info['metric_name'],
+                        "metric_uuid":0,
+                        "resource_uuid":metric_info['resource_uuid'],
+                        "tenant_uuid":metric_info['tenant_uuid'],
+                        "status":False
+                       }
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+
+    def list_alarms(self, list_alarm_input):
+        """
+        """
+        triggered_alarms = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+        return triggered_alarms
+
+
+    def publish_list_alarm_response(self, triggered_alarm_list, list_alarm_input):
+        """
+        """
+        topic = 'alarm_response'
+        msg_key = 'list_alarm_response'
+        response_msg = {"schema_version":1.0,
+                        "schema_type":"list_alarm_response",
+                        "correlation_id":list_alarm_input['alarm_list_request']['correlation_id'],
+                        "resource_uuid":list_alarm_input['alarm_list_request']['resource_uuid'],
+                        "list_alarm_resp":triggered_alarm_list
+                       }
+        #Core producer
+        self.producer.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+
+
+def main():
+    log.basicConfig(filename='mon_vrops_log.log',level=log.DEBUG)
+    plugin_rcvr = PluginReceiver()
+    plugin_rcvr.consume()
 
-#For testing
-#log.basicConfig(filename='mon_vrops_log.log',level=log.DEBUG)
-#plugin_rcvr = PluginReceiver()
-#plugin_rcvr.consume()
+if __name__ == "__main__":
+    main()
 
diff --git a/plugins/vRealiseOps/vROPs_Webservice/__init__.py b/plugins/vRealiseOps/vROPs_Webservice/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/plugins/vRealiseOps/vROPs_Webservice/install.sh b/plugins/vRealiseOps/vROPs_Webservice/install.sh
new file mode 100755 (executable)
index 0000000..90bf68d
--- /dev/null
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SSL_Cert_Dir="${BASEDIR}/SSL_certificate"
+Domain_Name="www.vrops_webservice.com"
+WebServiceFile='vrops_webservice.py'
+
+echo '
+ #################################################################
+ #####             Installing Required Packages            #####
+ #################################################################'
+
+#Function to install packages using apt-get
+function install_packages(){
+      [ -x /usr/bin/apt-get ] && apt-get install -y $*
+       
+       #check properly installed
+       for PACKAGE in $*
+       do
+           PACKAGE_INSTALLED="no"
+           [ -x /usr/bin/apt-get ] && dpkg -l $PACKAGE            &>> /dev/null && PACKAGE_INSTALLED="yes"
+           if [ "$PACKAGE_INSTALLED" = "no" ]
+           then
+               echo "failed to install package '$PACKAGE'. Revise network connectivity and try again" >&2
+               exit 1
+          fi
+       done
+   }
+  
+apt-get update  # To get the latest package lists
+
+#Assumption: _DISTRO is normally exported by the caller; if unset, fall back to lsb_release (default Ubuntu)
+[ -z "$_DISTRO" ] && _DISTRO=$(lsb_release -is 2>/dev/null || echo "Ubuntu")
+
+[ "$_DISTRO" == "Ubuntu" ] && install_packages "python-yaml python-bottle python-jsonschema python-requests libxml2-dev libxslt-dev python-dev python-pip openssl"
+[ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && install_packages "python-jsonschema python-requests libxslt-devel libxml2-devel python-devel python-pip openssl"
+#The only way to install python-bottle on Centos7 is with easy_install or pip
+[ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && easy_install -U bottle
+
+#required for vmware connector; TODO: move that to a separate opt-in install script
+sudo pip install --upgrade pip
+sudo pip install cherrypy
+
+echo '
+ #################################################################
+ #####             Generate SSL Certificate                #####
+ #################################################################'
+#Create SSL Certificate folder and files
+mkdir -p "${SSL_Cert_Dir}"
+
+openssl genrsa -out "${SSL_Cert_Dir}/${Domain_Name}".key 2048
+openssl req -new -x509 -key "${SSL_Cert_Dir}/${Domain_Name}".key -out "${SSL_Cert_Dir}/${Domain_Name}".cert -days 3650 -subj /CN="${Domain_Name}"
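+#Assumption: the self-signed certificate generated above (valid for 3650 days) is only
+#meant for testing; replace it with a CA-signed certificate for production deployments.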
+
+echo '
+ #################################################################
+ #####             Start Web Service                      #####
+ #################################################################'
+
+nohup python "${WebServiceFile}" &
+
+echo ' 
+ #################################################################
+ #####              Done                                  #####
+ #################################################################'
diff --git a/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice.py b/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice.py
new file mode 100755 (executable)
index 0000000..806733c
--- /dev/null
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+"""
+ Webservice for vRealize Operations (vROPs) to post/notify alarm details.
+
+"""
+__author__ = "Arpita Kate"
+__date__ = "$15-Sept-2017 16:09:29$"
+__version__ = '0.1'
+
+
+from bottle import (ServerAdapter, route, run, server_names, redirect, default_app,
+                     request, response, template, debug, TEMPLATE_PATH , static_file)
+from socket import gethostname
+from datetime import datetime
+from xml.etree import ElementTree as ET
+import logging
+import os
+import json
+import requests
+
+from core.message_bus.producer import KafkaProducer
+
+try:
+    from cheroot.wsgi import Server as WSGIServer
+    from cheroot.ssl.pyopenssl import pyOpenSSLAdapter
+except ImportError:
+    from cherrypy.wsgiserver import CherryPyWSGIServer as WSGIServer
+    from cherrypy.wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter
+
+#Set Constants
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+CERT_DIR = os.path.join(BASE_DIR, "SSL_certificate")
+CERTIFICATE = os.path.join(CERT_DIR, "www.vrops_webservice.com.cert")
+KEY = os.path.join(CERT_DIR, "www.vrops_webservice.com.key")
+CONFIG_FILE = os.path.join(BASE_DIR, '../vrops_config.xml')
+#Severity Mapping from vROPs to OSM
+VROPS_SEVERITY_TO_OSM_MAPPING = {
+                "ALERT_CRITICALITY_LEVEL_CRITICAL":"CRITICAL",
+                "ALERT_CRITICALITY_LEVEL_WARNING":"WARNING",
+                "ALERT_CRITICALITY_LEVEL_IMMEDIATE":"MAJOR",
+                "ALERT_CRITICALITY_LEVEL_INFO":"INDETERMINATE",
+                "ALERT_CRITICALITY_LEVEL_AUTO":"INDETERMINATE",
+                "ALERT_CRITICALITY_LEVEL_UNKNOWN":"INDETERMINATE",
+                "ALERT_CRITICALITY_LEVEL_NONE":"INDETERMINATE"
+            }
+
+#Set logger
+logger = logging.getLogger('vROPs_Webservice')
+formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+hdlr = logging.FileHandler(os.path.join(BASE_DIR,"vrops_webservice.log"))
+hdlr.setFormatter(formatter)
+logger.addHandler(hdlr)
+logger.setLevel(logging.DEBUG)
+
+
+def format_datetime(str_date):
+    """
+        Method to format datetime
+        Args:
+            str_date - datetime string
+        Returns:
+           formated datetime
+    """
+    date_fromat = "%Y-%m-%dT%H:%M:%S"
+    formated_datetime = None
+    try:
+        datetime_obj = datetime.fromtimestamp(float(str_date)/1000.)
+        formated_datetime = datetime_obj.strftime(date_fromat)
+    except Exception as exp:
+        logger.error('Exception: {} occured while converting date {} into format {}'.format(
+                           exp,str_date, date_fromat))
+
+    return formated_datetime
+
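+#Illustrative use (assumed vROPs epoch-in-milliseconds input; exact output depends on the local timezone):
+#   format_datetime("1505726369000")  ->  "2017-09-18T..."
+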
+def get_alarm_config():
+    """
+        Method to get configuration parameters
+        Args:
+            None
+        Returns:
+            dictionary of config parameters
+    """
+    alarm_config = {}
+    try:
+        xml_content = ET.parse(CONFIG_FILE)
+        alarms = xml_content.getroot()
+        for alarm in alarms:
+            if alarm.tag == 'Access_Config':
+                for param in alarm:
+                    alarm_config[param.tag] = param.text
+    except Exception as exp:
+        logger.error('Exception: {} occured while parsing config file.'.format(exp))
+
+    return alarm_config
+
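+#Assumed layout of the Access_Config section in vrops_config.xml, limited to the keys
+#read in this module: vrops_site, vrops_user, vrops_password and tenant_id.
+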
+def get_alarm_definitionID(alarm_uuid):
+    """
+         Method to get alarm/alert definition ID
+            Args:
+                alarm_uuid : UUID of alarm
+            Returns:
+                alarm definition ID
+    """
+    alarm_definitionID = None
+    if alarm_uuid:
+        try:
+            access_config = get_alarm_config()
+            headers = {'Accept': 'application/json'}
+            api_url = '{}/suite-api/api/alerts/{}'.format(access_config.get('vrops_site'), alarm_uuid)
+            api_response = requests.get(
+                            api_url,
+                            auth=(access_config.get('vrops_user'), access_config.get('vrops_password')),
+                            verify=False, headers=headers
+                            )
+
+            if api_response.status_code == 200:
+                data = api_response.json()
+                if data.get("alertDefinitionId") is not None:
+                    alarm_definitionID = '-'.join(data.get("alertDefinitionId").split('-')[1:])
+            else:
+                logger.error("Failed to get alert definition ID for alarm {}".format(alarm_uuid))
+        except Exception as exp:
+            logger.error("Exception: {} occurred while getting alert definition ID for alarm {}".format(exp, alarm_uuid))
+
+    return alarm_definitionID
+
+
+@route('/notify/<alarmID>', method='POST')
+def notify_alarm(alarmID):
+    """
+        Method to notify alarm details by publishing a message on the Kafka message bus
+        Args:
+            alarmID - vROPs alert instance ID (used as alarm_instance_uuid)
+        Returns:
+           response code
+    """
+    logger.info("Request:{} from:{} {} {} ".format(request, request.remote_addr, request.method, request.url))
+    response.headers['Content-Type'] = 'application/json'
+    try:
+        postdata = json.loads(request.body.read())
+        notify_details = {}
+        alarm_config = get_alarm_config()
+        #Parse notify data
+        notify_details['alarm_uuid'] = get_alarm_definitionID(postdata.get('alertId'))
+        notify_details['description'] = postdata.get('info')
+        notify_details['alarm_instance_uuid'] = alarmID
+        notify_details['resource_uuid'] = '-'.join(postdata.get('alertName').split('-')[1:])
+        notify_details['tenant_uuid'] = alarm_config.get('tenant_id')
+        notify_details['vim_type'] = "VMware"
+        notify_details['severity'] = VROPS_SEVERITY_TO_OSM_MAPPING.get(postdata.get('criticality'), 'INDETERMINATE')
+        notify_details['status'] = postdata.get('status')
+        if postdata.get('startDate'):
+            notify_details['start_date_time'] = format_datetime(postdata.get('startDate'))
+        if postdata.get('updateDate'):
+            notify_details['update_date_time'] = format_datetime(postdata.get('updateDate'))
+        if postdata.get('cancelDate'):
+            notify_details['cancel_date_time'] = format_datetime(postdata.get('cancelDate'))
+
+        alarm_details = {'schema_version': 1.0,
+                         'schema_type': "notify_alarm",
+                         'notify_details': notify_details
+                        }
+        alarm_data = json.dumps(alarm_details)
+        logger.info("Alarm details: {}".format(alarm_data))
+
+        #Publish Alarm details
+        kafkaMsgProducer = KafkaProducer()
+        kafkaMsgProducer.publish(topic='alarm_response', key='notify_alarm', value=alarm_data)
+
+        #return 201 on Success
+        response.status = 201
+
+    except Exception as exp:
+        logger.error('Exception: {} occured while notifying alarm {}.'.format(exp, alarmID))
+        #return 500 on Error
+        response.status = 500
+
+    return response
+
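+#Illustrative (assumed) vROPs notification body, limited to the fields read above:
+#{"alertId": "...", "alertName": "...", "info": "...", "criticality": "ALERT_CRITICALITY_LEVEL_WARNING",
+# "status": "...", "startDate": "1505726369000", "updateDate": "...", "cancelDate": "..."}
+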
+
+class SSLWebServer(ServerAdapter):
+    """
+    CherryPy web server with SSL support.
+    """
+
+    def run(self, handler):
+        """
+        Runs a CherryPy Server using the SSL certificate.
+        """
+        server = WSGIServer((self.host, self.port), handler)
+        server.ssl_adapter = pyOpenSSLAdapter(
+            certificate=CERTIFICATE,
+            private_key=KEY,
+           # certificate_chain="intermediate_cert.crt"
+        )
+
+        try:
+            logger.info("Starting vROPs Web Service")
+            server.start()
+        except Exception as exp:
+            server.stop()
+            logger.error("Exception: {} Stopped vROPs Web Service".format(exp))
+
+
+if __name__ == "__main__":
+    #Start SSL Web Service
+    logger.info("Start vROPs Web Serverice")
+    app = default_app()
+    server_names['sslwebserver'] = SSLWebServer
+    run(app=app,host=gethostname(), port=8080, server='sslwebserver')
+
index da7cc53..d7e8403 100644 (file)
@@ -1,3 +1,25 @@
+<!--
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+-->
 <alarmsDefaultConfig>
     <Average_Memory_Usage_Above_Threshold>
         <vrops_alarm>Avg_Mem_Usage_Above_Thr</vrops_alarm>
         <tenant_id>Org2-VDC-PVDC1</tenant_id>
     </Access_Config>
 </alarmsDefaultConfig>
+