Adds vdu_id to message bus models

LB Dockerfile now uses Python3
Removes obsolete parameters from models
Refactors and fixes kafka integration tests
Refactors deprecated log warnings
Adds DATABASE env var to set MON database url
Fixes indentation and dependency issues in AWS plugin
Uses open from io package for Python 2 and 3 compatibility

Signed-off-by: Benjamin Diaz <bdiaz@whitestack.com>
Change-Id: I70af22d2fbc2cb1bfd5d9632d9daa80e9d7f6b62
diff --git a/docker/Dockerfile b/docker/Dockerfile
index ad00631..b07f781 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -25,16 +25,16 @@
 LABEL authors="Guillermo Calvino"
 
 RUN apt-get --yes update \
- && apt-get --yes install git python python-pip sudo libmysqlclient-dev \
+ && apt-get --yes install git python python-pip python3 python3-pip libmysqlclient-dev libssl-dev libffi-dev \
  && pip install pip==9.0.3
 
 COPY requirements.txt /mon/requirements.txt
 
-RUN pip install -r /mon/requirements.txt
+RUN pip3 install -r /mon/requirements.txt
 
 COPY . /mon
 
-RUN pip install /mon
+RUN pip3 install /mon
 
 # These ENV must be provided
 # ENV BROKER_URI=kafka:9092
diff --git a/docker/scripts/runInstall.sh b/docker/scripts/runInstall.sh
index 87954ed..611bbe8 100755
--- a/docker/scripts/runInstall.sh
+++ b/docker/scripts/runInstall.sh
@@ -21,6 +21,6 @@
 # contact: bdiaz@whitestack.com or glavado@whitestack.com
 ##
 /bin/bash /mon/osm_mon/plugins/vRealiseOps/vROPs_Webservice/install.sh
-nohup python /mon/osm_mon/plugins/OpenStack/Aodh/notifier.py &
-python /mon/osm_mon/core/message_bus/common_consumer.py
+nohup python3 /mon/osm_mon/plugins/OpenStack/Aodh/notifier.py &
+python3 /mon/osm_mon/core/message_bus/common_consumer.py
 
diff --git a/osm_mon/core/auth.py b/osm_mon/core/auth.py
index 3a88a50..bb6dbba 100644
--- a/osm_mon/core/auth.py
+++ b/osm_mon/core/auth.py
@@ -47,8 +47,6 @@
 
     def get_credentials(self, vim_uuid):
         creds = self.database_manager.get_credentials(vim_uuid)
-        if creds.config is None:
-            creds.config = {}
         return creds
 
     def delete_auth_credentials(self, creds_dict):
diff --git a/osm_mon/core/database.py b/osm_mon/core/database.py
index eab4bed..857e8e5 100644
--- a/osm_mon/core/database.py
+++ b/osm_mon/core/database.py
@@ -25,14 +25,15 @@
 import logging
 
 from peewee import *
-from playhouse.sqlite_ext import SqliteExtDatabase
+from playhouse.db_url import connect
 
 from osm_mon.core.settings import Config
 
 log = logging.getLogger(__name__)
 cfg = Config.instance()
+cfg.read_environ()
 
-db = SqliteExtDatabase('mon.db')
+db = connect(cfg.DATABASE)
 
 
 class BaseModel(Model):
diff --git a/osm_mon/core/message_bus/producer.py b/osm_mon/core/message_bus/producer.py
index d4f8015..bf0839c 100755
--- a/osm_mon/core/message_bus/producer.py
+++ b/osm_mon/core/message_bus/producer.py
@@ -25,13 +25,9 @@
 """
 
 import logging
-
 import os
 
-from jsmin import jsmin
-
 from kafka import KafkaProducer as kaf
-
 from kafka.errors import KafkaError
 
 __author__ = "Prithiv Mohan"
@@ -83,225 +79,166 @@
         except KafkaError:
             pass
 
-    def create_alarm_request(self, key, message, topic):
+    def create_alarm_request(self, key, message):
         """Create alarm request from SO to MON."""
         # External to MON
 
-        payload_create_alarm = jsmin(
-            open(os.path.join(json_path, 'create_alarm.json')).read())
         self.publish(key,
                      value=message,
                      topic='alarm_request')
 
-    def create_alarm_response(self, key, message, topic):
+    def create_alarm_response(self, key, message):
         """Response to a create alarm request from MON to SO."""
         # Internal to MON
 
-        payload_create_alarm_resp = jsmin(
-            open(os.path.join(json_path, 'create_alarm_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_response')
 
-    def acknowledge_alarm(self, key, message, topic):
+    def acknowledge_alarm(self, key, message):
         """Alarm acknowledgement request from SO to MON."""
         # Internal to MON
 
-        payload_acknowledge_alarm = jsmin(
-            open(os.path.join(json_path, 'acknowledge_alarm.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_request')
 
-    def list_alarm_request(self, key, message, topic):
+    def list_alarm_request(self, key, message):
         """List alarms request from SO to MON."""
         # External to MON
 
-        payload_alarm_list_req = jsmin(
-            open(os.path.join(json_path, 'list_alarm_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_request')
 
-    def notify_alarm(self, key, message, topic):
+    def notify_alarm(self, key, message):
         """Notify of triggered alarm from MON to SO."""
-        payload_notify_alarm = jsmin(
-            open(os.path.join(json_path, 'notify_alarm.json')).read())
 
         self.publish(key,
                      value=message,
                      topic='alarm_response')
 
-    def list_alarm_response(self, key, message, topic):
+    def list_alarm_response(self, key, message):
         """Response for list alarms request from MON to SO."""
-        payload_list_alarm_resp = jsmin(
-            open(os.path.join(json_path, 'list_alarm_resp.json')).read())
 
         self.publish(key,
                      value=message,
                      topic='alarm_response')
 
-    def update_alarm_request(self, key, message, topic):
+    def update_alarm_request(self, key, message):
         """Update alarm request from SO to MON."""
         # External to Mon
 
-        payload_update_alarm_req = jsmin(
-            open(os.path.join(json_path, 'update_alarm_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_request')
 
-    def update_alarm_response(self, key, message, topic):
+    def update_alarm_response(self, key, message):
         """Response from update alarm request from MON to SO."""
         # Internal to Mon
 
-        payload_update_alarm_resp = jsmin(
-            open(os.path.join(json_path, 'update_alarm_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_response')
 
-    def delete_alarm_request(self, key, message, topic):
+    def delete_alarm_request(self, key, message):
         """Delete alarm request from SO to MON."""
         # External to Mon
 
-        payload_delete_alarm_req = jsmin(
-            open(os.path.join(json_path, 'delete_alarm_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_request')
 
-    def delete_alarm_response(self, key, message, topic):
+    def delete_alarm_response(self, key, message):
         """Response for a delete alarm request from MON to SO."""
         # Internal to Mon
 
-        payload_delete_alarm_resp = jsmin(
-            open(os.path.join(json_path, 'delete_alarm_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='alarm_response')
 
-    def create_metrics_request(self, key, message, topic):
+    def create_metrics_request(self, key, message):
         """Create metrics request from SO to MON."""
         # External to Mon
 
-        payload_create_metrics_req = jsmin(
-            open(os.path.join(json_path, 'create_metric_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_request')
 
-    def create_metrics_resp(self, key, message, topic):
+    def create_metrics_resp(self, key, message):
         """Response for a create metric request from MON to SO."""
         # Internal to Mon
 
-        payload_create_metrics_resp = jsmin(
-            open(os.path.join(json_path, 'create_metric_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_response')
 
-    def read_metric_data_request(self, key, message, topic):
+    def read_metric_data_request(self, key, message):
         """Read metric data request from SO to MON."""
         # External to Mon
 
-        payload_read_metric_data_request = jsmin(
-            open(os.path.join(json_path, 'read_metric_data_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_request')
 
-    def read_metric_data_response(self, key, message, topic):
+    def read_metric_data_response(self, key, message):
         """Response from MON to SO for read metric data request."""
         # Internal to Mon
 
-        payload_metric_data_response = jsmin(
-            open(os.path.join(json_path, 'read_metric_data_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_response')
 
-    def list_metric_request(self, key, message, topic):
+    def list_metric_request(self, key, message):
         """List metric request from SO to MON."""
         # External to MON
 
-        payload_metric_list_req = jsmin(
-            open(os.path.join(json_path, 'list_metric_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_request')
 
-    def list_metric_response(self, key, message, topic):
+    def list_metric_response(self, key, message):
         """Response from SO to MON for list metrics request."""
         # Internal to MON
 
-        payload_metric_list_resp = jsmin(
-            open(os.path.join(json_path, 'list_metric_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_response')
 
-    def delete_metric_request(self, key, message, topic):
+    def delete_metric_request(self, key, message):
         """Delete metric request from SO to MON."""
         # External to Mon
 
-        payload_delete_metric_req = jsmin(
-            open(os.path.join(json_path, 'delete_metric_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_request')
 
-    def delete_metric_response(self, key, message, topic):
+    def delete_metric_response(self, key, message):
         """Response from MON to SO for delete metric request."""
         # Internal to Mon
 
-        payload_delete_metric_resp = jsmin(
-            open(os.path.join(json_path, 'delete_metric_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_response')
 
-    def update_metric_request(self, key, message, topic):
+    def update_metric_request(self, key, message):
         """Metric update request from SO to MON."""
         # External to Mon
 
-        payload_update_metric_req = jsmin(
-            open(os.path.join(json_path, 'update_metric_req.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_request')
 
-    def update_metric_response(self, key, message, topic):
+    def update_metric_response(self, key, message):
         """Reponse from MON to SO for metric update."""
         # Internal to Mon
 
-        payload_update_metric_resp = jsmin(
-            open(os.path.join(json_path, 'update_metric_resp.json')).read())
-
         self.publish(key,
                      value=message,
                      topic='metric_response')
 
-    def access_credentials(self, key, message, topic):
+    def access_credentials(self, key, message):
         """Send access credentials to MON from SO."""
-        payload_access_credentials = jsmin(
-            open(os.path.join(json_path, 'access_credentials.json')).read())
 
         self.publish(key,
                      value=message,
diff --git a/osm_mon/core/models/create_alarm.json b/osm_mon/core/models/create_alarm.json
index d7fce88..a25ea7b 100644
--- a/osm_mon/core/models/create_alarm.json
+++ b/osm_mon/core/models/create_alarm.json
@@ -29,8 +29,8 @@
     "correlation_id": { "type": "integer" },
     "alarm_name": { "type": "string" },
     "metric_name": { "type": "string" },
-    "tenant_uuid": { "type": "string" },
     "resource_uuid": { "type": "string" },
+    "vdu_id": { "type": "string"},
     "description": { "type": "string" },
     "severity": { "type": "string" },
     "operation": { "type": "string" },
diff --git a/osm_mon/core/models/create_metric_req.json b/osm_mon/core/models/create_metric_req.json
index 3710b43..6fa0972 100644
--- a/osm_mon/core/models/create_metric_req.json
+++ b/osm_mon/core/models/create_metric_req.json
@@ -23,7 +23,6 @@
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
   "correlation_id": { "type": "integer" },
   "vim_type": { "type": "string" },
   "vim_uuid": { "type": "string" },
@@ -31,7 +30,8 @@
   {
     "metric_name": { "type" : "string" },
     "metric_unit": { "type": "string" },
-    "resource_uuid": { "type": "string" }
+    "resource_uuid": { "type": "string" },
+    "vdu_id": { "type": "string"}
   },
   "required": [ "schema_version",
                 "schema_type",
diff --git a/osm_mon/core/models/delete_alarm_req.json b/osm_mon/core/models/delete_alarm_req.json
index 5683b8d..a5b99e6 100644
--- a/osm_mon/core/models/delete_alarm_req.json
+++ b/osm_mon/core/models/delete_alarm_req.json
@@ -27,7 +27,8 @@
   "alarm_delete_request":
   {
     "alarm_uuid": { "type": "string" },
-    "correlation_id": { "type": "integer" }
+    "correlation_id": { "type": "integer" },
+    "vdu_id": { "type": "string"}
   },
   "required": [ "schema_version",
                 "schema_type",
diff --git a/osm_mon/core/models/delete_metric_req.json b/osm_mon/core/models/delete_metric_req.json
index c4cfdad..c5788cd 100644
--- a/osm_mon/core/models/delete_metric_req.json
+++ b/osm_mon/core/models/delete_metric_req.json
@@ -25,11 +25,11 @@
   "metric_name": { "type": "string" },
   "metric_uuid": { "type": "string" },
   "resource_uuid": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
+  "vdu_id": { "type": "string"},
   "correlation_id": { "type": "integer" },
   "vim_type": { "type": "string" },
   "vim_uuid": { "type": "string" },
-  "required": [ "schema_verion",
+  "required": [ "schema_version",
                 "schema_type",
                 "metric_name",
                 "metric_uuid",
diff --git a/osm_mon/core/models/list_alarm_req.json b/osm_mon/core/models/list_alarm_req.json
index dd46978..7dd9785 100644
--- a/osm_mon/core/models/list_alarm_req.json
+++ b/osm_mon/core/models/list_alarm_req.json
@@ -28,6 +28,7 @@
   {
     "correlation_id": { "type": "integer" },
     "resource_uuid": { "type": "string" },
+    "vdu_id": { "type": "string"},
     "alarm_name": { "type": "string" },
     "severity": { "type" : "string" }
   },
diff --git a/osm_mon/core/models/list_metric_req.json b/osm_mon/core/models/list_metric_req.json
index fecdde2..c684208 100644
--- a/osm_mon/core/models/list_metric_req.json
+++ b/osm_mon/core/models/list_metric_req.json
@@ -28,7 +28,8 @@
   {
     "metric_name": { "type": "string" },
     "correlation_id": { "type": "integer" },
-    "resource_uuid": { "type": "string" }
+    "resource_uuid": { "type": "string" },
+    "vdu_id": { "type": "string"}
   },
   "required": [ "schema_version",
                 "schema_type",
diff --git a/osm_mon/core/models/notify_alarm.json b/osm_mon/core/models/notify_alarm.json
index 9497b7f..0430d6a 100644
--- a/osm_mon/core/models/notify_alarm.json
+++ b/osm_mon/core/models/notify_alarm.json
@@ -28,8 +28,8 @@
     {
       "alarm_uuid": { "type": "string" },
       "resource_uuid": { "type": "string" },
+      "vdu_id": { "type": "string"},
       "description": { "type": "string" },
-      "tenant_uuid": { "type": "string" },
       "vim_type": { "type": "string" },
       "vim_uuid": { "type": "string" },
       "severity": { "type" : "string" },
diff --git a/osm_mon/core/models/read_metric_data_req.json b/osm_mon/core/models/read_metric_data_req.json
index 922b91e..2554be8 100644
--- a/osm_mon/core/models/read_metric_data_req.json
+++ b/osm_mon/core/models/read_metric_data_req.json
@@ -25,7 +25,7 @@
   "metric_name": { "type": "string" },
   "metric_uuid": { "type": "string" },
   "resource_uuid": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
+  "vdu_id": { "type": "string"},
   "correlation_id": { "type": "integer" },
   "vim_type": { "type": "string" },
   "vim_uuid": { "type": "string" },
diff --git a/osm_mon/core/models/update_alarm_req.json b/osm_mon/core/models/update_alarm_req.json
index ab5c312..a8a0f82 100644
--- a/osm_mon/core/models/update_alarm_req.json
+++ b/osm_mon/core/models/update_alarm_req.json
@@ -29,6 +29,7 @@
     "correlation_id": { "type": "integer" },
     "alarm_uuid": { "type": "string" },
     "metric_uuid": { "type": "string" },
+    "vdu_id": { "type": "string"},
     "description": { "type": "string" },
     "severity": { "type": "string" },
     "operation": { "type": "string" },
diff --git a/osm_mon/core/models/update_metric_req.json b/osm_mon/core/models/update_metric_req.json
index f0f5b97..4b1c157 100644
--- a/osm_mon/core/models/update_metric_req.json
+++ b/osm_mon/core/models/update_metric_req.json
@@ -22,7 +22,6 @@
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
   "correlation_id": { "type": "integer" },
   "vim_type": { "type": "string" },
   "vim_uuid": { "type": "string" },
@@ -30,7 +29,8 @@
   {
     "metric_name": { "type": "string" },
     "metric_unit": { "type": "string" },
-    "resource_uuid": { "type": "string" }
+    "resource_uuid": { "type": "string" },
+    "vdu_id": { "type": "string"}
   },
   "required": [ "schema_version",
                 "schema_type",
diff --git a/osm_mon/core/settings.py b/osm_mon/core/settings.py
index e40fecd..ae717e0 100644
--- a/osm_mon/core/settings.py
+++ b/osm_mon/core/settings.py
@@ -60,6 +60,7 @@
 
     _configuration = [
         CfgParam('BROKER_URI', "localhost:9092", six.text_type),
+        CfgParam('DATABASE', "sqlite:///mon_sqlite.db", six.text_type),
         CfgParam('OS_NOTIFIER_URI', "http://localhost:8662", six.text_type),
         CfgParam('OS_DEFAULT_GRANULARITY', "300", six.text_type),
     ]
@@ -79,5 +80,5 @@
                 val = str(os.environ[key])
                 setattr(self, key, val)
             except KeyError as exc:
-                log.warn("Failed to configure plugin: %s", exc)
+                log.warning("Environment variable not present: %s", exc)
         return
diff --git a/osm_mon/plugins/CloudWatch/connection.py b/osm_mon/plugins/CloudWatch/connection.py
index 023071f..5853ae9 100644
--- a/osm_mon/plugins/CloudWatch/connection.py
+++ b/osm_mon/plugins/CloudWatch/connection.py
@@ -26,7 +26,6 @@
 __author__ = "Wajeeha Hamid"
 __date__   = "18-September-2017"
 
-import sys
 import os
 
 try:
@@ -50,7 +49,7 @@
     """Connection Establishement with AWS -- VPC/EC2/CloudWatch"""
 #-----------------------------------------------------------------------------------------------------------------------------
     def setEnvironment(self):  
-    	try:
+        try:
             """Credentials for connecting to AWS-CloudWatch""" 
             #Reads from the environment variables
             self.AWS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
diff --git a/osm_mon/plugins/CloudWatch/metric_alarms.py b/osm_mon/plugins/CloudWatch/metric_alarms.py
index d8b3715..7b03f73 100644
--- a/osm_mon/plugins/CloudWatch/metric_alarms.py
+++ b/osm_mon/plugins/CloudWatch/metric_alarms.py
@@ -24,16 +24,7 @@
 __author__ = "Wajeeha Hamid"
 __date__   = "18-September-2017"
 
-import sys
-import os
-import re
-import datetime
-import random
-import json
 import logging
-from random import randint
-from operator import itemgetter
-from connection import Connection
 
 log = logging.getLogger(__name__)
 
@@ -67,7 +58,7 @@
         self.del_resp = dict()
 
     def config_alarm(self,cloudwatch_conn,create_info):
-    	"""Configure or Create a new alarm"""
+        """Configure or Create a new alarm"""
         inner_dict = dict()
         """ Alarm Name to ID Mapping """
         alarm_info = create_info['alarm_create_request']
@@ -128,7 +119,7 @@
 #-----------------------------------------------------------------------------------------------------------------------------
     def update_alarm(self,cloudwatch_conn,update_info):
 
-    	"""Update or reconfigure an alarm"""
+        """Update or reconfigure an alarm"""
         inner_dict = dict()
         alarm_info = update_info['alarm_update_request']
 
@@ -188,7 +179,7 @@
 #-----------------------------------------------------------------------------------------------------------------------------
     def delete_Alarm(self,cloudwatch_conn,del_info_all):
 
-    	"""Deletes an Alarm with specified alarm_id"""
+        """Deletes an Alarm with specified alarm_id"""
         inner_dict = dict()
         del_info = del_info_all['alarm_delete_request']
         status = self.is_present(cloudwatch_conn,del_info['alarm_uuid'])
@@ -261,7 +252,7 @@
 #-----------------------------------------------------------------------------------------------------------------------------
     def alarm_details(self,cloudwatch_conn,ack_info):
 
-	"""Get an individual alarm details specified by alarm_name"""
+        """Get an individual alarm details specified by alarm_name"""
         try:
             alarms_details=cloudwatch_conn.describe_alarm_history()  
             alarm_details_all = dict()     
@@ -306,10 +297,10 @@
                                 return alarm_details_dict                     
                   
         except Exception as e:
-        	log.error("Error getting alarm details: %s",str(e))           
+            log.error("Error getting alarm details: %s",str(e))
 #-----------------------------------------------------------------------------------------------------------------------------
     def is_present(self,cloudwatch_conn,alarm_id):
-    	"""Finding alarm from already configured alarms"""
+        """Finding alarm from already configured alarms"""
         alarm_info = dict()
         try:
             alarms = cloudwatch_conn.describe_alarms()
diff --git a/osm_mon/plugins/CloudWatch/metrics.py b/osm_mon/plugins/CloudWatch/metrics.py
index 5c6c976..1586359 100644
--- a/osm_mon/plugins/CloudWatch/metrics.py
+++ b/osm_mon/plugins/CloudWatch/metrics.py
@@ -26,9 +26,7 @@
 __author__ = "Wajeeha Hamid"
 __date__   = "18-Sept-2017"
 
-import sys
 import datetime
-import json
 import logging
 
 try:
@@ -71,7 +69,7 @@
     
     def metricsData(self,cloudwatch_conn,data_info):
 
-    	"""Getting Metrics Stats for an Hour.The datapoints are
+        """Getting Metrics Stats for an Hour.The datapoints are
         received after every one minute.
         Time interval can be modified using Timedelta value"""
 
@@ -182,7 +180,7 @@
                         metrics_info['resource_uuid'] = instance_id 
                         metrics_list.insert(itr,metrics_info)
                         itr += 1
-                    print metrics_list
+                    log.info(metrics_list)
                     return metrics_list
                 else: 
                     for alarm in alarms:
@@ -246,7 +244,7 @@
             if metric_status == True:
                 check_resp['status'] = True
             else:
-            	check_resp['status'] = False
+                check_resp['status'] = False
 
             return check_resp
 
diff --git a/osm_mon/plugins/CloudWatch/plugin_alarm.py b/osm_mon/plugins/CloudWatch/plugin_alarm.py
index c8ca955..c2ac6a7 100644
--- a/osm_mon/plugins/CloudWatch/plugin_alarm.py
+++ b/osm_mon/plugins/CloudWatch/plugin_alarm.py
@@ -22,19 +22,16 @@
 '''
 AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
 '''
+from io import open
+from osm_mon.core.message_bus.producer import KafkaProducer
+from osm_mon.plugins.CloudWatch.metric_alarms import MetricAlarm
+from osm_mon.plugins.CloudWatch.metrics import Metrics
 
 __author__ = "Wajeeha Hamid"
 __date__   = "18-September-2017"
 
-import sys
 import json
 import logging
-from jsmin import jsmin
-from connection import Connection
-from metric_alarms import MetricAlarm
-from metrics import Metrics
-sys.path.append("../../core/message_bus")
-from producer import KafkaProducer
 
 log = logging.getLogger(__name__)
 
@@ -90,12 +87,12 @@
                         log.debug("Alarm Already exists")
                         payload = json.dumps(config_resp)                                   
                         file = open('../../core/models/create_alarm_resp.json','wb').write((payload))
-                        self.producer.create_alarm_response(key='create_alarm_response',message=payload,topic = 'alarm_response')
+                        self.producer.create_alarm_response(key='create_alarm_response',message=payload)
              
                     else: 
                         payload = json.dumps(config_resp)                                
                         file = open('../../core/models/create_alarm_resp.json','wb').write((payload))                           
-                        self.producer.create_alarm_response(key='create_alarm_response',message=payload,topic = 'alarm_response')
+                        self.producer.create_alarm_response(key='create_alarm_response',message=payload)
                         log.info("New alarm created with alarm info: %s", config_resp)                           
              
                 else:
diff --git a/osm_mon/plugins/CloudWatch/plugin_metric.py b/osm_mon/plugins/CloudWatch/plugin_metric.py
index b6508a5..3b7029f 100644
--- a/osm_mon/plugins/CloudWatch/plugin_metric.py
+++ b/osm_mon/plugins/CloudWatch/plugin_metric.py
@@ -22,17 +22,13 @@
 '''
 AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
 '''
+from osm_mon.core.message_bus.producer import KafkaProducer
+from osm_mon.plugins.CloudWatch.metrics import Metrics
 
 __author__ = "Wajeeha Hamid"
 __date__   = "18-September-2017"
 
-import sys
 import json
-from connection import Connection
-from metric_alarms import MetricAlarm
-from metrics import Metrics
-sys.path.append("../../core/message_bus")
-from producer import KafkaProducer
 import logging
 
 log = logging.getLogger(__name__)
diff --git a/osm_mon/plugins/OpenStack/Aodh/alarming.py b/osm_mon/plugins/OpenStack/Aodh/alarming.py
index 09dc5f6..2c145ee 100644
--- a/osm_mon/plugins/OpenStack/Aodh/alarming.py
+++ b/osm_mon/plugins/OpenStack/Aodh/alarming.py
@@ -102,7 +102,7 @@
         resource_id = values['resource_uuid']
 
         if metric_name not in METRIC_MAPPINGS.keys():
-            log.warn("This metric is not supported.")
+            log.warning("This metric is not supported.")
             return None, False
 
         # Check for the required metric
@@ -119,11 +119,11 @@
                     url, auth_token, req_type="post", payload=payload)
                 return json.loads(new_alarm.text)['alarm_id'], True
             else:
-                log.warn("The required Gnocchi metric does not exist.")
+                log.warning("The required Gnocchi metric does not exist.")
                 return None, False
 
         except Exception as exc:
-            log.warn("Failed to create the alarm: %s", exc)
+            log.warning("Failed to create the alarm: %s", exc)
         return None, False
 
     def alarming(self, message):
@@ -163,8 +163,7 @@
                     cor_id=alarm_details['correlation_id'])
                 log.info("Response Message: %s", resp_message)
                 self._producer.create_alarm_response(
-                    'create_alarm_response', resp_message,
-                    'alarm_response')
+                    'create_alarm_response', resp_message)
             except Exception:
                 log.exception("Response creation failed:")
 
@@ -183,8 +182,7 @@
                     cor_id=list_details['correlation_id'])
                 log.info("Response Message: %s", resp_message)
                 self._producer.list_alarm_response(
-                    'list_alarm_response', resp_message,
-                    'alarm_response')
+                    'list_alarm_response', resp_message)
             except Exception:
                 log.exception("Failed to send a valid response back.")
 
@@ -203,8 +201,7 @@
                     cor_id=request_details['correlation_id'])
                 log.info("Response message: %s", resp_message)
                 self._producer.delete_alarm_response(
-                    'delete_alarm_response', resp_message,
-                    'alarm_response')
+                    'delete_alarm_response', resp_message)
             except Exception:
                 log.exception("Failed to create delete response: ")
 
@@ -219,7 +216,7 @@
             if response is True:
                 log.info("Acknowledged the alarm and cleared it.")
             else:
-                log.warn("Failed to acknowledge/clear the alarm.")
+                log.warning("Failed to acknowledge/clear the alarm.")
 
         elif message.key == "update_alarm_request":
             # Update alarm configurations
@@ -236,8 +233,7 @@
                     status=status)
                 log.info("Response message: %s", resp_message)
                 self._producer.update_alarm_response(
-                    'update_alarm_response', resp_message,
-                    'alarm_response')
+                    'update_alarm_response', resp_message)
             except Exception:
                 log.exception("Failed to send an update response: ")
 
@@ -274,14 +270,14 @@
         try:
             resource = list_details['resource_uuid']
         except KeyError as exc:
-            log.warn("Resource id not specified for list request: %s", exc)
+            log.warning("Resource id not specified for list request: %s", exc)
             return None
 
         # Checking what fields are specified for a list request
         try:
             name = list_details['alarm_name'].lower()
             if name not in ALARM_NAMES.keys():
-                log.warn("This alarm is not supported, won't be used!")
+                log.warning("This alarm is not supported, won't be used!")
                 name = None
         except KeyError as exc:
             log.info("Alarm name isn't specified.")
@@ -373,7 +369,7 @@
             resource_id = rule['resource_id']
             metric_name = [key for key, value in six.iteritems(METRIC_MAPPINGS) if value == rule['metric']][0]
         except Exception as exc:
-            log.warn("Failed to retrieve existing alarm info: %s.\
+            log.warning("Failed to retrieve existing alarm info: %s.\
                      Can only update OSM alarms.", exc)
             return None, False
 
@@ -391,7 +387,7 @@
 
                 return json.loads(update_alarm.text)['alarm_id'], True
             except Exception as exc:
-                log.warn("Alarm update could not be performed: %s", exc)
+                log.warning("Alarm update could not be performed: %s", exc)
                 return None, False
         return None, False
 
@@ -439,7 +435,7 @@
                                   'alarm_actions': [cfg.OS_NOTIFIER_URI], })
             return payload
         except KeyError as exc:
-            log.warn("Alarm is not configured correctly: %s", exc)
+            log.warning("Alarm is not configured correctly: %s", exc)
         return None
 
     def get_alarm_state(self, endpoint, auth_token, alarm_id):
@@ -451,7 +447,7 @@
                 url, auth_token, req_type="get")
             return json.loads(alarm_state.text)
         except Exception as exc:
-            log.warn("Failed to get the state of the alarm:%s", exc)
+            log.warning("Failed to get the state of the alarm:%s", exc)
         return None
 
     def check_for_metric(self, auth_token, metric_endpoint, m_name, r_id):
diff --git a/osm_mon/plugins/OpenStack/Aodh/notifier.py b/osm_mon/plugins/OpenStack/Aodh/notifier.py
index c09ad9e..55178fd 100644
--- a/osm_mon/plugins/OpenStack/Aodh/notifier.py
+++ b/osm_mon/plugins/OpenStack/Aodh/notifier.py
@@ -70,6 +70,10 @@
         # Gets the size of data
         content_length = int(self.headers['Content-Length'])
         post_data = self.rfile.read(content_length)
+        try:
+            post_data = post_data.decode()
+        except AttributeError:
+            pass
         self.wfile.write("<html><body><h1>POST!</h1></body></tml>")
         log.info("This alarm was triggered: %s", json.loads(post_data))
 
@@ -122,9 +126,9 @@
                     except Exception as exc:
                         log.exception("Couldn't notify SO of the alarm:")
                 else:
-                    log.warn("No resource_id for alarm; no SO response sent.")
+                    log.warning("No resource_id for alarm; no SO response sent.")
             else:
-                log.warn("Authentication failure; SO notification not sent.")
+                log.warning("Authentication failure; SO notification not sent.")
         except:
             log.exception("Could not notify alarm.")
 
@@ -138,7 +142,7 @@
         log.info("Starting alarm notifier server on port: %s", port)
         httpd.serve_forever()
     except Exception as exc:
-        log.warn("Failed to start webserver, %s", exc)
+        log.warning("Failed to start webserver, %s", exc)
 
 
 if __name__ == "__main__":
diff --git a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
index b41b5c0..9e69ee7 100644
--- a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
+++ b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
@@ -87,6 +87,9 @@
 
         endpoint = Common.get_endpoint("metric", values['vim_uuid'])
 
+        if 'metric_name' in values and values['metric_name'] not in METRIC_MAPPINGS.keys():
+            raise ValueError('Metric ' + values['metric_name'] + ' is not supported.')
+
         if message.key == "create_metric_request":
             # Configure metric
             metric_details = values['metric_create']
@@ -101,10 +104,9 @@
                     metric_id=metric_id, r_id=resource_id)
                 log.info("Response messages: %s", resp_message)
                 self._producer.create_metrics_resp(
-                    'create_metric_response', resp_message,
-                    'metric_response')
+                    'create_metric_response', resp_message)
             except Exception as exc:
-                log.warn("Failed to create response: %s", exc)
+                log.warning("Failed to create response: %s", exc)
 
         elif message.key == "read_metric_data_request":
             # Read all metric data related to a specified metric
@@ -115,7 +117,7 @@
             try:
 
                 metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
-                                          values['resource_uuid'])
+                                               values['resource_uuid'])
                 resp_message = self._response.generate_response(
                     'read_metric_data_response',
                     m_id=metric_id,
@@ -125,10 +127,9 @@
                     times=timestamps, metrics=metric_data)
                 log.info("Response message: %s", resp_message)
                 self._producer.read_metric_data_response(
-                    'read_metric_data_response', resp_message,
-                    'metric_response')
+                    'read_metric_data_response', resp_message)
             except Exception as exc:
-                log.warn("Failed to send read metric response:%s", exc)
+                log.warning("Failed to send read metric response:%s", exc)
 
         elif message.key == "delete_metric_request":
             # delete the specified metric in the request
@@ -146,15 +147,14 @@
                     cor_id=values['correlation_id'])
                 log.info("Response message: %s", resp_message)
                 self._producer.delete_metric_response(
-                    'delete_metric_response', resp_message,
-                    'metric_response')
+                    'delete_metric_response', resp_message)
             except Exception as exc:
-                log.warn("Failed to send delete response:%s", exc)
+                log.warning("Failed to send delete response:%s", exc)
 
         elif message.key == "update_metric_request":
             # Gnocchi doesn't support configuration updates
             # Log and send a response back to this effect
-            log.warn("Gnocchi doesn't support metric configuration\
+            log.warning("Gnocchi doesn't support metric configuration\
                       updates.")
             req_details = values['metric_create']
             metric_name = req_details['metric_name']
@@ -170,10 +170,9 @@
                     r_id=resource_id, m_id=metric_id)
                 log.info("Response message: %s", resp_message)
                 self._producer.update_metric_response(
-                    'update_metric_response', resp_message,
-                    'metric_response')
+                    'update_metric_response', resp_message)
             except Exception as exc:
-                log.warn("Failed to send an update response:%s", exc)
+                log.warning("Failed to send an update response:%s", exc)
 
         elif message.key == "list_metric_request":
             list_details = values['metrics_list_request']
@@ -188,13 +187,12 @@
                     cor_id=list_details['correlation_id'])
                 log.info("Response message: %s", resp_message)
                 self._producer.list_metric_response(
-                    'list_metric_response', resp_message,
-                    'metric_response')
+                    'list_metric_response', resp_message)
             except Exception as exc:
-                log.warn("Failed to send a list response:%s", exc)
+                log.warning("Failed to send a list response:%s", exc)
 
         else:
-            log.warn("Unknown key, no action will be performed.")
+            log.warning("Unknown key, no action will be performed.")
 
         return
 
@@ -203,13 +201,13 @@
         try:
             resource_id = values['resource_uuid']
         except KeyError:
-            log.warn("Resource is not defined correctly.")
+            log.warning("Resource is not defined correctly.")
             return None, None, False
 
         # Check/Normalize metric name
         norm_name, metric_name = self.get_metric_name(values)
         if metric_name is None:
-            log.warn("This metric is not supported by this plugin.")
+            log.warning("This metric is not supported by this plugin.")
             return None, resource_id, False
 
         # Check for an existing metric for this resource
@@ -261,7 +259,7 @@
 
                     return metric_id, new_resource_id, True
                 except Exception as exc:
-                    log.warn("Failed to create a new resource:%s", exc)
+                    log.warning("Failed to create a new resource:%s", exc)
             return None, None, False
 
         else:
@@ -277,12 +275,12 @@
             result = Common.perform_request(
                 url, auth_token, req_type="delete")
             if str(result.status_code) == "404":
-                log.warn("Failed to delete the metric.")
+                log.warning("Failed to delete the metric.")
                 return False
             else:
                 return True
         except Exception as exc:
-            log.warn("Failed to carry out delete metric request:%s", exc)
+            log.warning("Failed to carry out delete metric request:%s", exc)
         return False
 
     def list_metrics(self, endpoint, auth_token, values):
@@ -293,7 +291,7 @@
             # Check if the metric_name was specified for the list
             metric_name = values['metric_name'].lower()
             if metric_name not in METRIC_MAPPINGS.keys():
-                log.warn("This metric is not supported, won't be listed.")
+                log.warning("This metric is not supported, won't be listed.")
                 metric_name = None
         except KeyError as exc:
             log.info("Metric name is not specified: %s", exc)
@@ -348,7 +346,7 @@
                 log.info("There are no metrics available")
                 return []
         except Exception as exc:
-            log.warn("Failed to generate any metric list. %s", exc)
+            log.warning("Failed to generate any metric list. %s", exc)
         return None
 
     def get_metric_id(self, endpoint, auth_token, metric_name, resource_id):
@@ -379,8 +377,9 @@
         timestamps = []
         data = []
         try:
-            #get metric_id
-            metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']], values['resource_uuid'])
+            # get metric_id
+            metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
+                                           values['resource_uuid'])
             # Try and collect measures
             collection_unit = values['collection_unit'].upper()
             collection_period = values['collection_period']
@@ -412,7 +411,7 @@
 
             return timestamps, data
         except Exception as exc:
-            log.warn("Failed to gather specified measures: %s", exc)
+            log.warning("Failed to gather specified measures: %s", exc)
         return timestamps, data
 
     def response_list(self, metric_list, metric_name=None, resource=None):
@@ -424,7 +423,7 @@
             # Only list OSM metrics
             name = None
             if row['name'] in METRIC_MAPPINGS.values():
-                for k,v in six.iteritems(METRIC_MAPPINGS):
+                for k, v in six.iteritems(METRIC_MAPPINGS):
                     if row['name'] == v:
                         name = k
                 metric = {"metric_name": name,
diff --git a/osm_mon/plugins/OpenStack/response.py b/osm_mon/plugins/OpenStack/response.py
index 75d907e..bd1133e 100644
--- a/osm_mon/plugins/OpenStack/response.py
+++ b/osm_mon/plugins/OpenStack/response.py
@@ -58,7 +58,7 @@
         elif key == "notify_alarm":
             message = self.notify_alarm(**kwargs)
         else:
-            log.warn("Failed to generate a valid response message.")
+            log.warning("Failed to generate a valid response message.")
             message = None
 
         return message
diff --git a/osm_mon/plugins/vRealiseOps/plugin_receiver.py b/osm_mon/plugins/vRealiseOps/plugin_receiver.py
index af56372..9526e64 100644
--- a/osm_mon/plugins/vRealiseOps/plugin_receiver.py
+++ b/osm_mon/plugins/vRealiseOps/plugin_receiver.py
@@ -26,14 +26,16 @@
 responds using producer for vROPs
 """
 
-import sys
-import os
 import json
 import logging
+import os
+import sys
 import traceback
-from mon_plugin_vrops import MonPlugin
-from kafka_consumer_vrops import vROP_KafkaConsumer
+
+
 #Core producer
+from osm_mon.plugins.vRealiseOps.mon_plugin_vrops import MonPlugin
+
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
 from osm_mon.core.message_bus.producer import KafkaProducer
 #from core.message_bus.producer import KafkaProducer
diff --git a/osm_mon/test/CloudWatch/unit_tests_alarms.py b/osm_mon/test/CloudWatch/unit_tests_alarms.py
index e34586b..ae036cf 100644
--- a/osm_mon/test/CloudWatch/unit_tests_alarms.py
+++ b/osm_mon/test/CloudWatch/unit_tests_alarms.py
@@ -56,7 +56,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info
+                print(info)
                 time.sleep(1)
                 self.assertTrue(info['alarm_create_response']['status'])
                 return        
@@ -73,7 +73,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid_delete1.json",'delete_alarm_request','','alarm_request')
                 self.assertTrue(info['alarm_create_response']['status'])   
@@ -91,7 +91,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid_delete2.json",'delete_alarm_request', '','alarm_request')
                 self.assertTrue(info['alarm_create_response']['status']) 
@@ -109,7 +109,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info,"---"
+                print(info, "---")
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
                 self.assertEqual(info, None)  
@@ -127,7 +127,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
                 self.assertTrue(info['alarm_create_response']['status']) 
@@ -145,7 +145,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info,"---"
+                print(info, "---")
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
                 self.assertEqual(info, None)
@@ -163,7 +163,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
                 self.assertTrue(info['alarm_create_response']['status']) 
@@ -181,7 +181,7 @@
         for message in _consumer:
             if message.key == "create_alarm_response": 
                 info = json.loads(json.loads(message.value))
-                print info
+                print(info)
                 time.sleep(1)
                 self.assertEqual(info,None) 
                 return                 
@@ -203,7 +203,7 @@
         for message in _consumer:
             if message.key == "update_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid_delete4.json",'delete_alarm_request', '','alarm_request')
                 self.assertTrue(info['alarm_update_response']['status'])
@@ -221,7 +221,7 @@
         for message in _consumer:
             if message.key == "update_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)
                 self.assertEqual(info,None)
                 return
@@ -239,7 +239,7 @@
         for message in _consumer:
             if message.key == "update_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
                 self.assertTrue(info['alarm_update_response']['status'])
@@ -257,7 +257,7 @@
         for message in _consumer:
             if message.key == "update_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)
                 self.assertEqual(info,None)
                 return            
@@ -275,7 +275,7 @@
         for message in _consumer:
             if message.key == "update_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)
                 producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
                 self.assertTrue(info['alarm_update_response']['status'])
@@ -297,7 +297,7 @@
         for message in _consumer:
             if message.key == "delete_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)                
                 self.assertTrue(info['alarm_deletion_response']['status'])
                 return
@@ -314,7 +314,7 @@
         for message in _consumer:
             if message.key == "delete_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)                
                 self.assertEqual(info,None)
                 return             
@@ -334,7 +334,7 @@
         for message in _consumer:
             if message.key == "list_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)                
                 self.assertEqual(type(info),dict)
                 return
@@ -351,7 +351,7 @@
         for message in _consumer:
             if message.key == "list_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)                
                 self.assertEqual(type(info),dict)
                 return
@@ -368,7 +368,7 @@
         for message in _consumer:
             if message.key == "list_alarm_response": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)                
                 self.assertEqual(type(info),dict)
                 return
@@ -389,7 +389,7 @@
         for message in _consumer:
             if message.key == "notify_alarm": 
                 info = json.loads(json.loads(json.loads(message.value)))
-                print info
+                print(info)
                 time.sleep(1)                
                 self.assertEqual(type(info),dict)
                 return                
diff --git a/osm_mon/test/integration/__init__.py b/osm_mon/test/OpenStack/integration/__init__.py
similarity index 100%
rename from osm_mon/test/integration/__init__.py
rename to osm_mon/test/OpenStack/integration/__init__.py
diff --git a/osm_mon/test/integration/test_alarm_integration.py b/osm_mon/test/OpenStack/integration/test_alarm_integration.py
similarity index 85%
rename from osm_mon/test/integration/test_alarm_integration.py
rename to osm_mon/test/OpenStack/integration/test_alarm_integration.py
index b443d6a..b0cfd32 100644
--- a/osm_mon/test/integration/test_alarm_integration.py
+++ b/osm_mon/test/OpenStack/integration/test_alarm_integration.py
@@ -32,21 +32,28 @@
 from kafka.errors import KafkaError
 
 from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import DatabaseManager
+from osm_mon.core.database import DatabaseManager, VimCredentials
 from osm_mon.core.message_bus.producer import KafkaProducer as prod
 from osm_mon.plugins.OpenStack import response
 from osm_mon.plugins.OpenStack.Aodh import alarming
 from osm_mon.plugins.OpenStack.common import Common
-from keystoneclient.v3 import client
 
 log = logging.getLogger(__name__)
 
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
 
 class AlarmIntegrationTest(unittest.TestCase):
     def setUp(self):
         try:
-            self.producer = KafkaProducer(bootstrap_servers='localhost:9092')
+            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
+                                          key_serializer=str.encode,
+                                          value_serializer=str.encode
+                                          )
             self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
+                                              key_deserializer=bytes.decode,
+                                              value_deserializer=bytes.decode,
                                               auto_offset_reset='earliest',
                                               consumer_timeout_ms=60000)
             self.req_consumer.subscribe(['alarm_request'])
@@ -58,10 +65,11 @@
 
     @mock.patch.object(Common, "get_auth_token", mock.Mock())
     @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
     @mock.patch.object(prod, "update_alarm_response")
     @mock.patch.object(alarming.Alarming, "update_alarm")
     @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_update_alarm_req(self, resp, update_alarm, update_resp):
+    def test_update_alarm_req(self, resp, update_alarm, update_resp, get_creds):
         """Test Aodh update alarm request message from KafkaProducer."""
         # Set-up message, producer and consumer for tests
         payload = {"vim_type": "OpenSTACK",
@@ -71,6 +79,8 @@
                         "alarm_uuid": "alarm_id",
                         "metric_uuid": "metric_id"}}
 
+        get_creds.return_value = mock_creds
+
         self.producer.send('alarm_request', key="update_alarm_request",
                            value=json.dumps(payload))
 
@@ -86,7 +96,7 @@
                     'update_alarm_response', alarm_id="alarm_id", cor_id=123,
                     status=True)
                 update_resp.assert_called_with(
-                    'update_alarm_response', resp.return_value, 'alarm_response')
+                    'update_alarm_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
@@ -94,10 +104,11 @@
     @mock.patch.object(DatabaseManager, "save_alarm", mock.Mock())
     @mock.patch.object(Common, "get_auth_token", mock.Mock())
     @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
     @mock.patch.object(prod, "create_alarm_response")
     @mock.patch.object(alarming.Alarming, "configure_alarm")
     @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_create_alarm_req(self, resp, config_alarm, create_resp):
+    def test_create_alarm_req(self, resp, config_alarm, create_resp, get_creds):
         """Test Aodh create alarm request message from KafkaProducer."""
         # Set-up message, producer and consumer for tests
         payload = {"vim_type": "OpenSTACK",
@@ -109,6 +120,8 @@
                         "resource_uuid": "my_resource",
                         "severity": "WARNING"}}
 
+        get_creds.return_value = mock_creds
+
         self.producer.send('alarm_request', key="create_alarm_request",
                            value=json.dumps(payload))
 
@@ -124,17 +137,18 @@
                     'create_alarm_response', status=True, alarm_id="alarm_id",
                     cor_id=123)
                 create_resp.assert_called_with(
-                    'create_alarm_response', resp.return_value, 'alarm_response')
+                    'create_alarm_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
 
     @mock.patch.object(Common, "get_auth_token", mock.Mock())
     @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
     @mock.patch.object(prod, "list_alarm_response")
     @mock.patch.object(alarming.Alarming, "list_alarms")
     @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_list_alarm_req(self, resp, list_alarm, list_resp):
+    def test_list_alarm_req(self, resp, list_alarm, list_resp, get_creds):
         """Test Aodh list alarm request message from KafkaProducer."""
         # Set-up message, producer and consumer for tests
         payload = {"vim_type": "OpenSTACK",
@@ -146,6 +160,8 @@
         self.producer.send('alarm_request', key="list_alarm_request",
                            value=json.dumps(payload))
 
+        get_creds.return_value = mock_creds
+
         for message in self.req_consumer:
             # Check the vim desired by the message
             if message.key == "list_alarm_request":
@@ -159,17 +175,18 @@
                     cor_id=123)
                 # Producer attempts to send the response message back to the SO
                 list_resp.assert_called_with(
-                    'list_alarm_response', resp.return_value, 'alarm_response')
+                    'list_alarm_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
 
     @mock.patch.object(Common, "get_auth_token", mock.Mock())
     @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
     @mock.patch.object(alarming.Alarming, "delete_alarm")
     @mock.patch.object(prod, "delete_alarm_response")
     @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_delete_alarm_req(self, resp, del_resp, del_alarm):
+    def test_delete_alarm_req(self, resp, del_resp, del_alarm, get_creds):
         """Test Aodh delete alarm request message from KafkaProducer."""
         # Set-up message, producer and consumer for tests
         payload = {"vim_type": "OpenSTACK",
@@ -181,6 +198,8 @@
         self.producer.send('alarm_request', key="delete_alarm_request",
                            value=json.dumps(payload))
 
+        get_creds.return_value = mock_creds
+
         for message in self.req_consumer:
             # Check the vim desired by the message
             if message.key == "delete_alarm_request":
@@ -191,15 +210,16 @@
                     'delete_alarm_response', alarm_id="alarm_id",
                     status=del_alarm.return_value, cor_id=123)
                 del_resp.assert_called_with(
-                    'delete_alarm_response', resp.return_value, 'alarm_response')
+                    'delete_alarm_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
 
     @mock.patch.object(Common, "get_auth_token", mock.Mock())
     @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
     @mock.patch.object(alarming.Alarming, "update_alarm_state")
-    def test_ack_alarm_req(self, ack_alarm):
+    def test_ack_alarm_req(self, ack_alarm, get_creds):
         """Test Aodh acknowledge alarm request message from KafkaProducer."""
         # Set-up message, producer and consumer for tests
         payload = {"vim_type": "OpenSTACK",
@@ -210,10 +230,12 @@
         self.producer.send('alarm_request', key="acknowledge_alarm",
                            value=json.dumps(payload))
 
+        get_creds.return_value = mock_creds
+
         for message in self.req_consumer:
             # Check the vim desired by the message
             if message.key == "acknowledge_alarm":
                 self.alarms.alarming(message)
                 return
 
-        self.fail("No message received in consumer")
\ No newline at end of file
+        self.fail("No message received in consumer")
diff --git a/osm_mon/test/integration/test_metric_integration.py b/osm_mon/test/OpenStack/integration/test_metric_integration.py
similarity index 89%
rename from osm_mon/test/integration/test_metric_integration.py
rename to osm_mon/test/OpenStack/integration/test_metric_integration.py
index 66f4f0a..c130973 100644
--- a/osm_mon/test/integration/test_metric_integration.py
+++ b/osm_mon/test/OpenStack/integration/test_metric_integration.py
@@ -52,8 +52,13 @@
         self.openstack_auth = Common()
 
         try:
-            self.producer = KafkaProducer(bootstrap_servers='localhost:9092')
+            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
+                                          key_serializer=str.encode,
+                                          value_serializer=str.encode
+                                          )
             self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
+                                              key_deserializer=bytes.decode,
+                                              value_deserializer=bytes.decode,
                                               auto_offset_reset='earliest',
                                               consumer_timeout_ms=60000)
             self.req_consumer.subscribe(['metric_request'])
@@ -72,14 +77,13 @@
                    "vim_uuid": "1",
                    "correlation_id": 123,
                    "metric_create":
-                       {"metric_name": "my_metric",
+                       {"metric_name": "cpu_utilization",
                         "resource_uuid": "resource_id"}}
 
         self.producer.send('metric_request', key="create_metric_request",
                            value=json.dumps(payload))
 
         for message in self.req_consumer:
-            print(message)
             # Check the vim desired by the message
             vim_type = json.loads(message.value)["vim_type"].lower()
             if vim_type == "openstack":
@@ -92,7 +96,7 @@
                     'create_metric_response', status=True, cor_id=123,
                     metric_id="metric_id", r_id="resource_id")
                 create_resp.assert_called_with(
-                    'create_metric_response', resp.return_value, 'metric_response')
+                    'create_metric_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
@@ -108,8 +112,7 @@
         payload = {"vim_type": "OpenSTACK",
                    "vim_uuid": "1",
                    "correlation_id": 123,
-                   "metric_uuid": "metric_id",
-                   "metric_name": "metric_name",
+                   "metric_name": "cpu_utilization",
                    "resource_uuid": "resource_id"}
 
         self.producer.send('metric_request', key="delete_metric_request",
@@ -123,11 +126,11 @@
 
                 # A response message is generated and sent by MON's producer
                 resp.assert_called_with(
-                    'delete_metric_response', m_id="metric_id",
-                    m_name="metric_name", status=True, r_id="resource_id",
+                    'delete_metric_response', m_id=None,
+                    m_name="cpu_utilization", status=True, r_id="resource_id",
                     cor_id=123)
                 del_resp.assert_called_with(
-                    'delete_metric_response', resp.return_value, 'metric_response')
+                    'delete_metric_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
@@ -143,8 +146,7 @@
         payload = {"vim_type": "OpenSTACK",
                    "vim_uuid": "test_id",
                    "correlation_id": 123,
-                   "metric_uuid": "metric_id",
-                   "metric_name": "metric_name",
+                   "metric_name": "cpu_utilization",
                    "resource_uuid": "resource_id"}
 
         self.producer.send('metric_request', key="read_metric_data_request",
@@ -159,12 +161,11 @@
 
                 # A response message is generated and sent by MON's producer
                 resp.assert_called_with(
-                    'read_metric_data_response', m_id="metric_id",
-                    m_name="metric_name", r_id="resource_id", cor_id=123, times=[],
+                    'read_metric_data_response', m_id=None,
+                    m_name="cpu_utilization", r_id="resource_id", cor_id=123, times=[],
                     metrics=[])
                 read_resp.assert_called_with(
-                    'read_metric_data_response', resp.return_value,
-                    'metric_response')
+                    'read_metric_data_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
@@ -196,7 +197,7 @@
                 resp.assert_called_with(
                     'list_metric_response', m_list=[], cor_id=123)
                 list_resp.assert_called_with(
-                    'list_metric_response', resp.return_value, 'metric_response')
+                    'list_metric_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
@@ -232,7 +233,7 @@
                     'update_metric_response', status=False, cor_id=123,
                     r_id="resource_id", m_id="metric_id")
                 update_resp.assert_called_with(
-                    'update_metric_response', resp.return_value, 'metric_response')
+                    'update_metric_response', resp.return_value)
 
                 return
         self.fail("No message received in consumer")
diff --git a/osm_mon/test/integration/test_notify_alarm.py b/osm_mon/test/OpenStack/integration/test_notify_alarm.py
similarity index 97%
rename from osm_mon/test/integration/test_notify_alarm.py
rename to osm_mon/test/OpenStack/integration/test_notify_alarm.py
index 4f44572..1b2c64c 100644
--- a/osm_mon/test/integration/test_notify_alarm.py
+++ b/osm_mon/test/OpenStack/integration/test_notify_alarm.py
@@ -21,6 +21,8 @@
 ##
 """Tests for all common OpenStack methods."""
 
+
+from __future__ import unicode_literals
 import json
 import logging
 import socket
@@ -76,6 +78,10 @@
         self._set_headers()
         content_length = int(self.headers['Content-Length'])
         post_data = self.rfile.read(content_length)
+        try:
+            post_data = post_data.decode()
+        except AttributeError:
+            pass
         self.notify_alarm(json.loads(post_data))
 
     def notify_alarm(self, values):
diff --git a/osm_mon/test/functional/test_vim_account.py b/osm_mon/test/OpenStack/integration/test_vim_account.py
similarity index 72%
rename from osm_mon/test/functional/test_vim_account.py
rename to osm_mon/test/OpenStack/integration/test_vim_account.py
index 2a7a722..da34bb2 100644
--- a/osm_mon/test/functional/test_vim_account.py
+++ b/osm_mon/test/OpenStack/integration/test_vim_account.py
@@ -26,31 +26,20 @@
 
 import json
 import logging
-import time
 import unittest
 
-from kafka import KafkaConsumer
-from kafka import KafkaProducer
-from kafka.errors import KafkaError
-
 from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager
 
 log = logging.getLogger(__name__)
 
 
 class VimAccountTest(unittest.TestCase):
     def setUp(self):
-        try:
-            self.producer = KafkaProducer(bootstrap_servers='localhost:9092')
-            self.consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
-                                          group_id='osm_mon')
-            self.consumer.subscribe(['vim_account'])
-            self.auth_manager = AuthManager()
-        except KafkaError:
-            self.skipTest('Kafka server not present.')
+        self.auth_manager = AuthManager()
+        self.database_manager = DatabaseManager()
+        self.database_manager.create_tables()
 
-    @unittest.skip("Correct support for functional tests is pending.")
-    # TODO: Refactor
     def test_create_edit_delete_vim_account(self):
         """Test vim_account creation message from KafkaProducer."""
         # Set-up message, producer and consumer for tests
@@ -67,13 +56,10 @@
                     "foo": "bar"
                 }
         }
+        self.auth_manager.store_auth_credentials(create_payload)
 
-        self.producer.send('vim_account', key=b'create', value=json.dumps(create_payload))
-
-        self.producer.flush()
-
-        time.sleep(1)
         creds = self.auth_manager.get_credentials('test_id')
+
         self.assertIsNotNone(creds)
         self.assertEqual(creds.name, create_payload['name'])
         self.assertEqual(json.loads(creds.config), create_payload['config'])
@@ -93,12 +79,10 @@
                 }
         }
 
-        self.producer.send('vim_account', key=b'edit', value=json.dumps(edit_payload))
+        self.auth_manager.store_auth_credentials(edit_payload)
 
-        self.producer.flush()
-
-        time.sleep(1)
         creds = self.auth_manager.get_credentials('test_id')
+
         self.assertEqual(creds.name, edit_payload['name'])
         self.assertEqual(json.loads(creds.config), edit_payload['config'])
 
@@ -106,10 +90,7 @@
             "_id": "test_id"
         }
 
-        self.producer.send('vim_account', key=b'delete', value=json.dumps(delete_payload))
+        self.auth_manager.delete_auth_credentials(delete_payload)
 
-        self.producer.flush()
-
-        time.sleep(1)
         creds = self.auth_manager.get_credentials('test_id')
         self.assertIsNone(creds)
diff --git a/osm_mon/test/OpenStack/unit/__init__.py b/osm_mon/test/OpenStack/unit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/osm_mon/test/OpenStack/unit/__init__.py
diff --git a/osm_mon/test/OpenStack/test_alarm_req.py b/osm_mon/test/OpenStack/unit/test_alarm_req.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_alarm_req.py
rename to osm_mon/test/OpenStack/unit/test_alarm_req.py
diff --git a/osm_mon/test/OpenStack/test_alarming.py b/osm_mon/test/OpenStack/unit/test_alarming.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_alarming.py
rename to osm_mon/test/OpenStack/unit/test_alarming.py
diff --git a/osm_mon/test/OpenStack/test_common.py b/osm_mon/test/OpenStack/unit/test_common.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_common.py
rename to osm_mon/test/OpenStack/unit/test_common.py
diff --git a/osm_mon/test/OpenStack/test_metric_calls.py b/osm_mon/test/OpenStack/unit/test_metric_calls.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_metric_calls.py
rename to osm_mon/test/OpenStack/unit/test_metric_calls.py
diff --git a/osm_mon/test/OpenStack/test_metric_req.py b/osm_mon/test/OpenStack/unit/test_metric_req.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_metric_req.py
rename to osm_mon/test/OpenStack/unit/test_metric_req.py
diff --git a/osm_mon/test/OpenStack/test_notifier.py b/osm_mon/test/OpenStack/unit/test_notifier.py
similarity index 99%
rename from osm_mon/test/OpenStack/test_notifier.py
rename to osm_mon/test/OpenStack/unit/test_notifier.py
index 0f96e71..a1ce1c6 100644
--- a/osm_mon/test/OpenStack/test_notifier.py
+++ b/osm_mon/test/OpenStack/unit/test_notifier.py
@@ -23,9 +23,9 @@
 
 import json
 import unittest
-from BaseHTTPServer import BaseHTTPRequestHandler
 
 import mock
+from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
 
 from osm_mon.core.message_bus.producer import KafkaProducer
 from osm_mon.core.settings import Config
diff --git a/osm_mon/test/OpenStack/test_responses.py b/osm_mon/test/OpenStack/unit/test_responses.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_responses.py
rename to osm_mon/test/OpenStack/unit/test_responses.py
diff --git a/osm_mon/test/OpenStack/test_settings.py b/osm_mon/test/OpenStack/unit/test_settings.py
similarity index 100%
rename from osm_mon/test/OpenStack/test_settings.py
rename to osm_mon/test/OpenStack/unit/test_settings.py
diff --git a/requirements.txt b/requirements.txt
index be103aa..98752ea 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -19,7 +19,6 @@
 # For those usages not covered by the Apache License, Version 2.0 please
 # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
 stdeb==0.8.*
-MySQL-python==1.2.*
 kafka==1.3.*
 lxml==4.2.*
 requests==2.18.*
@@ -38,4 +37,5 @@
 six==1.11.*
 bottle==0.12.*
 peewee==3.1.*
-pyyaml==3.*
\ No newline at end of file
+pyyaml==3.*
+git+https://osm.etsi.org/gerrit/osm/common.git@857731b#egg=osm-common
\ No newline at end of file