Readds plugins code and respective tests 29/6929/4
author     Benjamin Diaz <bdiaz@whitestack.com>
Wed, 21 Nov 2018 13:52:05 +0000 (10:52 -0300)
committer  Benjamin Diaz <bdiaz@whitestack.com>
Wed, 21 Nov 2018 15:50:54 +0000 (12:50 -0300)
Signed-off-by: Benjamin Diaz <bdiaz@whitestack.com>
Change-Id: I80ad624892b513818b1c989494884cc8047e377e

100 files changed:
osm_mon/core/database.py
osm_mon/core/models/access_cred_CloudWatch.json [new file with mode: 0644]
osm_mon/core/models/access_cred_OpenStack.json [new file with mode: 0644]
osm_mon/core/models/access_cred_vROPs.json [new file with mode: 0644]
osm_mon/core/models/acknowledge_alarm_req.json [new file with mode: 0644]
osm_mon/core/models/create_metric_req.json [new file with mode: 0644]
osm_mon/core/models/create_metric_resp.json [new file with mode: 0644]
osm_mon/core/models/delete_metric_req.json [new file with mode: 0644]
osm_mon/core/models/delete_metric_resp.json [new file with mode: 0644]
osm_mon/core/models/list_metric_req.json [new file with mode: 0644]
osm_mon/core/models/list_metric_resp.json [new file with mode: 0644]
osm_mon/core/models/read_metric_data_req.json [new file with mode: 0644]
osm_mon/core/models/read_metric_data_resp.json [new file with mode: 0644]
osm_mon/core/models/update_metric_req.json [new file with mode: 0644]
osm_mon/core/models/update_metric_resp.json [new file with mode: 0644]
osm_mon/core/settings.py
osm_mon/plugins/CloudWatch/__init__.py [new file with mode: 0644]
osm_mon/plugins/CloudWatch/access_credentials.py [new file with mode: 0644]
osm_mon/plugins/CloudWatch/connection.py [new file with mode: 0644]
osm_mon/plugins/CloudWatch/metric_alarms.py [new file with mode: 0644]
osm_mon/plugins/CloudWatch/metrics.py [new file with mode: 0644]
osm_mon/plugins/CloudWatch/plugin_alarm.py [new file with mode: 0644]
osm_mon/plugins/CloudWatch/plugin_metric.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Aodh/__init__.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Aodh/alarm_handler.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Aodh/notifier.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Gnocchi/__init__.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Gnocchi/metric_handler.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/__init__.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/common.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/response.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/singleton.py [new file with mode: 0644]
osm_mon/plugins/__init__.py [new file with mode: 0644]
osm_mon/plugins/vRealiseOps/__init__.py [new file with mode: 0644]
osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py [new file with mode: 0644]
osm_mon/plugins/vRealiseOps/plugin_receiver.py [new file with mode: 0644]
osm_mon/plugins/vRealiseOps/vROPs_Webservice/__init__.py [new file with mode: 0644]
osm_mon/plugins/vRealiseOps/vROPs_Webservice/install.sh [new file with mode: 0755]
osm_mon/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice [new file with mode: 0755]
osm_mon/plugins/vRealiseOps/vrops_config.xml [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/__init__.py [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/operation_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/operation_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/statistic_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/statistic_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/name_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/name_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/operation_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/operation_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/statistic_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/statistic_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/unit_tests_alarms.py [new file with mode: 0644]
osm_mon/tests/plugins/CloudWatch/unit_tests_metrics.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/__init__.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/integration/__init__.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/integration/test_alarm_integration.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/integration/test_metric_integration.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/integration/test_notify_alarm.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/integration/test_vim_account.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/__init__.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_alarm_req.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_alarming.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_common.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_metric_calls.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_metric_req.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_notifier.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_responses.py [new file with mode: 0644]
osm_mon/tests/plugins/OpenStack/unit/test_settings.py [new file with mode: 0644]
osm_mon/tests/plugins/VMware/__init__.py [new file with mode: 0644]
osm_mon/tests/plugins/VMware/test_mon_plugin_vrops.py [new file with mode: 0644]
osm_mon/tests/plugins/VMware/test_plugin_receiver.py [new file with mode: 0644]
osm_mon/tests/plugins/__init__.py [new file with mode: 0644]
tox.ini

index 56a1275..26e3700 100644 (file)
@@ -81,7 +81,7 @@ class DatabaseManager:
         """Saves vim credentials. If a record with same uuid exists, overwrite it."""
         exists = VimCredentials.get_or_none(VimCredentials.uuid == vim_credentials.uuid)
         if exists:
-            vim_credentials.uuid = exists.uuid
+            vim_credentials.id = exists.id
         vim_credentials.save()
         return vim_credentials
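The hunk above is the actual bug fix: VimCredentials.uuid is an ordinary column, so re-assigning it never turned save() into an update; copying the primary key id does. A minimal sketch of the corrected upsert pattern, assuming a peewee model with an auto-increment primary key (hypothetical field names, not the module's real model definition):

    # Sketch of the upsert fix, assuming peewee with an AutoField primary key.
    from peewee import AutoField, CharField, Model, SqliteDatabase

    db = SqliteDatabase(':memory:')

    class VimCredentials(Model):
        id = AutoField()      # surrogate primary key
        uuid = CharField()    # VIM uuid, NOT the primary key
        name = CharField()

        class Meta:
            database = db

    db.create_tables([VimCredentials])
    VimCredentials.create(uuid='vim-1', name='old')

    incoming = VimCredentials(uuid='vim-1', name='new')
    existing = VimCredentials.get_or_none(VimCredentials.uuid == incoming.uuid)
    if existing:
        incoming.id = existing.id   # reuse the primary key so save() issues an UPDATE
    incoming.save()

    assert VimCredentials.select().count() == 1   # overwritten, not duplicated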
 
diff --git a/osm_mon/core/models/access_cred_CloudWatch.json b/osm_mon/core/models/access_cred_CloudWatch.json
new file mode 100644 (file)
index 0000000..b5d4a81
--- /dev/null
@@ -0,0 +1,41 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+  # This is the message bus schema for CloudWatch access credentials */
+
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "vim_type": { "type": "string" },
+  "access_config":
+  {
+    "aws_site": { "type": "string" },
+    "user": { "type": "string" },
+    "password": { "type": "string" },
+    "vim_tenant_name":  { "type": "string" }
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "vim_type",
+                "aws_site",
+                "user",
+                "password",
+                "vim_tenant_name" ]
+}
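For illustration, a message that follows the layout above (all values are placeholders); per access_credentials.py later in this change, user/password/vim_tenant_name inside access_config are mapped to the AWS key id, secret key and EC2 region:

    # Illustrative CloudWatch access-credentials message (placeholder values only).
    sample_access_credentials = {
        "schema_version": "1.0",
        "schema_type": "vim_access_credentials",   # placeholder type name
        "vim_type": "AWS",
        "access_config": {
            "aws_site": "aws",
            "user": "AKIA-EXAMPLE-KEY-ID",          # becomes AWS_ACCESS_KEY_ID
            "password": "example-secret-key",       # becomes AWS_SECRET_ACCESS_KEY
            "vim_tenant_name": "us-west-2",         # becomes AWS_EC2_REGION
        },
    }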
diff --git a/osm_mon/core/models/access_cred_OpenStack.json b/osm_mon/core/models/access_cred_OpenStack.json
new file mode 100644 (file)
index 0000000..5759f8c
--- /dev/null
@@ -0,0 +1,43 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+  # This is the message bus schema for OpenStack access credentials */
+
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "vim_type": { "type": "string" },
+  "access_config":
+  {
+    "vim_uuid": { "type": "string" },
+    "openstack_site": { "type" : "string" },
+    "user": { "type": "string" },
+    "password": { "type": "string" },
+    "vim_tenant_name": { "type": "string" }
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "vim_type",
+                "vim_uuid",
+                "openstack_site",
+                "user",
+                "password",
+                "vim_tenant_name" ]
+}
diff --git a/osm_mon/core/models/access_cred_vROPs.json b/osm_mon/core/models/access_cred_vROPs.json
new file mode 100644 (file)
index 0000000..29d99c7
--- /dev/null
@@ -0,0 +1,60 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+  # This is the message bus schema for vROPs access credentials */
+
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "vim_type": { "type": "string" },
+  "access_config":
+  {
+    "vrops_site": { "type": "string" },
+    "vrops_user": { "type": "string" },
+    "vrops_password": { "type": "string" },
+    "vcloud-site": { "type": "string" },
+    "admin_username": { "type": "string" },
+    "admin_password": { "type": "string" },
+    "nsx_manager": { "type": "string" },
+    "nsx_user": { "type": "string" },
+    "nsx_password": { "type": "string" },
+    "vcenter_ip": { "type": "string" },
+    "vcenter_port": { "type": "string" },
+    "vcenter_user": { "type": "string" },
+    "vcenter_password": { "type": "string" },
+    "vim_tenant_name": { "type": "string" },
+    "orgname": { "type": "string" }
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "vim_type",
+                "vrops_site",
+                "vrops_user",
+                "vrops_password",
+                "vcloud-site",
+                "admin_username",
+                "admin_password",
+                "vcenter_ip",
+                "vcenter_port",
+                "vcenter_user",
+                "vcenter_password",
+                "vim_tenant_name",
+                "orgname" ]
+}
diff --git a/osm_mon/core/models/acknowledge_alarm_req.json b/osm_mon/core/models/acknowledge_alarm_req.json
new file mode 100644 (file)
index 0000000..8a9e50a
--- /dev/null
@@ -0,0 +1,41 @@
+
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+
+# This is the message bus schema for acknowledge_alarm */
+
+{  
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "vim_type": { "type": "string" },
+  "vim_uuid": { "type": "string" },
+  "ack_details":
+  {
+    "alarm_uuid": { "type": "string" },
+    "ns_id": { "type": "string"},
+    "vnf_member_index": { "type": "integer"},
+    "vdu_name": { "type": "string"}
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "alarm_uuid",
+                "ns_id",
+                "vnf_member_index",
+                "vdu_name"]
+}
diff --git a/osm_mon/core/models/create_metric_req.json b/osm_mon/core/models/create_metric_req.json
new file mode 100644 (file)
index 0000000..d419824
--- /dev/null
@@ -0,0 +1,43 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema to create_metric */
+
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "metric_create_request":
+  {
+    "correlation_id": { "type": "integer" },
+    "metric_name": { "type" : "string" },
+    "metric_unit": { "type": "string" },
+    "ns_id": { "type": "string"},
+    "vnf_member_index": { "type": "integer"},
+    "vdu_name": { "type": "string"}
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "correlation_id",
+                "metric_name",
+                "metric_unit",
+                "ns_id",
+                "vnf_member_index",
+                "vdu_name" ]
+}
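As a worked example (placeholder values), a request matching the schema above; schema_version and schema_type sit at the top level while the metric fields are nested under metric_create_request:

    # Illustrative create_metric request (placeholder values only).
    sample_create_metric_req = {
        "schema_version": "1.0",
        "schema_type": "create_metric_request",
        "metric_create_request": {
            "correlation_id": 123,
            "metric_name": "cpu_utilization",
            "metric_unit": "%",
            "ns_id": "ns-0001",
            "vnf_member_index": 1,
            "vdu_name": "vdu-0",
        },
    }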
diff --git a/osm_mon/core/models/create_metric_resp.json b/osm_mon/core/models/create_metric_resp.json
new file mode 100644 (file)
index 0000000..5b67955
--- /dev/null
@@ -0,0 +1,36 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema for create_metric response*/
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "metric_create_response":
+  {
+    "correlation_id": { "type": "integer" },
+    "metric_uuid": { "type": "string" },
+    "status": { "type": "boolean" }
+  },
+  "required": [ "schema_type",
+                "schema_version",
+                "correlation_id",
+                "metric_uuid",
+                "status" ]
+}
diff --git a/osm_mon/core/models/delete_metric_req.json b/osm_mon/core/models/delete_metric_req.json
new file mode 100644 (file)
index 0000000..2c51042
--- /dev/null
@@ -0,0 +1,37 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema to delete_metric */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "correlation_id": { "type": "integer" },
+  "metric_name": { "type": "string" },
+  "ns_id": { "type": "string"},
+  "vnf_member_index": { "type": "integer"},
+  "vdu_name": { "type": "string"},
+  "required": [ "schema_version",
+                "schema_type",
+                "metric_name",
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index",
+                "correlation_id" ]
+}
diff --git a/osm_mon/core/models/delete_metric_resp.json b/osm_mon/core/models/delete_metric_resp.json
new file mode 100644 (file)
index 0000000..dc99059
--- /dev/null
@@ -0,0 +1,37 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema for delete_metric_response */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "metric_name": { "type": "string" },
+  "metric_uuid": { "type": "string" },
+  "resource_uuid": { "type": "string" },
+  "correlation_id": { "type": "integer" },
+  "status": { "type": "boolean" },
+  "required": [ "schema_version",
+                "schema_type",
+                "metric_name",
+                "metric_uuid",
+                "resource_uuid",
+                "status",
+                "correlation_id"]
+}
diff --git a/osm_mon/core/models/list_metric_req.json b/osm_mon/core/models/list_metric_req.json
new file mode 100644 (file)
index 0000000..7474956
--- /dev/null
@@ -0,0 +1,40 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema to list_metric */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "metrics_list_request":
+  {
+    "correlation_id": { "type": "integer" },
+    "metric_name": { "type": "string" },
+    "ns_id": { "type": "string"},
+    "vnf_member_index": { "type": "integer"},
+    "vdu_name": { "type": "string"}
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index",
+                "correlation_id"
+              ]
+}
diff --git a/osm_mon/core/models/list_metric_resp.json b/osm_mon/core/models/list_metric_resp.json
new file mode 100644 (file)
index 0000000..e8c933d
--- /dev/null
@@ -0,0 +1,44 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema for list_metric response */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "correlation_id": { "type": "integer" },
+  "status": { "type": "boolean" },
+  "metrics_list":
+    [{
+      "type": "array",
+      "properties":{
+        "metric_name": { "type": "string" },
+        "metric_uuid": { "type": "string" },
+        "metric_unit": { "type": "string" },
+        "resource_uuid": { "type": "string" }
+      }
+    }],
+    "required": [ "schema_version",
+                  "schema_type",
+                  "correlation_id",
+                  "metric_name",
+                  "metric_uuid",
+                  "metric_unit",
+                  "resource_uuid" ]
+}
diff --git a/osm_mon/core/models/read_metric_data_req.json b/osm_mon/core/models/read_metric_data_req.json
new file mode 100644 (file)
index 0000000..6214569
--- /dev/null
@@ -0,0 +1,42 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema to read_metric_data */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "metric_name": { "type": "string" },
+  "ns_id": { "type": "string"},
+  "vnf_member_index": { "type": "integer"},
+  "vdu_name": { "type": "string"},
+  "correlation_id": { "type": "integer" },
+  "collection_period": { "type": "integer" },
+  "collection_unit": { "type": "string" },
+  "required": ["schema_version",
+               "schema_type",
+               "metric_name",
+               "metric_uuid",
+               "correlation_id",
+               "collection_period",
+               "collection_unit",
+               "ns_id",
+               "vdu_name",
+               "vnf_member_index"]
+}
diff --git a/osm_mon/core/models/read_metric_data_resp.json b/osm_mon/core/models/read_metric_data_resp.json
new file mode 100644 (file)
index 0000000..2a96b76
--- /dev/null
@@ -0,0 +1,62 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema for read_metric_data response */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": {"type": "string" },
+  "metric_name": { "type": "string" },
+  "metric_uuid": { "type": "string" },
+  "correlation_id": { "type": "integer" },
+  "resource_uuid": { "type": "string" },
+  "status": { "type": "boolean" },
+  "metrics_data":
+  {
+    "time_series": [{
+                      "type": "array",
+                      "properties":
+                      { "time_stamp":
+                        { "type": "integer" }}}
+    ],
+    "metrics_series": [{
+                      "type": "array",
+                      "properties":
+                      { "data":
+                        { "type":
+                            ["integer",
+                            "string",
+                            "decimal"
+                            ]
+                        }
+                      }
+                    }
+                  ]
+      },
+  "unit": { "type": "string" },
+  "required": [ "schema_version",
+                "schema_type",
+                "metric_name",
+                "metric_uuid",
+                "resource_uuid",
+                "correlation_id",
+                "time_series",
+                "metrics_series",
+                "status" ]
+}
diff --git a/osm_mon/core/models/update_metric_req.json b/osm_mon/core/models/update_metric_req.json
new file mode 100644 (file)
index 0000000..7c0eba3
--- /dev/null
@@ -0,0 +1,43 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema for the update_metric request */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "metric_update_request":
+  {
+  "correlation_id": { "type": "integer" },
+  "metric_name": { "type": "string" },
+  "metric_unit": { "type": "string" },
+  "ns_id": { "type": "string"},
+  "vnf_member_index": { "type": "integer"},
+  "vdu_name": { "type": "string"}
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "correlation_id",
+                "metric_name",
+                "metric_unit",
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index"
+                ]
+}
diff --git a/osm_mon/core/models/update_metric_resp.json b/osm_mon/core/models/update_metric_resp.json
new file mode 100644 (file)
index 0000000..a5bdb67
--- /dev/null
@@ -0,0 +1,38 @@
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+  # This file is part of OSM Monitoring module
+  # All Rights Reserved to Intel Corporation
+
+  # Licensed under the Apache License, Version 2.0 (the "License"); you may
+  # not use this file except in compliance with the License. You may obtain
+  # a copy of the License at
+
+  #         http://www.apache.org/licenses/LICENSE-2.0
+
+  # Unless required by applicable law or agreed to in writing, software
+  # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  # License for the specific language governing permissions and limitations
+  # under the License.
+
+  # For those usages not covered by the Apache License, Version 2.0 please
+  # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+  
+  # This is the message bus schema for the update_metric response */
+
+{
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "correlation_id": { "type": "integer" },
+  "metric_update_response":
+  {
+    "metric_uuid": { "type": "string" },
+    "status": { "type": "boolean" },
+    "resource_uuid": { "type": "string" }
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "correlation_id",
+                "metric_uuid",
+                "resource_uuid",
+                "status"]
+}
index 680b58d..a5be2d1 100644 (file)
@@ -62,6 +62,7 @@ class Config(object):
         CfgParam('BROKER_URI', "localhost:9092", six.text_type),
         CfgParam('MONGO_URI', "mongodb://mongo:27017", six.text_type),
         CfgParam('DATABASE', "sqlite:///mon_sqlite.db", six.text_type),
+        CfgParam('OS_NOTIFIER_URI', "http://localhost:8662", six.text_type),
         CfgParam('OS_DEFAULT_GRANULARITY', 300, int),
         CfgParam('REQUEST_TIMEOUT', 10, int),
         CfgParam('OSMMON_LOG_LEVEL', "INFO", six.text_type),
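For context, the CfgParam entries above are (key, default value, type) triples. A simplified, hypothetical sketch of that pattern, with the environment lookup shown only as an assumption (this is not the module's actual Config class):

    # Hypothetical sketch of a CfgParam-style configuration table.
    import os
    from collections import namedtuple

    import six

    CfgParam = namedtuple('CfgParam', ['key', 'default', 'data_type'])

    _CONFIG = [
        CfgParam('OS_NOTIFIER_URI', "http://localhost:8662", six.text_type),
        CfgParam('OS_DEFAULT_GRANULARITY', 300, int),
        CfgParam('REQUEST_TIMEOUT', 10, int),
    ]

    def read_param(key):
        """Return the environment override for key, cast to its declared type,
        falling back to the declared default (assumed lookup scheme)."""
        for param in _CONFIG:
            if param.key == key:
                raw = os.environ.get(key)
                return param.data_type(raw) if raw is not None else param.default
        raise KeyError(key)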
diff --git a/osm_mon/plugins/CloudWatch/__init__.py b/osm_mon/plugins/CloudWatch/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/osm_mon/plugins/CloudWatch/access_credentials.py b/osm_mon/plugins/CloudWatch/access_credentials.py
new file mode 100644 (file)
index 0000000..8c110ab
--- /dev/null
@@ -0,0 +1,64 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: usman.javaid@xflowresearch.com
+##
+
+"""
+Access credentials class implements all the methods to store the access credentials for AWS
+"""
+
+__author__ = "Usman Javaid"
+__date__ = "20-December-2017"
+
+import json
+import logging
+import os
+
+log = logging.getLogger(__name__)
+
+
+class AccessCredentials():
+
+    def logtest(self):
+        log.info("Access credentials sourced for CloudWatch MON plugin")
+
+    def access_credential_calls(self, message):
+        try:
+            message = json.loads(message.value)['access_config']
+
+            AWS_KEY = message['user']
+            AWS_SECRET = message['password']
+            AWS_REGION = message['vim_tenant_name']
+
+            os.environ['AWS_ACCESS_KEY_ID'] = AWS_KEY
+            os.environ['AWS_SECRET_ACCESS_KEY'] = AWS_SECRET
+            os.environ['AWS_EC2_REGION'] = AWS_REGION
+
+            # aws_credentials.txt file to save the access credentials
+            cloudwatch_credentials = open("../../plugins/CloudWatch/cloudwatch_credentials.txt", "w+")
+            cloudwatch_credentials.write("AWS_ACCESS_KEY_ID=" + AWS_KEY +
+                                         "\nAWS_SECRET_ACCESS_KEY=" + AWS_SECRET +
+                                         "\nAWS_EC2_REGION=" + AWS_REGION)
+
+            # Closing the file
+            cloudwatch_credentials.close()
+            log.info("Access credentials sourced for CloudWatch MON plugin")
+
+        except Exception as e:
+            log.error("Access credentials not provided correctly: %s", str(e))
diff --git a/osm_mon/plugins/CloudWatch/connection.py b/osm_mon/plugins/CloudWatch/connection.py
new file mode 100644 (file)
index 0000000..98b05aa
--- /dev/null
@@ -0,0 +1,94 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+"""
+Connecting to AWS services (CloudWatch/EC2) using the required keys
+"""
+
+__author__ = "Wajeeha Hamid"
+__date__ = "18-September-2017"
+
+import os
+
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+    import logging
+    from boto.ec2.cloudwatch.alarm import MetricAlarm
+    from boto.ec2.cloudwatch.dimension import Dimension
+    from boto.sns import connect_to_region
+    from boto.utils import get_instance_metadata
+
+except:
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+log = logging.getLogger(__name__)
+
+
+class Connection:
+    """Connection Establishment with AWS -- VPC/EC2/CloudWatch"""
+
+    def setEnvironment(self):
+        try:
+            """Credentials for connecting to AWS-CloudWatch"""
+            # Reads from the environment variables
+            self.AWS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
+            self.AWS_SECRET = os.environ.get("AWS_SECRET_ACCESS_KEY")
+            self.AWS_REGION = os.environ.get("AWS_EC2_REGION", "us-west-2")
+
+            # TODO Read from the cloudwatch_credentials.txt file
+
+            return self.connection_instance()
+        except Exception as e:
+            log.error("AWS credentials not configured. Try setting the access credentials first: %s", str(e))
+
+    def connection_instance(self):
+        try:
+            # VPC Connection
+            self.vpc_conn = boto.vpc.connect_to_region(self.AWS_REGION,
+                                                       aws_access_key_id=self.AWS_KEY,
+                                                       aws_secret_access_key=self.AWS_SECRET)
+
+            # EC2 Connection
+            self.ec2_conn = boto.ec2.connect_to_region(self.AWS_REGION,
+                                                       aws_access_key_id=self.AWS_KEY,
+                                                       aws_secret_access_key=self.AWS_SECRET)
+
+            """ TODO : Required to add actions against alarms when needed """
+            # self.sns = connect_to_region(self.AWS_REGION)
+            # self.topics = self.sns.get_all_topics()
+            # self.topic = self.topics[u'ListTopicsResponse']['ListTopicsResult']['Topics'][0]['TopicArn']
+
+            # Cloudwatch Connection
+            self.cloudwatch_conn = boto.ec2.cloudwatch.connect_to_region(
+                self.AWS_REGION,
+                aws_access_key_id=self.AWS_KEY,
+                aws_secret_access_key=self.AWS_SECRET)
+            connection_dict = dict()
+            connection_dict['ec2_connection'] = self.ec2_conn
+            connection_dict['cloudwatch_connection'] = self.cloudwatch_conn
+            return connection_dict
+
+        except Exception as e:
+            log.error("Failed to connect to AWS: %s", str(e))
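A usage sketch for the Connection helper above: export the credentials (as access_credentials.py does), then call setEnvironment() to obtain the per-service boto connections. Values are placeholders; the calls need real AWS credentials, network access and the boto 2 library:

    # Illustrative only; requires valid AWS credentials and boto 2.
    import os

    from osm_mon.plugins.CloudWatch.connection import Connection

    os.environ['AWS_ACCESS_KEY_ID'] = 'AKIA-EXAMPLE'        # placeholder
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'example-secret'  # placeholder
    os.environ['AWS_EC2_REGION'] = 'us-west-2'

    connections = Connection().setEnvironment()
    cloudwatch_conn = connections['cloudwatch_connection']  # boto CloudWatch connection
    ec2_conn = connections['ec2_connection']                # boto EC2 connection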
diff --git a/osm_mon/plugins/CloudWatch/metric_alarms.py b/osm_mon/plugins/CloudWatch/metric_alarms.py
new file mode 100644 (file)
index 0000000..5d330b0
--- /dev/null
@@ -0,0 +1,323 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+''' Handling of alarm requests via BOTO 2.48 '''
+
+__author__ = "Wajeeha Hamid"
+__date__ = "18-September-2017"
+
+import logging
+
+log = logging.getLogger(__name__)
+
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except:
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+STATISTICS = {
+    "AVERAGE": "Average",
+    "MINIMUM": "Minimum",
+    "MAXIMUM": "Maximum",
+    "COUNT": "SampleCount",
+    "SUM": "Sum"}
+
+OPERATIONS = {
+    "GE": ">=",
+    "LE": "<=",
+    "GT": ">",
+    "LT": "<",
+    "EQ": "="}
+
+
+class MetricAlarm():
+    """Alarms Functionality Handler -- Carries out alarming requests and responses via BOTO.Cloudwatch """
+
+    def __init__(self):
+        self.alarm_resp = dict()
+        self.del_resp = dict()
+
+    def config_alarm(self, cloudwatch_conn, create_info):
+        """Configure or Create a new alarm"""
+        inner_dict = dict()
+        """ Alarm Name to ID Mapping """
+        alarm_info = create_info['alarm_create_request']
+        alarm_id = alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid']
+        if self.is_present(cloudwatch_conn, alarm_id)['status'] == True:
+            alarm_id = None
+            log.debug("Alarm already exists, Try updating the alarm using 'update_alarm_configuration()'")
+            return alarm_id
+        else:
+            try:
+                if alarm_info['statistic'] in STATISTICS:
+                    if alarm_info['operation'] in OPERATIONS:
+                        alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
+                            connection=cloudwatch_conn,
+                            name=alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid'],
+                            metric=alarm_info['metric_name'],
+                            namespace="AWS/EC2",
+                            statistic=STATISTICS[alarm_info['statistic']],
+                            comparison=OPERATIONS[alarm_info['operation']],
+                            threshold=alarm_info['threshold_value'],
+                            period=60,
+                            evaluation_periods=1,
+                            unit=alarm_info['unit'],
+                            description=alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
+                            dimensions={'InstanceId': alarm_info['resource_uuid']},
+                            alarm_actions=None,
+                            ok_actions=None,
+                            insufficient_data_actions=None)
+
+                        """Setting Alarm Actions : 
+                        alarm_actions = ['arn:aws:swf:us-west-2:465479087178:action/actions/AWS_EC2.InstanceId.Stop/1.0']"""
+
+                        status = cloudwatch_conn.put_metric_alarm(alarm)
+
+                        log.debug("Alarm Configured Successfully")
+                        self.alarm_resp['schema_version'] = str(create_info['schema_version'])
+                        self.alarm_resp['schema_type'] = 'create_alarm_response'
+
+                        inner_dict['correlation_id'] = str(alarm_info['correlation_id'])
+                        inner_dict['alarm_uuid'] = str(alarm_id)
+                        inner_dict['status'] = status
+
+                        self.alarm_resp['alarm_create_response'] = inner_dict
+
+                        if status == True:
+                            return self.alarm_resp
+                        else:
+                            return None
+                    else:
+                        log.error("Operation not supported")
+                        return None
+                else:
+                    log.error("Statistic not supported")
+                    return None
+            except Exception as e:
+                log.error("Alarm Configuration Failed: " + str(e))
+
+    def update_alarm(self, cloudwatch_conn, update_info):
+
+        """Update or reconfigure an alarm"""
+        inner_dict = dict()
+        alarm_info = update_info['alarm_update_request']
+
+        """Alarm Name to ID Mapping"""
+        alarm_id = alarm_info['alarm_uuid']
+        status = self.is_present(cloudwatch_conn, alarm_id)
+
+        """Verifying : Alarm exists already"""
+        if status['status'] == False:
+            alarm_id = None
+            log.debug("Alarm not found, Try creating the alarm using 'configure_alarm()'")
+            return alarm_id
+        else:
+            try:
+                if alarm_info['statistic'] in STATISTICS:
+                    if alarm_info['operation'] in OPERATIONS:
+                        alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
+                            connection=cloudwatch_conn,
+                            name=status['info'].name,
+                            metric=alarm_info['metric_name'],
+                            namespace="AWS/EC2",
+                            statistic=STATISTICS[alarm_info['statistic']],
+                            comparison=OPERATIONS[alarm_info['operation']],
+                            threshold=alarm_info['threshold_value'],
+                            period=60,
+                            evaluation_periods=1,
+                            unit=alarm_info['unit'],
+                            description=alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
+                            dimensions={'InstanceId': str(status['info'].dimensions['InstanceId']).split("'")[1]},
+                            alarm_actions=None,
+                            ok_actions=None,
+                            insufficient_data_actions=None)
+
+                        """Setting Alarm Actions : 
+                        alarm_actions = ['arn:aws:swf:us-west-2:465479087178:action/actions/AWS_EC2.InstanceId.Stop/1.0']"""
+
+                        status = cloudwatch_conn.put_metric_alarm(alarm)
+                        log.debug("Alarm %s Updated ", alarm.name)
+                        self.alarm_resp['schema_version'] = str(update_info['schema_version'])
+                        self.alarm_resp['schema_type'] = 'update_alarm_response'
+
+                        inner_dict['correlation_id'] = str(alarm_info['correlation_id'])
+                        inner_dict['alarm_uuid'] = str(alarm_id)
+                        inner_dict['status'] = status
+
+                        self.alarm_resp['alarm_update_response'] = inner_dict
+                        return self.alarm_resp
+                    else:
+                        log.error("Operation not supported")
+                        return None
+                else:
+                    log.error("Statistic not supported")
+                    return None
+            except Exception as e:
+                log.error("Error in Updating Alarm " + str(e))
+
+    def delete_Alarm(self, cloudwatch_conn, del_info_all):
+
+        """Deletes an Alarm with specified alarm_id"""
+        inner_dict = dict()
+        del_info = del_info_all['alarm_delete_request']
+        status = self.is_present(cloudwatch_conn, del_info['alarm_uuid'])
+        try:
+            if status['status'] == True:
+                del_status = cloudwatch_conn.delete_alarms(status['info'].name)
+                self.del_resp['schema_version'] = str(del_info_all['schema_version'])
+                self.del_resp['schema_type'] = 'delete_alarm_response'
+                inner_dict['correlation_id'] = str(del_info['correlation_id'])
+                inner_dict['alarm_id'] = str(del_info['alarm_uuid'])
+                inner_dict['status'] = del_status
+                self.del_resp['alarm_deletion_response'] = inner_dict
+                return self.del_resp
+            return None
+        except Exception as e:
+            log.error("Alarm Not Deleted: " + str(e))
+
+    def alarms_list(self, cloudwatch_conn, list_info):
+
+        """Get a list of alarms that are present on a particular VIM type"""
+        alarm_list = []
+        alarm_info = dict()
+        inner_dict = list_info['alarm_list_request']
+        try:  # id vim 
+            alarms = cloudwatch_conn.describe_alarms()
+            itr = 0
+            for alarm in alarms:
+                list_info['alarm_list_request']['alarm_uuid'] = str(alarm.description).split(';')[1]
+
+                # Severity = alarm_name = resource_uuid = ""
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] == "" and inner_dict[
+                    'resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # alarm_name = resource_uuid = ""
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] == "" and \
+                        inner_dict['resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # severity = resource_uuid = ""
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] in alarm.name and inner_dict[
+                    'resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # severity = alarm_name = ""    
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] == "" and inner_dict['resource_uuid'] == \
+                        str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # resource_uuid = ""    
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict[
+                    'alarm_name'] in alarm.name and inner_dict['resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # alarm_name = ""    
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] == "" and \
+                        inner_dict['resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # severity = ""    
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] in alarm.name and inner_dict[
+                    'resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                    # Everything provided    
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict[
+                    'alarm_name'] in alarm.name and inner_dict['resource_uuid'] == \
+                        str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+
+            alarm_info['schema_version'] = str(list_info['schema_version'])
+            alarm_info['schema_type'] = 'list_alarm_response'
+            alarm_info['list_alarm_response'] = alarm_list
+
+            return alarm_info
+        except Exception as e:
+            log.error("Error in Getting List : %s", str(e))
+
+    def alarm_details(self, cloudwatch_conn, ack_info):
+
+        """Get an individual alarm details specified by alarm_name"""
+        try:
+            alarms_details = cloudwatch_conn.describe_alarm_history()
+            alarm_details_all = dict()
+            alarm_details_dict = dict()
+            ack_info_all = ack_info
+
+            if 'ack_details' in ack_info:
+                ack_info = ack_info['ack_details']
+            elif 'alarm_list_request' in ack_info:
+                ack_info = ack_info['alarm_list_request']
+
+            is_present = self.is_present(cloudwatch_conn, ack_info['alarm_uuid'])
+
+            for itr in range(len(alarms_details)):
+                if alarms_details[itr].name == is_present['info'].name:  # name, timestamp, summary
+                    if 'created' in alarms_details[itr].summary:
+                        alarm_details_dict['status'] = "New"
+                    elif 'updated' in alarms_details[itr].summary:
+                        alarm_details_dict['status'] = "Update"
+                    elif 'deleted' in alarms_details[itr].summary:
+                        alarm_details_dict['status'] = "Canceled"
+
+                    status = alarms_details[itr].summary.split()
+                    alarms = cloudwatch_conn.describe_alarms()
+                    for alarm in alarms:
+                        if str(alarm.description).split(';')[1] == ack_info['alarm_uuid']:
+                            alarm_details_dict['alarm_uuid'] = str(ack_info['alarm_uuid'])
+                            alarm_details_dict['resource_uuid'] = str(alarm.dimensions['InstanceId']).split("'")[1]
+                            alarm_details_dict['description'] = str(alarm.description).split(';')[1]
+                            alarm_details_dict['severity'] = str(alarm.description).split(';')[0]
+                            alarm_details_dict['start_date_time'] = str(alarms_details[itr].timestamp)
+                            alarm_details_dict['vim_type'] = str(ack_info_all['vim_type'])
+                            # TODO : tenant id
+                            if 'ack_details' in ack_info_all:
+                                alarm_details_all['schema_version'] = str(ack_info_all['schema_version'])
+                                alarm_details_all['schema_type'] = 'notify_alarm'
+                                alarm_details_all['notify_details'] = alarm_details_dict
+                                return alarm_details_all
+
+                            elif 'alarm_list_request' in ack_info_all:
+                                return alarm_details_dict
+
+        except Exception as e:
+            log.error("Error getting alarm details: %s", str(e))
+
+    def is_present(self, cloudwatch_conn, alarm_id):
+        """Find an alarm among the already configured alarms."""
+        alarm_info = dict()
+        try:
+            alarms = cloudwatch_conn.describe_alarms()
+            for alarm in alarms:
+                if str(alarm.description).split(';')[1] == alarm_id:
+                    alarm_info['status'] = True
+                    alarm_info['info'] = alarm
+                    return alarm_info
+            alarm_info['status'] = False
+            return alarm_info
+        except Exception as e:
+            log.error("Error Finding Alarm: %s", str(e))
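
Both alarm_details() and is_present() above rely on the plugin's convention of encoding "severity;alarm_uuid" in the CloudWatch alarm description. A minimal sketch of that lookup, assuming `cloudwatch_conn` is an authenticated boto CloudWatch connection and the helper name is only illustrative:

    def find_alarm_by_uuid(cloudwatch_conn, alarm_uuid):
        """Return (severity, alarm) for the boto alarm whose description encodes alarm_uuid."""
        for alarm in cloudwatch_conn.describe_alarms():
            parts = str(alarm.description).split(';')
            if len(parts) > 1 and parts[1] == alarm_uuid:
                return parts[0], alarm
        return None, None
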
diff --git a/osm_mon/plugins/CloudWatch/metrics.py b/osm_mon/plugins/CloudWatch/metrics.py
new file mode 100644 (file)
index 0000000..1812002
--- /dev/null
@@ -0,0 +1,253 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+"""
+AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+"""
+
+__author__ = "Wajeeha Hamid"
+__date__ = "18-Sept-2017"
+
+import datetime
+import logging
+
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except ImportError:
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+log = logging.getLogger(__name__)
+
+
+class Metrics:
+
+    def createMetrics(self, cloudwatch_conn, metric_info):
+        try:
+
+            """createMetrics returns metric_uuid=0 and
+            status=True when the metric is supported by AWS."""
+
+            supported = self.check_metric(metric_info['metric_name'])
+            metric_resp = dict()
+            metric_resp['resource_uuid'] = metric_info['resource_uuid']
+
+            if supported['status']:
+                metric_resp['status'] = True
+                metric_resp['metric_uuid'] = 0
+                log.debug("Metrics Configured Successfully : %s", metric_resp)
+            else:
+                metric_resp['status'] = False
+                metric_resp['metric_uuid'] = None
+                log.error("Metric name is not supported")
+
+            return metric_resp
+
+        except Exception as e:
+            log.error("Metric Configuration Failed: " + str(e))
+
+    def metricsData(self, cloudwatch_conn, data_info):
+
+        """Get metric statistics for the last hour. Datapoints are
+        returned at one-minute intervals; the collection window can be
+        adjusted via the timedelta value."""
+
+        try:
+            metric_info = dict()
+            metric_info_dict = dict()
+            timestamp_arr = {}
+            value_arr = {}
+
+            supported = self.check_metric(data_info['metric_name'])
+
+            if supported['status']:
+                if int(data_info['collection_period']) % 60 == 0:
+                    metric_stats = cloudwatch_conn.get_metric_statistics(60,
+                                                                         datetime.datetime.utcnow() - datetime.timedelta(
+                                                                             seconds=int(
+                                                                                 data_info['collection_period'])),
+                                                                         datetime.datetime.utcnow(),
+                                                                         supported['metric_name'], 'AWS/EC2', 'Maximum',
+                                                                         dimensions={
+                                                                             'InstanceId': data_info['resource_uuid']},
+                                                                         unit='Percent')
+                    index = 0
+                    for itr in range(len(metric_stats)):
+                        timestamp_arr[index] = str(metric_stats[itr]['Timestamp'])
+                        value_arr[index] = metric_stats[itr]['Maximum']
+                        index += 1
+                    metric_info_dict['time_series'] = timestamp_arr
+                    metric_info_dict['metrics_series'] = value_arr
+                    log.debug("Metrics Data : %s", metric_info_dict)
+                    return metric_info_dict
+                else:
+                    log.error("Collection Period should be a multiple of 60")
+                    return False
+
+            else:
+                log.error("Metric name is not supported")
+                return False
+
+        except Exception as e:
+            log.error("Error returning Metrics Data: %s", str(e))
+
+    def updateMetrics(self, cloudwatch_conn, metric_info):
+
+        """updateMetrics returns metric_uuid=0 and
+        status=True when the metric is supported by AWS."""
+        try:
+            supported = self.check_metric(metric_info['metric_name'])
+            update_resp = dict()
+            update_resp['resource_uuid'] = metric_info['resource_uuid']
+            if supported['status']:
+                update_resp['status'] = True
+                update_resp['metric_uuid'] = 0
+                log.debug("Metric Updated : %s", update_resp)
+            else:
+                update_resp['status'] = False
+                update_resp['metric_uuid'] = None
+                log.error("Metric name is not supported")
+
+            return update_resp
+
+        except Exception as e:
+            log.error("Error in Update Metrics: %s", str(e))
+
+    def deleteMetrics(self, cloudwatch_conn, del_info):
+
+        """Metric deletion is not supported in AWS.
+        Returns the required parameters with status = False."""
+        try:
+            supported = self.check_metric(del_info['metric_name'])
+            metric_resp = dict()
+            del_resp = dict()
+            if supported['status']:
+                del_resp['schema_version'] = del_info['schema_version']
+                del_resp['schema_type'] = "delete_metric_response"
+                del_resp['metric_name'] = del_info['metric_name']
+                del_resp['metric_uuid'] = del_info['metric_uuid']
+                del_resp['resource_uuid'] = del_info['resource_uuid']
+                # TODO : yet to finalize
+                del_resp['tenant_uuid'] = del_info['tenant_uuid']
+                del_resp['correlation_id'] = del_info['correlation_uuid']
+                del_resp['status'] = False
+                log.info("Metric Deletion Not supported in AWS : %s", del_resp)
+                return del_resp
+            else:
+                log.error("Metric name is not supported")
+                return False
+
+        except Exception as e:
+            log.error("Metric Deletion Not supported in AWS: %s", str(e))
+
+    def listMetrics(self, cloudwatch_conn, list_info):
+
+        """Returns the list of available AWS/EC2 metrics on which
+        alarms have been configured and the metrics are being monitored"""
+        try:
+            supported = self.check_metric(list_info['metric_name'])
+            if supported['status']:
+                metrics_list = []
+                metrics_data = dict()
+
+                # To get the list of associated metrics with the alarms
+                alarms = cloudwatch_conn.describe_alarms()
+                itr = 0
+                if list_info['metric_name'] == "":
+                    for alarm in alarms:
+                        metrics_info = dict()
+                        instance_id = str(alarm.dimensions['InstanceId']).split("'")[1]
+                        metrics_info['metric_name'] = str(alarm.metric)
+                        metrics_info['metric_uuid'] = 0
+                        metrics_info['metric_unit'] = str(alarm.unit)
+                        metrics_info['resource_uuid'] = instance_id
+                        metrics_list.insert(itr, metrics_info)
+                        itr += 1
+                    log.info(metrics_list)
+                    return metrics_list
+                else:
+                    for alarm in alarms:
+                        metrics_info = dict()
+                        if alarm.metric == supported['metric_name']:
+                            instance_id = str(alarm.dimensions['InstanceId']).split("'")[1]
+                            metrics_info['metric_name'] = str(alarm.metric)
+                            metrics_info['metric_uuid'] = 0
+                            metrics_info['metric_unit'] = str(alarm.unit)
+                            metrics_info['resource_uuid'] = instance_id
+                            metrics_list.insert(itr, metrics_info)
+                            itr += 1
+                    return metrics_list
+            else:
+                log.error("Metric name is not supported")
+                return False
+
+        except Exception as e:
+            log.error("Error in Getting Metric List: %s", str(e))
+
+    def check_metric(self, metric_name):
+
+        """ Checking whether the metric is supported by AWS """
+        try:
+            check_resp = dict()
+            # metric_name
+            if metric_name == 'CPU_UTILIZATION':
+                metric_name = 'CPUUtilization'
+                metric_status = True
+            elif metric_name == 'DISK_READ_OPS':
+                metric_name = 'DiskReadOps'
+                metric_status = True
+            elif metric_name == 'DISK_WRITE_OPS':
+                metric_name = 'DiskWriteOps'
+                metric_status = True
+            elif metric_name == 'DISK_READ_BYTES':
+                metric_name = 'DiskReadBytes'
+                metric_status = True
+            elif metric_name == 'DISK_WRITE_BYTES':
+                metric_name = 'DiskWriteBytes'
+                metric_status = True
+            elif metric_name == 'PACKETS_RECEIVED':
+                metric_name = 'NetworkPacketsIn'
+                metric_status = True
+            elif metric_name == 'PACKETS_SENT':
+                metric_name = 'NetworkPacketsOut'
+                metric_status = True
+            elif metric_name == "":
+                metric_name = None
+                metric_status = True
+                log.info("Empty metric name; treated as a wildcard by the AWS plugin")
+            else:
+                metric_name = None
+                metric_status = False
+                log.info("Metric Not Supported by AWS plugin")
+            check_resp['metric_name'] = metric_name
+            check_resp['status'] = metric_status
+
+            return check_resp
+
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
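
metricsData() expects a normalized request dict carrying metric_name, resource_uuid and collection_period. A hedged usage sketch; the region, credentials and instance id are placeholders and boto is assumed to authenticate from the environment:

    import boto.ec2.cloudwatch

    from osm_mon.plugins.CloudWatch.metrics import Metrics

    cloudwatch_conn = boto.ec2.cloudwatch.connect_to_region('us-east-1')
    data_info = {
        'metric_name': 'CPU_UTILIZATION',        # normalized name; check_metric() maps it to CPUUtilization
        'resource_uuid': 'i-0123456789abcdef0',  # EC2 instance id (placeholder)
        'collection_period': 3600,               # seconds; must be a multiple of 60
    }
    series = Metrics().metricsData(cloudwatch_conn, data_info)
    # On success: {'time_series': {...}, 'metrics_series': {...}}; False otherwise
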
diff --git a/osm_mon/plugins/CloudWatch/plugin_alarm.py b/osm_mon/plugins/CloudWatch/plugin_alarm.py
new file mode 100644 (file)
index 0000000..c125bab
--- /dev/null
@@ -0,0 +1,215 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+"""
+AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+"""
+from io import UnsupportedOperation
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.CloudWatch.metric_alarms import MetricAlarm
+from osm_mon.plugins.CloudWatch.metrics import Metrics
+
+__author__ = "Wajeeha Hamid"
+__date__ = "18-September-2017"
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class plugin_alarms:
+    """Receives Alarm info from MetricAlarm and connects with the consumer/producer"""
+
+    def __init__(self):
+        self._cfg = Config.instance()
+        self.metricAlarm = MetricAlarm()
+        self.metric = Metrics()
+
+    def configure_alarm(self, alarm_info):
+        alarm_id = self.metricAlarm.config_alarm(self.cloudwatch_conn, alarm_info)
+        return alarm_id
+
+    def update_alarm_configuration(self, test):
+        alarm_id = self.metricAlarm.update_alarm(self.cloudwatch_conn, test)
+        return alarm_id
+
+    def delete_alarm(self, alarm_id):
+        return self.metricAlarm.delete_Alarm(self.cloudwatch_conn, alarm_id)
+
+    def get_alarms_list(self, instance_id):
+        return self.metricAlarm.alarms_list(self.cloudwatch_conn, instance_id)
+
+    def get_ack_details(self, ack_info):
+        return self.metricAlarm.alarm_details(self.cloudwatch_conn, ack_info)
+
+    def get_metrics_data(self, metric_name, period, instance_id):
+        # TODO: Investigate and fix this call
+        return self.metric.metricsData(self.cloudwatch_conn, metric_name, period, instance_id)
+
+    def alarm_calls(self, key: str, alarm_info: dict, aws_conn: dict):
+        """Gets the message from the common consumer"""
+        try:
+            self.cloudwatch_conn = aws_conn['cloudwatch_connection']
+            self.ec2_conn = aws_conn['ec2_connection']
+
+            if key == "create_alarm_request":
+                alarm_inner_dict = alarm_info['alarm_create_request']
+                metric_status = self.check_metric(alarm_inner_dict['metric_name'])
+
+                if self.check_resource(alarm_inner_dict['resource_uuid']) and metric_status['status']:
+                    log.debug("Resource and Metrics exists")
+
+                    alarm_info['alarm_create_request']['metric_name'] = metric_status['metric_name']
+                    # Generate a valid response message, send via producer
+                    config_resp = self.configure_alarm(alarm_info)  # alarm_info = message.value
+
+                    if config_resp is None:
+                        log.debug("Alarm Already exists")
+                        # TODO: This should return a response with status False
+                        return config_resp
+
+                    else:
+                        log.info("New alarm created with alarm info: %s", config_resp)
+                        return config_resp
+
+                else:
+                    log.error("Resource ID doesn't exist")
+
+            elif key == "acknowledge_alarm":
+                alarm_inner_dict = alarm_info['ack_details']
+
+                if self.check_resource(alarm_inner_dict['resource_uuid']):
+                    ack_details = self.get_ack_details(alarm_info)
+                    log.info("Acknowledge sent: %s", ack_details)
+                    return ack_details
+
+                else:
+                    log.error("Resource ID is Incorrect")
+
+            elif key == "update_alarm_request":
+                alarm_inner_dict = alarm_info['alarm_update_request']
+                metric_status = self.check_metric(alarm_inner_dict['metric_name'])
+
+                if metric_status['status']:
+                    log.debug("Resource and Metrics exists")
+                    alarm_info['alarm_update_request']['metric_name'] = metric_status['metric_name']
+                    # Generate a valid response message, send via producer
+                    update_resp = self.update_alarm_configuration(alarm_info)
+
+                    if update_resp is None:
+                        # TODO: This should return a response with status False
+                        log.debug("Alarm Already exists")
+                        return update_resp
+
+                    else:
+                        log.info("Alarm Updated with alarm info: %s", update_resp)
+                        return update_resp
+
+                else:
+                    log.info("Metric Not Supported")
+
+            elif key == "delete_alarm_request":
+                # Generate a valid response message, send via producer
+                del_resp = self.delete_alarm(alarm_info)
+                log.info("Alarm Deleted with alarm info: %s", del_resp)
+                return del_resp
+
+            elif key == "alarm_list_request":
+                alarm_inner_dict = alarm_info['alarm_list_request']
+
+                if self.check_resource(alarm_inner_dict['resource_uuid']) or alarm_inner_dict['resource_uuid'] == "":
+                    # Generate a valid response message, send via producer
+                    list_resp = self.get_alarms_list(alarm_info)  # ['alarm_names']
+                    return list_resp
+                else:
+                    log.error("Resource ID is Incorrect")
+
+            else:
+                raise UnsupportedOperation("Unknown key, no action will be performed")
+
+        except Exception as e:
+            log.error("Message retrieval exception: %s", str(e))
+
+    def check_resource(self, resource_uuid):
+        """Find a resource matching the given resource_uuid."""
+        try:
+            check_resp = dict()
+            instances = self.ec2_conn.get_all_instance_status()
+
+            # resource_id
+            for instance_id in instances:
+                instance_id = str(instance_id).split(':')[1]
+
+                if instance_id == resource_uuid:
+                    check_resp['resource_uuid'] = resource_uuid
+                    return True
+            return False
+
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
+
+    def check_metric(self, metric_name):
+        """ Checking whether the metric is supported by AWS """
+        try:
+            check_resp = dict()
+
+            # metric_name
+            if metric_name == 'CPU_UTILIZATION':
+                metric_name = 'CPUUtilization'
+                metric_status = True
+
+            elif metric_name == 'DISK_READ_OPS':
+                metric_name = 'DiskReadOps'
+                metric_status = True
+
+            elif metric_name == 'DISK_WRITE_OPS':
+                metric_name = 'DiskWriteOps'
+                metric_status = True
+
+            elif metric_name == 'DISK_READ_BYTES':
+                metric_name = 'DiskReadBytes'
+                metric_status = True
+
+            elif metric_name == 'DISK_WRITE_BYTES':
+                metric_name = 'DiskWriteBytes'
+                metric_status = True
+
+            elif metric_name == 'PACKETS_RECEIVED':
+                metric_name = 'NetworkPacketsIn'
+                metric_status = True
+
+            elif metric_name == 'PACKETS_SENT':
+                metric_name = 'NetworkPacketsOut'
+                metric_status = True
+
+            else:
+                metric_name = None
+                metric_status = False
+            check_resp['metric_name'] = metric_name
+            # status: always return a dict so callers can safely check ['status']
+            check_resp['status'] = metric_status
+            return check_resp
+
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
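
plugin_alarms is driven through alarm_calls() with the Kafka message key and the message body. A hedged dispatch sketch, assuming boto connections created out of band; every field value is a placeholder and only the keys that alarm_calls() itself reads are shown:

    import boto.ec2
    import boto.ec2.cloudwatch

    from osm_mon.plugins.CloudWatch.plugin_alarm import plugin_alarms

    aws_conn = {
        'cloudwatch_connection': boto.ec2.cloudwatch.connect_to_region('us-east-1'),
        'ec2_connection': boto.ec2.connect_to_region('us-east-1'),
    }
    request = {
        'alarm_create_request': {
            'metric_name': 'CPU_UTILIZATION',
            'resource_uuid': 'i-0123456789abcdef0',
            # alarm_name, severity, threshold_value, operation, statistic, ...
            # are consumed further down by MetricAlarm.config_alarm()
        }
    }
    response = plugin_alarms().alarm_calls('create_alarm_request', request, aws_conn)
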
diff --git a/osm_mon/plugins/CloudWatch/plugin_metric.py b/osm_mon/plugins/CloudWatch/plugin_metric.py
new file mode 100644 (file)
index 0000000..d31b608
--- /dev/null
@@ -0,0 +1,159 @@
+##
+# Copyright 2017 xFlow Research Pvt. Ltd
+# This file is part of MON module
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: wajeeha.hamid@xflowresearch.com
+##
+
+"""
+AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
+"""
+from io import UnsupportedOperation
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.CloudWatch.metrics import Metrics
+
+__author__ = "Wajeeha Hamid"
+__date__ = "18-September-2017"
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class plugin_metrics:
+    """Receives Alarm info from MetricAlarm and connects with the consumer/producer """
+
+    def __init__(self):
+        self._cfg = Config.instance()
+        self.metric = Metrics()
+
+    def create_metric_request(self, metric_info):
+        """Compatible API using normalized parameters"""
+        metric_resp = self.metric.createMetrics(self.cloudwatch_conn, metric_info)
+        return metric_resp
+
+    def update_metric_request(self, updated_info):
+        """Compatible API using normalized parameters"""
+        update_resp = self.metric.updateMetrics(self.cloudwatch_conn, updated_info)
+        return update_resp
+
+    def delete_metric_request(self, delete_info):
+        """Compatible API using normalized parameters"""
+        del_resp = self.metric.deleteMetrics(self.cloudwatch_conn, delete_info)
+        return del_resp
+
+    def list_metrics_request(self, list_info):
+        """Compatible API using normalized parameters"""
+        list_resp = self.metric.listMetrics(self.cloudwatch_conn, list_info)
+        return list_resp
+
+    def read_metrics_data(self, list_info):
+        """Compatible API using normalized parameters
+        Read all metric data related to a specified metric"""
+        data_resp = self.metric.metricsData(self.cloudwatch_conn, list_info)
+        return data_resp
+
+    def metric_calls(self, key: str, metric_info: dict, aws_conn: dict):
+        """Gets the message from the common consumer"""
+
+        try:
+            self.cloudwatch_conn = aws_conn['cloudwatch_connection']
+            self.ec2_conn = aws_conn['ec2_connection']
+
+            metric_response = dict()
+
+            log.debug("VIM support : AWS")
+
+            if key == "create_metric_request":
+                if self.check_resource(metric_info['metric_create_request']['resource_uuid']):
+                    metric_resp = self.create_metric_request(
+                        metric_info['metric_create_request'])  # alarm_info = message.value
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "create_metric_response"
+                    metric_response['metric_create_response'] = metric_resp
+                    log.info("Metric configured: %s", metric_resp)
+                    return metric_response
+
+            elif key == "update_metric_request":
+                if self.check_resource(metric_info['metric_create_request']['resource_uuid']):
+                    update_resp = self.update_metric_request(metric_info['metric_create_request'])
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "update_metric_response"
+                    metric_response['metric_update_response'] = update_resp
+                    log.info("Metric Updates: %s", metric_response)
+                    return metric_response
+
+            elif key == "delete_metric_request":
+                if self.check_resource(metric_info['resource_uuid']):
+                    del_resp = self.delete_metric_request(metric_info)
+                    log.info("Metric Deletion Not supported in AWS : %s", del_resp)
+                    return del_resp
+
+            elif key == "list_metric_request":
+                if self.check_resource(metric_info['metrics_list_request']['resource_uuid']):
+                    list_resp = self.list_metrics_request(metric_info['metrics_list_request'])
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "list_metric_response"
+                    metric_response['correlation_id'] = metric_info['metrics_list_request']['correlation_id']
+                    metric_response['vim_type'] = metric_info['vim_type']
+                    metric_response['metrics_list'] = list_resp
+                    log.info("Metric List: %s", metric_response)
+                    return metric_response
+
+            elif key == "read_metric_data_request":
+                if self.check_resource(metric_info['resource_uuid']):
+                    data_resp = self.read_metrics_data(metric_info)
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "read_metric_data_response"
+                    metric_response['metric_name'] = metric_info['metric_name']
+                    metric_response['metric_uuid'] = metric_info['metric_uuid']
+                    metric_response['correlation_id'] = metric_info['correlation_uuid']
+                    metric_response['resource_uuid'] = metric_info['resource_uuid']
+                    metric_response['tenant_uuid'] = metric_info['tenant_uuid']
+                    metric_response['metrics_data'] = data_resp
+                    log.info("Metric Data Response: %s", metric_response)
+                    return metric_response
+
+            else:
+                raise UnsupportedOperation("Unknown key, no action will be performed")
+
+        except Exception as e:
+            log.error("Consumer exception: %s", str(e))
+
+    def check_resource(self, resource_uuid):
+
+        """Check whether the resource_uuid is present among the EC2 instances."""
+        try:
+            check_resp = dict()
+            instances = self.ec2_conn.get_all_instance_status()
+            status_resource = False
+
+            # resource_id
+            for instance_id in instances:
+                instance_id = str(instance_id).split(':')[1]
+                if instance_id == resource_uuid:
+                    check_resp['resource_uuid'] = resource_uuid
+                    status_resource = True
+                    break
+
+            # status
+            return status_resource
+
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
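
plugin_metrics follows the same dispatch pattern via metric_calls(). A hedged sketch for a create_metric_request; connections and identifiers are placeholders:

    import boto.ec2
    import boto.ec2.cloudwatch

    from osm_mon.plugins.CloudWatch.plugin_metric import plugin_metrics

    aws_conn = {
        'cloudwatch_connection': boto.ec2.cloudwatch.connect_to_region('us-east-1'),
        'ec2_connection': boto.ec2.connect_to_region('us-east-1'),
    }
    request = {
        'schema_version': '1.0',
        'metric_create_request': {
            'metric_name': 'CPU_UTILIZATION',
            'resource_uuid': 'i-0123456789abcdef0',
        },
    }
    response = plugin_metrics().metric_calls('create_metric_request', request, aws_conn)
    # -> {'schema_version': '1.0', 'schema_type': 'create_metric_response',
    #     'metric_create_response': {'resource_uuid': ..., 'metric_uuid': 0, 'status': True}}
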
diff --git a/osm_mon/plugins/OpenStack/Aodh/__init__.py b/osm_mon/plugins/OpenStack/Aodh/__init__.py
new file mode 100644 (file)
index 0000000..d854d63
--- /dev/null
@@ -0,0 +1,22 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Aodh plugin for OSM MON."""
diff --git a/osm_mon/plugins/OpenStack/Aodh/alarm_handler.py b/osm_mon/plugins/OpenStack/Aodh/alarm_handler.py
new file mode 100644 (file)
index 0000000..7e6347f
--- /dev/null
@@ -0,0 +1,390 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Carry out alarming requests via Aodh API."""
+
+import json
+import logging
+from io import UnsupportedOperation
+
+import six
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.Gnocchi.metric_handler import METRIC_MAPPINGS
+from osm_mon.plugins.OpenStack.common import Common
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+log = logging.getLogger(__name__)
+
+SEVERITIES = {
+    "warning": "low",
+    "minor": "low",
+    "major": "moderate",
+    "critical": "critical",
+    "indeterminate": "critical"}
+
+STATISTICS = {
+    "average": "mean",
+    "minimum": "min",
+    "maximum": "max",
+    "count": "count",
+    "sum": "sum"}
+
+
+class OpenstackAlarmHandler(object):
+    """Carries out alarming requests and responses via Aodh API."""
+
+    def __init__(self):
+        """Create the OpenStack alarming instance."""
+        self._database_manager = DatabaseManager()
+        self._auth_manager = AuthManager()
+        self._cfg = Config.instance()
+
+        # Use the Response class to generate valid json response messages
+        self._response = OpenStackResponseBuilder()
+
+    def handle_message(self, key: str, values: dict, vim_uuid: str):
+        """
+        Processes an alarm request message depending on its key
+        :param key: Kafka message key
+        :param values: Dict containing alarm request data. Follows models defined in core.models.
+        :param vim_uuid: UUID of the VIM to handle the alarm request.
+        :return: Dict containing alarm response data. Follows models defined in core.models.
+        """
+
+        log.info("OpenStack alarm action required.")
+
+        verify_ssl = self._auth_manager.is_verify_ssl(vim_uuid)
+
+        auth_token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
+
+        alarm_endpoint = Common.get_endpoint("alarming", vim_uuid, verify_ssl=verify_ssl)
+        metric_endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
+
+        vim_account = self._auth_manager.get_credentials(vim_uuid)
+        vim_config = json.loads(vim_account.config)
+
+        if key == "create_alarm_request":
+            alarm_details = values['alarm_create_request']
+            alarm_id = None
+            status = False
+            try:
+                metric_name = alarm_details['metric_name'].lower()
+                resource_id = alarm_details['resource_uuid']
+
+                self.check_for_metric(auth_token, metric_endpoint, metric_name, resource_id, verify_ssl)
+
+                alarm_id = self.configure_alarm(
+                    alarm_endpoint, auth_token, alarm_details, vim_config, verify_ssl)
+
+                log.info("Alarm successfully created")
+                self._database_manager.save_alarm(alarm_id,
+                                                  vim_uuid,
+                                                  alarm_details['threshold_value'],
+                                                  alarm_details['operation'].lower(),
+                                                  alarm_details['metric_name'].lower(),
+                                                  alarm_details['vdu_name'].lower(),
+                                                  alarm_details['vnf_member_index'],
+                                                  alarm_details['ns_id'].lower()
+                                                  )
+                status = True
+            except Exception as e:
+                log.exception("Error creating alarm")
+                raise e
+            finally:
+                return self._response.generate_response('create_alarm_response',
+                                                        cor_id=alarm_details['correlation_id'],
+                                                        status=status,
+                                                        alarm_id=alarm_id)
+
+        elif key == "list_alarm_request":
+            list_details = values['alarm_list_request']
+            alarm_list = None
+            try:
+                alarm_list = self.list_alarms(
+                    alarm_endpoint, auth_token, list_details, verify_ssl)
+            except Exception as e:
+                log.exception("Error listing alarms")
+                raise e
+            finally:
+                return self._response.generate_response('list_alarm_response',
+                                                        cor_id=list_details['correlation_id'],
+                                                        alarm_list=alarm_list)
+
+        elif key == "delete_alarm_request":
+            request_details = values['alarm_delete_request']
+            alarm_id = request_details['alarm_uuid']
+            status = False
+            try:
+                self.delete_alarm(
+                    alarm_endpoint, auth_token, alarm_id, verify_ssl)
+                status = True
+            except Exception as e:
+                log.exception("Error deleting alarm")
+                raise e
+            finally:
+                return self._response.generate_response('delete_alarm_response',
+                                                        cor_id=request_details['correlation_id'],
+                                                        status=status,
+                                                        alarm_id=alarm_id)
+
+        elif key == "acknowledge_alarm_request":
+            try:
+                alarm_id = values['ack_details']['alarm_uuid']
+
+                self.update_alarm_state(
+                    alarm_endpoint, auth_token, alarm_id, verify_ssl)
+
+                log.info("Acknowledged the alarm and cleared it.")
+            except Exception as e:
+                log.exception("Error acknowledging alarm")
+                raise
+            finally:
+                return None
+
+        elif key == "update_alarm_request":
+            # Update alarm configurations
+            alarm_details = values['alarm_update_request']
+            alarm_id = None
+            status = False
+            try:
+                alarm_id = self.update_alarm(
+                    alarm_endpoint, auth_token, alarm_details, vim_config, verify_ssl)
+                status = True
+            except Exception as e:
+                log.exception("Error updating alarm")
+                raise e
+            finally:
+                return self._response.generate_response('update_alarm_response',
+                                                        cor_id=alarm_details['correlation_id'],
+                                                        status=status,
+                                                        alarm_id=alarm_id)
+
+        else:
+            raise UnsupportedOperation("Unknown key {}, no action will be performed.".format(key))
+
+    def configure_alarm(self, alarm_endpoint, auth_token, values, vim_config, verify_ssl):
+        """Create requested alarm in Aodh."""
+        url = "{}/v2/alarms/".format(alarm_endpoint)
+
+        # Check if the desired alarm is supported
+        alarm_name = values['alarm_name'].lower()
+        metric_name = values['metric_name'].lower()
+        resource_id = values['resource_uuid']
+
+        if metric_name not in METRIC_MAPPINGS.keys():
+            raise KeyError("Metric {} is not supported.".format(metric_name))
+
+        if 'granularity' in vim_config and 'granularity' not in values:
+            values['granularity'] = vim_config['granularity']
+        payload = self.check_payload(values, metric_name, resource_id,
+                                     alarm_name)
+        new_alarm = Common.perform_request(
+            url, auth_token, req_type="post", payload=payload, verify_ssl=verify_ssl)
+        return json.loads(new_alarm.text)['alarm_id']
+
+    def delete_alarm(self, endpoint, auth_token, alarm_id, verify_ssl):
+        """Delete alarm function."""
+        url = "{}/v2/alarms/{}".format(endpoint, alarm_id)
+
+        result = Common.perform_request(
+            url, auth_token, req_type="delete", verify_ssl=verify_ssl)
+        if str(result.status_code) == "404":
+            raise ValueError("Alarm {} doesn't exist".format(alarm_id))
+
+    def list_alarms(self, endpoint, auth_token, list_details, verify_ssl):
+        """Generate the requested list of alarms."""
+        url = "{}/v2/alarms/".format(endpoint)
+        a_list, name_list, sev_list, res_list = [], [], [], []
+
+        # TODO(mcgoughh): for now resource_id is a mandatory field
+        # Check for a resource id
+        try:
+            resource = list_details['resource_uuid']
+            name = list_details['alarm_name'].lower()
+            severity = list_details['severity'].lower()
+            sev = SEVERITIES[severity]
+        except KeyError as e:
+            log.warning("Missing parameter for alarm list request: %s", e)
+            raise e
+
+        # Perform the request to get the desired list
+        try:
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+
+            if result is not None:
+                # Get list based on resource id
+                for alarm in json.loads(result.text):
+                    rule = alarm['gnocchi_resources_threshold_rule']
+                    if resource == rule['resource_id']:
+                        res_list.append(alarm['alarm_id'])
+
+                # Generate specified listed if requested
+                if name is not None and sev is not None:
+                    log.info("Return a list of %s alarms with %s severity.",
+                             name, sev)
+                    for alarm in json.loads(result.text):
+                        if name == alarm['name']:
+                            name_list.append(alarm['alarm_id'])
+                    for alarm in json.loads(result.text):
+                        if sev == alarm['severity']:
+                            sev_list.append(alarm['alarm_id'])
+                    name_sev_list = list(set(name_list).intersection(sev_list))
+                    a_list = list(set(name_sev_list).intersection(res_list))
+                elif name is not None:
+                    log.info("Returning a %s list of alarms.", name)
+                    for alarm in json.loads(result.text):
+                        if name == alarm['name']:
+                            name_list.append(alarm['alarm_id'])
+                    a_list = list(set(name_list).intersection(res_list))
+                elif sev is not None:
+                    log.info("Returning %s severity alarm list.", sev)
+                    for alarm in json.loads(result.text):
+                        if sev == alarm['severity']:
+                            sev_list.append(alarm['alarm_id'])
+                    a_list = list(set(sev_list).intersection(res_list))
+                else:
+                    log.info("Returning an entire list of alarms.")
+                    a_list = res_list
+            else:
+                log.info("There are no alarms!")
+            response_list = []
+            for alarm in json.loads(result.text):
+                if alarm['alarm_id'] in a_list:
+                    response_list.append(alarm)
+            return response_list
+
+        except Exception as e:
+            log.exception("Failed to generate alarm list: ")
+            raise e
+
+    def update_alarm_state(self, endpoint, auth_token, alarm_id, verify_ssl):
+        """Set the state of an alarm to ok when ack message is received."""
+        url = "{}/v2/alarms/{}/state".format(endpoint, alarm_id)
+        payload = json.dumps("ok")
+
+        result = Common.perform_request(
+            url, auth_token, req_type="put", payload=payload, verify_ssl=verify_ssl)
+
+        return json.loads(result.text)
+
+    def update_alarm(self, endpoint, auth_token, values, vim_config, verify_ssl):
+        """Get alarm name for an alarm configuration update."""
+        # Get already existing alarm details
+        url = "{}/v2/alarms/{}".format(endpoint, values['alarm_uuid'])
+
+        # Gets current configurations about the alarm
+        result = Common.perform_request(
+            url, auth_token, req_type="get", verify_ssl=verify_ssl)
+        alarm = json.loads(result.text)
+        alarm_name = alarm['name']
+        rule = alarm['gnocchi_resources_threshold_rule']
+        alarm_state = alarm['state']
+        resource_id = rule['resource_id']
+        metric_name = [key for key, value in six.iteritems(METRIC_MAPPINGS) if value == rule['metric']][0]
+
+        # Generates and check payload configuration for alarm update
+        if 'granularity' in vim_config and 'granularity' not in values:
+            values['granularity'] = vim_config['granularity']
+        payload = self.check_payload(values, metric_name, resource_id,
+                                     alarm_name, alarm_state=alarm_state)
+
+        # Updates the alarm configurations with the valid payload
+        update_alarm = Common.perform_request(
+            url, auth_token, req_type="put", payload=payload, verify_ssl=verify_ssl)
+
+        return json.loads(update_alarm.text)['alarm_id']
+
+    def check_payload(self, values, metric_name, resource_id,
+                      alarm_name, alarm_state=None):
+        """Check that the payload is configuration for update/create alarm."""
+        cfg = Config.instance()
+        # Check state and severity
+
+        severity = 'critical'
+        if 'severity' in values:
+            severity = values['severity'].lower()
+
+        if severity == "indeterminate":
+            alarm_state = "insufficient data"
+        if alarm_state is None:
+            alarm_state = "ok"
+
+        statistic = values['statistic'].lower()
+
+        granularity = cfg.OS_DEFAULT_GRANULARITY
+        if 'granularity' in values:
+            granularity = values['granularity']
+
+        resource_type = 'generic'
+        if 'resource_type' in values:
+            resource_type = values['resource_type'].lower()
+
+        # Try to configure the payload for the update/create request
+        # Can only update: threshold, operation, statistic and
+        # the severity of the alarm
+        rule = {'threshold': values['threshold_value'],
+                'comparison_operator': values['operation'].lower(),
+                'metric': METRIC_MAPPINGS[metric_name],
+                'resource_id': resource_id,
+                'resource_type': resource_type,
+                'aggregation_method': STATISTICS[statistic],
+                'granularity': granularity, }
+        payload = json.dumps({'state': alarm_state,
+                              'name': alarm_name,
+                              'severity': SEVERITIES[severity],
+                              'type': 'gnocchi_resources_threshold',
+                              'gnocchi_resources_threshold_rule': rule,
+                              'alarm_actions': [cfg.OS_NOTIFIER_URI],
+                              'repeat_actions': True}, sort_keys=True)
+        return payload
+
+    def get_alarm_state(self, endpoint, auth_token, alarm_id):
+        """Get the state of the alarm."""
+        url = "{}/v2/alarms/{}/state".format(endpoint, alarm_id)
+
+        alarm_state = Common.perform_request(
+            url, auth_token, req_type="get")
+        return json.loads(alarm_state.text)
+
+    def check_for_metric(self, auth_token, metric_endpoint, metric_name, resource_id, verify_ssl):
+        """
+        Checks if resource has a specific metric. If not, throws exception.
+        :param verify_ssl: Boolean flag to set SSL cert validation
+        :param auth_token: OpenStack auth token
+        :param metric_endpoint: OpenStack metric endpoint
+        :param metric_name: Metric name
+        :param resource_id: Resource UUID
+        :return: Metric details from resource
+        :raise Exception: Could not retrieve metric from resource
+        """
+        try:
+            url = "{}/v1/resource/generic/{}".format(metric_endpoint, resource_id)
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+            resource = json.loads(result.text)
+            metrics_dict = resource['metrics']
+            return metrics_dict[METRIC_MAPPINGS[metric_name]]
+        except Exception as e:
+            log.exception("Desired Gnocchi metric not found: %s", e)
+            raise e
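
check_payload() assembles the gnocchi_resources_threshold body that configure_alarm() and update_alarm() send to the Aodh /v2/alarms endpoint. An illustrative rendering of that payload for a CPU alarm; the threshold, resource id and notifier URI are placeholders:

    example_payload = {
        "state": "ok",
        "name": "cpu_usage_above_80",
        "severity": "critical",                       # SEVERITIES mapping
        "type": "gnocchi_resources_threshold",
        "gnocchi_resources_threshold_rule": {
            "threshold": 80.0,
            "comparison_operator": "gt",
            "metric": "cpu_util",                     # METRIC_MAPPINGS['cpu_utilization']
            "resource_id": "<vdu-resource-uuid>",
            "resource_type": "generic",
            "aggregation_method": "mean",             # STATISTICS['average']
            "granularity": 300,
        },
        "alarm_actions": ["http://<mon-host>:8662"],  # cfg.OS_NOTIFIER_URI
        "repeat_actions": True,
    }
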
diff --git a/osm_mon/plugins/OpenStack/Aodh/notifier.py b/osm_mon/plugins/OpenStack/Aodh/notifier.py
new file mode 100644 (file)
index 0000000..698784a
--- /dev/null
@@ -0,0 +1,148 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+# __author__ = Helena McGough
+#
+"""A Webserver to send alarm notifications from Aodh to the SO."""
+import json
+import logging
+import os
+import re
+import sys
+import time
+
+from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
+from six.moves.BaseHTTPServer import HTTPServer
+
+# Initialise a logger for alarm notifier
+from osm_mon.core.message_bus.producer import Producer
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
+
+kafka_logger = logging.getLogger('kafka')
+kafka_logger.setLevel(logging.getLevelName(cfg.OSMMON_KAFKA_LOG_LEVEL))
+kafka_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+kafka_handler = logging.StreamHandler(sys.stdout)
+kafka_handler.setFormatter(kafka_formatter)
+kafka_logger.addHandler(kafka_handler)
+
+sys.path.append(os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..', '..')))
+
+from osm_mon.core.database import DatabaseManager
+
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+
+class NotifierHandler(BaseHTTPRequestHandler):
+    """Handler class for alarm_actions triggered by OSM alarms."""
+
+    def _set_headers(self):
+        """Set the headers for a request."""
+        self.send_response(200)
+        self.send_header('Content-type', 'text/html')
+        self.end_headers()
+
+    def do_GET(self):
+        """Get request functionality."""
+        self._set_headers()
+
+    def do_POST(self):
+        """POST request function."""
+        # Gets header and data from the post request and records info
+        self._set_headers()
+        # Gets the size of data
+        content_length = int(self.headers['Content-Length'])
+        post_data = self.rfile.read(content_length)
+        # Python 2/3 string compatibility
+        try:
+            post_data = post_data.decode()
+        except AttributeError:
+            pass
+        log.info("This alarm was triggered: %s", post_data)
+
+        # Send alarm notification to message bus
+        try:
+            self.notify_alarm(json.loads(post_data))
+        except Exception:
+            log.exception("Error notifying alarm")
+
+    def notify_alarm(self, values):
+        """Sends alarm notification message to bus."""
+
+        # Initialise configuration and authentication for response message
+        response = OpenStackResponseBuilder()
+
+        database_manager = DatabaseManager()
+
+        alarm_id = values['alarm_id']
+        alarm = database_manager.get_alarm(alarm_id, 'openstack')
+        # Process an alarm notification if resource_id is valid
+        # Get date and time for response message
+        a_date = time.strftime("%d-%m-%Y") + " " + time.strftime("%X")
+        # Generate and send response
+        resp_message = response.generate_response(
+            'notify_alarm',
+            alarm_id=alarm_id,
+            vdu_name=alarm.vdur_name,
+            vnf_member_index=alarm.vnf_member_index,
+            ns_id=alarm.nsr_id,
+            metric_name=alarm.monitoring_param,
+            operation=alarm.operation,
+            threshold_value=alarm.threshold,
+            sev=values['severity'],
+            date=a_date,
+            state=values['current'])
+        self._publish_response('notify_alarm', json.dumps(resp_message))
+        log.info("Sent alarm notification: %s", resp_message)
+
+    def _publish_response(self, key: str, msg: str):
+        producer = Producer()
+        producer.send(topic='alarm_response', key=key, value=msg)
+        producer.flush()
+
+
+def run(server_class=HTTPServer, handler_class=NotifierHandler, port=8662):
+    """Run the webserver application to retrieve alarm notifications."""
+    try:
+        server_address = ('', port)
+        httpd = server_class(server_address, handler_class)
+        log.info("Starting alarm notifier server on port: %s", port)
+        httpd.serve_forever()
+    except Exception as exc:
+        log.warning("Failed to start webserver, %s", exc)
+
+
+if __name__ == "__main__":
+    cfg = Config.instance()
+    p = re.compile(r':(\d+)', re.IGNORECASE)
+    m = p.search(cfg.OS_NOTIFIER_URI)
+    if m:
+        port = m.group(1)
+        run(port=int(port))
+    else:
+        run()
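
The notifier only needs the alarm_id, severity and current state from the Aodh callback body. A hedged local sketch of exercising it; the host, port and alarm UUID are placeholders, and the alarm must already exist in the MON database for notify_alarm() to resolve it:

    import json

    from six.moves.urllib.request import Request, urlopen

    notification = json.dumps({
        "alarm_id": "<aodh-alarm-uuid>",
        "severity": "critical",
        "current": "alarm",
    }).encode()
    urlopen(Request("http://localhost:8662", data=notification))
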
diff --git a/osm_mon/plugins/OpenStack/Gnocchi/__init__.py b/osm_mon/plugins/OpenStack/Gnocchi/__init__.py
new file mode 100644 (file)
index 0000000..cdbd056
--- /dev/null
@@ -0,0 +1,22 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Gnocchi plugin for OSM MON."""
diff --git a/osm_mon/plugins/OpenStack/Gnocchi/metric_handler.py b/osm_mon/plugins/OpenStack/Gnocchi/metric_handler.py
new file mode 100644 (file)
index 0000000..9736da8
--- /dev/null
@@ -0,0 +1,453 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Carry out OpenStack metric requests via Gnocchi API."""
+
+import datetime
+import json
+import logging
+import time
+
+import six
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.common import Common
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+log = logging.getLogger(__name__)
+
+METRIC_MAPPINGS = {
+    "average_memory_utilization": "memory.usage",
+    "disk_read_ops": "disk.read.requests",
+    "disk_write_ops": "disk.write.requests",
+    "disk_read_bytes": "disk.read.bytes",
+    "disk_write_bytes": "disk.write.bytes",
+    "packets_dropped": "interface.if_dropped",
+    "packets_received": "interface.if_packets",
+    "packets_sent": "interface.if_packets",
+    "cpu_utilization": "cpu_util",
+}
+
+PERIOD_MS = {
+    "HR": 3600000,
+    "DAY": 86400000,
+    "WEEK": 604800000,
+    "MONTH": 2629746000,
+    "YEAR": 31556952000
+}
+
+
+class OpenstackMetricHandler(object):
+    """OpenStack metric requests performed via the Gnocchi API."""
+
+    def __init__(self):
+        """Initialize the metric actions."""
+        self._cfg = Config.instance()
+
+        # Use the Response class to generate valid json response messages
+        self._response = OpenStackResponseBuilder()
+
+        self._auth_manager = AuthManager()
+
+    def handle_request(self, key: str, values: dict, vim_uuid: str) -> dict:
+        """
+        Processes a metric request message depending on its key
+        :param key: Kafka message key
+        :param values: Dict containing metric request data. Follows models defined in core.models.
+        :param vim_uuid: UUID of the VIM to handle the metric request.
+        :return: Dict containing metric response data. Follows models defined in core.models.
+        """
+
+        log.info("OpenStack metric action required.")
+
+        if 'metric_name' in values and values['metric_name'] not in METRIC_MAPPINGS.keys():
+            raise ValueError('Metric ' + values['metric_name'] + ' is not supported.')
+
+        verify_ssl = self._auth_manager.is_verify_ssl(vim_uuid)
+
+        endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
+
+        auth_token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
+
+        if key == "create_metric_request":
+            metric_details = values['metric_create_request']
+            status = False
+            metric_id = None
+            resource_id = None
+            try:
+                # Configure metric
+                metric_id, resource_id = self.configure_metric(endpoint, auth_token, metric_details, verify_ssl)
+                log.info("Metric successfully created")
+                status = True
+            except Exception as e:
+                log.exception("Error creating metric")
+                raise e
+            finally:
+                return self._response.generate_response('create_metric_response',
+                                                        cor_id=metric_details['correlation_id'],
+                                                        status=status,
+                                                        metric_id=metric_id,
+                                                        resource_id=resource_id)
+
+        elif key == "read_metric_data_request":
+            metric_id = None
+            timestamps = []
+            metric_data = []
+            status = False
+            try:
+                metric_id = self.get_metric_id(endpoint,
+                                               auth_token,
+                                               METRIC_MAPPINGS[values['metric_name']],
+                                               values['resource_uuid'],
+                                               verify_ssl)
+                # Read all metric data related to a specified metric
+                timestamps, metric_data = self.read_metric_data(endpoint, auth_token, values, verify_ssl)
+                log.info("Metric data collected successfully")
+                status = True
+            except Exception as e:
+                log.exception("Error reading metric data")
+                raise e
+            finally:
+                return self._response.generate_response('read_metric_data_response',
+                                                        cor_id=values['correlation_id'],
+                                                        status=status,
+                                                        metric_id=metric_id,
+                                                        metric_name=values['metric_name'],
+                                                        resource_id=values['resource_uuid'],
+                                                        times=timestamps,
+                                                        metrics=metric_data)
+
+        elif key == "delete_metric_request":
+            metric_id = None
+            status = False
+            try:
+                # Delete the metric specified in the request
+                metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
+                                               values['resource_uuid'], verify_ssl)
+                self.delete_metric(
+                    endpoint, auth_token, metric_id, verify_ssl)
+                log.info("Metric deleted successfully")
+                status = True
+
+            except Exception as e:
+                log.exception("Error deleting metric")
+                raise e
+            finally:
+                return self._response.generate_response('delete_metric_response',
+                                                        cor_id=values['correlation_id'],
+                                                        metric_id=metric_id,
+                                                        metric_name=values['metric_name'],
+                                                        status=status,
+                                                        resource_id=values['resource_uuid'])
+
+        elif key == "update_metric_request":
+            # Gnocchi doesn't support configuration updates
+            # Log and send a response back to this effect
+            log.warning("Gnocchi doesn't support metric configuration updates.")
+            req_details = values['metric_update_request']
+            metric_name = req_details['metric_name']
+            resource_id = req_details['resource_uuid']
+            metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[metric_name], resource_id, verify_ssl)
+            return self._response.generate_response('update_metric_response',
+                                                    cor_id=req_details['correlation_id'],
+                                                    status=False,
+                                                    resource_id=resource_id,
+                                                    metric_id=metric_id)
+
+        elif key == "list_metric_request":
+            list_details = values['metrics_list_request']
+            metric_list = []
+            status = False
+            try:
+                metric_list = self.list_metrics(
+                    endpoint, auth_token, list_details, verify_ssl)
+                log.info("Metrics listed successfully")
+                status = True
+            except Exception as e:
+                log.exception("Error listing metrics")
+                raise e
+            finally:
+                return self._response.generate_response('list_metric_response',
+                                                        cor_id=list_details['correlation_id'],
+                                                        status=status,
+                                                        metric_list=metric_list)
+
+        else:
+            raise ValueError("Unknown key {}, no action will be performed.".format(key))
+
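To make the message contract above concrete, here is a minimal, illustrative dispatch of a read_metric_data_request (the field names are the ones accessed by handle_request() and read_metric_data(); the UUIDs and values are placeholders, not real request data):

    handler = OpenstackMetricHandler()
    values = {
        "metric_name": "cpu_utilization",       # must be a key of METRIC_MAPPINGS
        "resource_uuid": "<vdu-resource-uuid>",
        "correlation_id": 123,
        "collection_unit": "DAY",               # one of the PERIOD_MS keys
        "collection_period": 1,
    }
    response = handler.handle_request("read_metric_data_request", values, "<vim-uuid>")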
+    def configure_metric(self, endpoint, auth_token, values, verify_ssl):
+        """Create the new metric in Gnocchi."""
+        required_fields = ['resource_uuid', 'metric_name']
+        for field in required_fields:
+            if field not in values:
+                raise ValueError("Missing field: " + field)
+
+        resource_id = values['resource_uuid']
+        metric_name = values['metric_name'].lower()
+
+        # Check for an existing metric for this resource
+        metric_id = self.get_metric_id(
+            endpoint, auth_token, metric_name, resource_id, verify_ssl)
+
+        if metric_id is None:
+            # Try appending metric to existing resource
+            try:
+                base_url = "{}/v1/resource/generic/%s/metric"
+                res_url = base_url.format(endpoint) % resource_id
+                payload = {metric_name: {'archive_policy_name': 'high',
+                                         'unit': values['metric_unit']}}
+                result = Common.perform_request(
+                    res_url,
+                    auth_token,
+                    req_type="post",
+                    verify_ssl=verify_ssl,
+                    payload=json.dumps(payload, sort_keys=True))
+                # Get id of newly created metric
+                for row in json.loads(result.text):
+                    if row['name'] == metric_name:
+                        metric_id = row['id']
+                log.info("Appended metric to existing resource.")
+
+                return metric_id, resource_id
+            except Exception as exc:
+                # The Gnocchi resource does not exist yet; create a new one
+                log.info("Failed to append metric to existing resource: %s",
+                         exc)
+                url = "{}/v1/resource/generic".format(endpoint)
+                metric = {'name': metric_name,
+                          'archive_policy_name': 'high',
+                          'unit': values['metric_unit'], }
+
+                resource_payload = json.dumps({'id': resource_id,
+                                               'metrics': {
+                                                   metric_name: metric}}, sort_keys=True)
+
+                resource = Common.perform_request(
+                    url,
+                    auth_token,
+                    req_type="post",
+                    payload=resource_payload,
+                    verify_ssl=verify_ssl)
+
+                # Return the newly created resource_id for creating alarms
+                new_resource_id = json.loads(resource.text)['id']
+                log.info("Created new resource for metric: %s",
+                         new_resource_id)
+
+                metric_id = self.get_metric_id(
+                    endpoint, auth_token, metric_name, new_resource_id, verify_ssl)
+
+                return metric_id, new_resource_id
+
+        else:
+            raise ValueError("Metric already exists for this resource")
+
+    def delete_metric(self, endpoint, auth_token, metric_id, verify_ssl):
+        """Delete metric."""
+        url = "{}/v1/metric/%s".format(endpoint) % metric_id
+
+        result = Common.perform_request(
+            url,
+            auth_token,
+            req_type="delete",
+            verify_ssl=verify_ssl)
+        if not str(result.status_code).startswith("2"):
+            log.warning("Failed to delete the metric.")
+            raise ValueError("Error deleting metric. Aodh API responded with code " + str(result.status_code))
+
+    def list_metrics(self, endpoint, auth_token, values, verify_ssl):
+        """List all metrics."""
+
+        # Check for filters specified in the request
+        metric_name = None
+        if 'metric_name' in values:
+            metric_name = values['metric_name'].lower()
+
+        resource = None
+        if 'resource_uuid' in values:
+            resource = values['resource_uuid']
+
+        if resource:
+            url = "{}/v1/resource/generic/{}".format(endpoint, resource)
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+            resource_data = json.loads(result.text)
+            metrics = resource_data['metrics']
+
+            if metric_name:
+                if metrics.get(METRIC_MAPPINGS[metric_name]):
+                    metric_id = metrics[METRIC_MAPPINGS[metric_name]]
+                    url = "{}/v1/metric/{}".format(endpoint, metric_id)
+                    result = Common.perform_request(
+                        url, auth_token, req_type="get", verify_ssl=verify_ssl)
+                    metric_list = json.loads(result.text)
+                    log.info("Returning an %s resource list for %s metrics",
+                             metric_name, resource)
+                    return metric_list
+                else:
+                    log.info("Metric {} not found for {} resource".format(metric_name, resource))
+                    return []
+            else:
+                metric_list = []
+                for k, v in metrics.items():
+                    url = "{}/v1/metric/{}".format(endpoint, v)
+                    result = Common.perform_request(
+                        url, auth_token, req_type="get", verify_ssl=verify_ssl)
+                    metric = json.loads(result.text)
+                    metric_list.append(metric)
+                if metric_list:
+                    log.info("Return a list of %s resource metrics", resource)
+                    return metric_list
+
+                else:
+                    log.info("There are no metrics available")
+                    return []
+        else:
+            url = "{}/v1/metric?sort=name:asc".format(endpoint)
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+            metrics = []
+            metrics_partial = json.loads(result.text)
+            for metric in metrics_partial:
+                metrics.append(metric)
+
+            while len(json.loads(result.text)) > 0:
+                last_metric_id = metrics_partial[-1]['id']
+                url = "{}/v1/metric?sort=name:asc&marker={}".format(endpoint, last_metric_id)
+                result = Common.perform_request(
+                    url, auth_token, req_type="get", verify_ssl=verify_ssl)
+                if len(json.loads(result.text)) > 0:
+                    metrics_partial = json.loads(result.text)
+                    for metric in metrics_partial:
+                        metrics.append(metric)
+
+            if metrics is not None:
+                # Format the list response
+                if metric_name is not None:
+                    metric_list = self.response_list(
+                        metrics, metric_name=metric_name)
+                    log.info("Returning a list of %s metrics", metric_name)
+                else:
+                    metric_list = self.response_list(metrics)
+                    log.info("Returning a complete list of metrics")
+                return metric_list
+            else:
+                log.info("There are no metrics available")
+                return []
+
+    def get_metric_id(self, endpoint, auth_token, metric_name, resource_id, verify_ssl):
+        """Check if the desired metric already exists for the resource."""
+        url = "{}/v1/resource/generic/%s".format(endpoint) % resource_id
+        try:
+            # Try to return the metric id if it exists
+            result = Common.perform_request(
+                url,
+                auth_token,
+                req_type="get",
+                verify_ssl=verify_ssl)
+            return json.loads(result.text)['metrics'][metric_name]
+        except KeyError as e:
+            log.error("Metric doesn't exist. No metric_id available")
+            raise e
+
+    def read_metric_data(self, endpoint, auth_token, values, verify_ssl):
+        """Collect metric measures over a specified time period."""
+        timestamps = []
+        data = []
+        # get metric_id
+        metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
+                                       values['resource_uuid'], verify_ssl)
+        # Try and collect measures
+        collection_unit = values['collection_unit'].upper()
+        collection_period = values['collection_period']
+
+        # Define the start and end time based on configurations
+        # FIXME: Local timezone may differ from timezone set in Gnocchi, causing discrepancies in measures
+        stop_time = time.strftime("%Y-%m-%d") + "T" + time.strftime("%X")
+        end_time = int(round(time.time() * 1000))
+        diff = collection_period * PERIOD_MS[collection_unit]
+        s_time = (end_time - diff) / 1000.0
+        start_time = datetime.datetime.fromtimestamp(s_time).strftime(
+            '%Y-%m-%dT%H:%M:%S.%f')
+        base_url = "{}/v1/metric/%(0)s/measures?start=%(1)s&stop=%(2)s"
+        url = base_url.format(endpoint) % {
+            "0": metric_id, "1": start_time, "2": stop_time}
+
+        # Perform metric data request
+        metric_data = Common.perform_request(
+            url,
+            auth_token,
+            req_type="get",
+            verify_ssl=verify_ssl)
+
+        # Generate a list of the requested timestamps and data
+        for r in json.loads(metric_data.text):
+            timestamp = r[0].replace("T", " ")
+            timestamps.append(timestamp)
+            data.append(r[2])
+
+        return timestamps, data
+
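A quick worked instance of the window arithmetic above, assuming a one-day collection window (values assumed, not taken from a real request):

    diff = 1 * PERIOD_MS["DAY"]           # 86400000 ms
    s_time = (end_time - diff) / 1000.0   # exactly 24 hours before 'now', in seconds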
+    def response_list(self, metric_list, metric_name=None, resource=None):
+        """Create the appropriate lists for a list response."""
+        resp_list, name_list, res_list = [], [], []
+
+        # Create required lists
+        for row in metric_list:
+            # Only list OSM metrics
+            name = None
+            if row['name'] in METRIC_MAPPINGS.values():
+                for k, v in six.iteritems(METRIC_MAPPINGS):
+                    if row['name'] == v:
+                        name = k
+                metric = {"metric_name": name,
+                          "metric_uuid": row['id'],
+                          "metric_unit": row['unit'],
+                          "resource_uuid": row['resource_id']}
+                resp_list.append(metric)
+            # Generate metric_name specific list
+            if metric_name is not None and name is not None:
+                if metric_name in METRIC_MAPPINGS.keys() and row['name'] == METRIC_MAPPINGS[metric_name]:
+                    metric = {"metric_name": metric_name,
+                              "metric_uuid": row['id'],
+                              "metric_unit": row['unit'],
+                              "resource_uuid": row['resource_id']}
+                    name_list.append(metric)
+            # Generate resource specific list
+            if resource is not None and name is not None:
+                if row['resource_id'] == resource:
+                    metric = {"metric_name": name,
+                              "metric_uuid": row['id'],
+                              "metric_unit": row['unit'],
+                              "resource_uuid": row['resource_id']}
+                    res_list.append(metric)
+
+        # Join required lists
+        if metric_name is not None and resource is not None:
+            # Return intersection of res_list and name_list
+            return [i for i in res_list for j in name_list if i['metric_uuid'] == j['metric_uuid']]
+        elif metric_name is not None:
+            return name_list
+        elif resource is not None:
+            return res_list
+        else:
+            return resp_list
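A minimal sketch of the reverse lookup that response_list() relies on (the row content is illustrative; only Gnocchi metric names present in METRIC_MAPPINGS are translated back to OSM metric names):

    row = {"name": "cpu_util", "id": "<metric-uuid>", "unit": "%",
           "resource_id": "<resource-uuid>"}
    osm_name = next(k for k, v in METRIC_MAPPINGS.items() if v == row["name"])
    # osm_name == "cpu_utilization"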
diff --git a/osm_mon/plugins/OpenStack/__init__.py b/osm_mon/plugins/OpenStack/__init__.py
new file mode 100644 (file)
index 0000000..18eed2f
--- /dev/null
@@ -0,0 +1,22 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""OpenStack plugin for OSM MON."""
diff --git a/osm_mon/plugins/OpenStack/common.py b/osm_mon/plugins/OpenStack/common.py
new file mode 100644 (file)
index 0000000..09c911d
--- /dev/null
@@ -0,0 +1,139 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Common methods for the OpenStack plugins."""
+import json
+import logging
+
+import requests
+import yaml
+from keystoneauth1 import session
+from keystoneauth1.identity import v3
+from keystoneclient.v3 import client
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.settings import Config
+
+__author__ = "Helena McGough"
+
+log = logging.getLogger(__name__)
+cfg = Config.instance()
+
+
+class Common(object):
+    """Common calls for Gnocchi/Aodh plugins."""
+
+    def __init__(self):
+        """Create the common instance."""
+
+    @staticmethod
+    def get_auth_token(vim_uuid, verify_ssl=True):
+        """Authenticate and/or renew the authentication token."""
+        auth_manager = AuthManager()
+        creds = auth_manager.get_credentials(vim_uuid)
+        sess = session.Session(verify=verify_ssl)
+        ks = client.Client(session=sess)
+        token_dict = ks.get_raw_token_from_identity_service(auth_url=creds.url,
+                                                            username=creds.user,
+                                                            password=creds.password,
+                                                            project_name=creds.tenant_name,
+                                                            project_domain_id='default',
+                                                            user_domain_id='default')
+        return token_dict['auth_token']
+
+    @staticmethod
+    def get_endpoint(service_type, vim_uuid, verify_ssl=True):
+        """
+        Gets the public endpoint for an OpenStack service in the configured region (default: RegionOne).
+        :param service_type: Service type name (e.g. metric or alarming)
+        :param vim_uuid: VIM UUID generated by OSM
+        :param verify_ssl: If False, disables SSL validation. Useful when using self-signed certs.
+        :return: Endpoint url string.
+
+        :raises ValueError: If no services are found, or if services exist but no endpoint matches the specified region.
+        """
+        auth_manager = AuthManager()
+        creds = auth_manager.get_credentials(vim_uuid)
+        auth = v3.Password(auth_url=creds.url,
+                           username=creds.user,
+                           password=creds.password,
+                           project_name=creds.tenant_name,
+                           project_domain_id='default',
+                           user_domain_id='default')
+        sess = session.Session(auth=auth, verify=verify_ssl)
+        ks = client.Client(session=sess, interface='public')
+        services = ks.services.list(type=service_type)
+        if not services:
+            raise ValueError("No services found for {}. Is the corresponding service enabled?".format(service_type))
+        service = services[0]
+        endpoints = ks.endpoints.list(service)
+        endpoint_type = 'publicURL'
+        region_name = 'RegionOne'
+        if creds.config is not None:
+            try:
+                config = json.loads(creds.config)
+            except ValueError:
+                config = yaml.safe_load(creds.config)
+            if 'endpoint_type' in config:
+                endpoint_type = config['endpoint_type']
+            if 'region_name' in config:
+                region_name = config['region_name']
+        for endpoint in endpoints:
+            if endpoint.interface in endpoint_type and endpoint.region == region_name:
+                return endpoint.url
+        raise ValueError("No endpoints found for service {} in region {}".format(service_type, region_name))
+
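If the VIM account carries extra configuration, get_endpoint() honours two optional keys. A hypothetical creds.config blob overriding both defaults might look like this (the keys are the ones parsed above; the values are examples only):

    creds_config = '{"endpoint_type": "internalURL", "region_name": "RegionTwo"}'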
+    @staticmethod
+    def perform_request(url, auth_token,
+                        req_type=None, payload=None, params=None, verify_ssl=True):
+        """Perform the POST/PUT/GET/DELETE request."""
+
+        timeout = cfg.REQUEST_TIMEOUT
+
+        # request headers
+        headers = {'X-Auth-Token': auth_token,
+                   'Content-type': 'application/json'}
+        # perform request and return its result
+        if req_type == "put":
+            response = requests.put(
+                url, data=payload, headers=headers,
+                timeout=timeout, verify=verify_ssl)
+        elif req_type == "get":
+            response = requests.get(
+                url, params=params, headers=headers, timeout=timeout, verify=verify_ssl)
+        elif req_type == "delete":
+            response = requests.delete(
+                url, headers=headers, timeout=timeout, verify=verify_ssl)
+        else:
+            response = requests.post(
+                url, data=payload, headers=headers,
+                timeout=timeout, verify=verify_ssl)
+
+        # Log a warning if the request returned an error status
+        try:
+            response.raise_for_status()
+        # pylint: disable=broad-except
+        except Exception:
+            # Log out the result of the request
+            log.warning(
+                'Result: %s, %s',
+                response.status_code, response.text)
+        return response
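A minimal usage sketch (illustrative only, not part of the patch) chaining the three Common helpers to list Gnocchi metrics for a VIM; the vim_uuid is a placeholder:

    vim_uuid = "<vim-uuid>"
    verify_ssl = True
    token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
    endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
    result = Common.perform_request(
        "{}/v1/metric".format(endpoint), token, req_type="get", verify_ssl=verify_ssl)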
diff --git a/osm_mon/plugins/OpenStack/response.py b/osm_mon/plugins/OpenStack/response.py
new file mode 100644 (file)
index 0000000..9f5b17d
--- /dev/null
@@ -0,0 +1,174 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Generate valid responses to send back to the SO."""
+
+import logging
+
+log = logging.getLogger(__name__)
+
+schema_version = "1.0"
+
+
+class OpenStackResponseBuilder(object):
+    """Generates responses for OpenStack plugin."""
+
+    def __init__(self):
+        """Initialize OpenStack Response instance."""
+
+    def generate_response(self, key, **kwargs) -> dict:
+        """Make call to appropriate response function."""
+        if key == "list_alarm_response":
+            message = self.alarm_list_response(**kwargs)
+        elif key == "create_alarm_response":
+            message = self.create_alarm_response(**kwargs)
+        elif key == "delete_alarm_response":
+            message = self.delete_alarm_response(**kwargs)
+        elif key == "update_alarm_response":
+            message = self.update_alarm_response(**kwargs)
+        elif key == "create_metric_response":
+            message = self.metric_create_response(**kwargs)
+        elif key == "read_metric_data_response":
+            message = self.read_metric_data_response(**kwargs)
+        elif key == "delete_metric_response":
+            message = self.delete_metric_response(**kwargs)
+        elif key == "update_metric_response":
+            message = self.update_metric_response(**kwargs)
+        elif key == "list_metric_response":
+            message = self.list_metric_response(**kwargs)
+        elif key == "notify_alarm":
+            message = self.notify_alarm(**kwargs)
+        else:
+            log.warning("Failed to generate a valid response message.")
+            message = None
+
+        return message
+
+    def alarm_list_response(self, **kwargs) -> dict:
+        """Generate the response for an alarm list request."""
+        alarm_list_resp = {"schema_version": schema_version,
+                           "schema_type": "list_alarm_response",
+                           "correlation_id": kwargs['cor_id'],
+                           "list_alarm_response": kwargs['alarm_list']}
+        return alarm_list_resp
+
+    def create_alarm_response(self, **kwargs) -> dict:
+        """Generate a response for a create alarm request."""
+        create_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "create_alarm_response",
+                             "alarm_create_response": {
+                                 "correlation_id": kwargs['cor_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "status": kwargs['status']}}
+        return create_alarm_resp
+
+    def delete_alarm_response(self, **kwargs) -> dict:
+        """Generate a response for a delete alarm request."""
+        delete_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "alarm_deletion_response",
+                             "alarm_deletion_response": {
+                                 "correlation_id": kwargs['cor_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "status": kwargs['status']}}
+        return delete_alarm_resp
+
+    def update_alarm_response(self, **kwargs) -> dict:
+        """Generate a response for an update alarm request."""
+        update_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "update_alarm_response",
+                             "alarm_update_response": {
+                                 "correlation_id": kwargs['cor_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "status": kwargs['status']}}
+        return update_alarm_resp
+
+    def metric_create_response(self, **kwargs) -> dict:
+        """Generate a response for a create metric request."""
+        create_metric_resp = {"schema_version": schema_version,
+                              "schema_type": "create_metric_response",
+                              "correlation_id": kwargs['cor_id'],
+                              "metric_create_response": {
+                                  "metric_uuid": kwargs['metric_id'],
+                                  "resource_uuid": kwargs['resource_id'],
+                                  "status": kwargs['status']}}
+        return create_metric_resp
+
+    def read_metric_data_response(self, **kwargs) -> dict:
+        """Generate a response for a read metric data request."""
+        read_metric_data_resp = {"schema_version": schema_version,
+                                 "schema_type": "read_metric_data_response",
+                                 "metric_name": kwargs['metric_name'],
+                                 "metric_uuid": kwargs['metric_id'],
+                                 "resource_uuid": kwargs['resource_id'],
+                                 "correlation_id": kwargs['cor_id'],
+                                 "status": kwargs['status'],
+                                 "metrics_data": {
+                                     "time_series": kwargs['times'],
+                                     "metrics_series": kwargs['metrics']}}
+        return read_metric_data_resp
+
+    def delete_metric_response(self, **kwargs) -> dict:
+        """Generate a response for a delete metric request."""
+        delete_metric_resp = {"schema_version": schema_version,
+                              "schema_type": "delete_metric_response",
+                              "metric_name": kwargs['metric_name'],
+                              "metric_uuid": kwargs['metric_id'],
+                              "resource_uuid": kwargs['resource_id'],
+                              "correlation_id": kwargs['cor_id'],
+                              "status": kwargs['status']}
+        return delete_metric_resp
+
+    def update_metric_response(self, **kwargs) -> dict:
+        """Generate a repsonse for an update metric request."""
+        update_metric_resp = {"schema_version": schema_version,
+                              "schema_type": "update_metric_response",
+                              "correlation_id": kwargs['cor_id'],
+                              "metric_update_response": {
+                                  "metric_uuid": kwargs['metric_id'],
+                                  "status": kwargs['status'],
+                                  "resource_uuid": kwargs['resource_id']}}
+        return update_metric_resp
+
+    def list_metric_response(self, **kwargs) -> dict:
+        """Generate a response for a list metric request."""
+        list_metric_resp = {"schema_version": schema_version,
+                            "schema_type": "list_metric_response",
+                            "correlation_id": kwargs['cor_id'],
+                            "status": kwargs['status'],
+                            "metrics_list": kwargs['metric_list']}
+        return list_metric_resp
+
+    def notify_alarm(self, **kwargs) -> dict:
+        """Generate a response to send alarm notifications."""
+        notify_alarm_resp = {"schema_version": schema_version,
+                             "schema_type": "notify_alarm",
+                             "notify_details": {
+                                 "alarm_uuid": kwargs['alarm_id'],
+                                 "vdu_name": kwargs['vdu_name'],
+                                 "vnf_member_index": kwargs['vnf_member_index'],
+                                 "ns_id": kwargs['ns_id'],
+                                 "metric_name": kwargs['metric_name'],
+                                 "threshold_value": kwargs['threshold_value'],
+                                 "operation": kwargs['operation'],
+                                 "severity": kwargs['sev'],
+                                 "status": kwargs['state'],
+                                 "start_date": kwargs['date']}}
+        return notify_alarm_resp
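As an illustration, building a create_metric_response with the builder above (the kwargs mirror those passed by the Gnocchi handler; the IDs are placeholders):

    builder = OpenStackResponseBuilder()
    msg = builder.generate_response("create_metric_response",
                                    cor_id=123,
                                    status=True,
                                    metric_id="<metric-uuid>",
                                    resource_id="<resource-uuid>")
    # msg["schema_type"] == "create_metric_response"
    # msg["metric_create_response"]["metric_uuid"] == "<metric-uuid>"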
diff --git a/osm_mon/plugins/OpenStack/singleton.py b/osm_mon/plugins/OpenStack/singleton.py
new file mode 100644 (file)
index 0000000..59c5ee5
--- /dev/null
@@ -0,0 +1,40 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Simple singleton class."""
+
+from __future__ import unicode_literals
+
+
+class Singleton(object):
+    """Simple singleton class."""
+
+    def __init__(self, decorated):
+        """Initialize singleton instance."""
+        self._decorated = decorated
+
+    def instance(self):
+        """Return singleton instance."""
+        try:
+            return self._instance
+        except AttributeError:
+            self._instance = self._decorated()
+            return self._instance
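A short sketch of how this wrapper is meant to be applied (the class name here is hypothetical; Config in osm_mon.core.settings is accessed the same way via Config.instance()):

    @Singleton
    class HypotheticalConfig(object):
        def __init__(self):
            self.value = 42

    cfg_a = HypotheticalConfig.instance()
    cfg_b = HypotheticalConfig.instance()
    assert cfg_a is cfg_b   # both calls return the same cached instance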
diff --git a/osm_mon/plugins/__init__.py b/osm_mon/plugins/__init__.py
new file mode 100644 (file)
index 0000000..316993c
--- /dev/null
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Plugins for OSM MON."""
diff --git a/osm_mon/plugins/vRealiseOps/__init__.py b/osm_mon/plugins/vRealiseOps/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py b/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py
new file mode 100644 (file)
index 0000000..af68dfc
--- /dev/null
@@ -0,0 +1,1288 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+"""
+Monitoring metrics & creating Alarm definitions in vROPs
+"""
+import logging
+
+import pytz
+import requests
+import six
+from pyvcloud.vcd.client import BasicLoginCredentials
+from pyvcloud.vcd.client import Client
+
+from xml.etree import ElementTree as XmlElementTree
+import traceback
+import time
+import json
+from OpenSSL.crypto import load_certificate, FILETYPE_PEM
+import os
+import sys
+import datetime
+from socket import getfqdn
+
+import urllib3
+
+from osm_mon.core.settings import Config
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
+from osm_mon.core.database import DatabaseManager
+
+API_VERSION = '5.9'
+
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+OPERATION_MAPPING = {'GE': 'GT_EQ', 'LE': 'LT_EQ', 'GT': 'GT', 'LT': 'LT', 'EQ': 'EQ'}
+severity_mano2vrops = {'WARNING': 'WARNING', 'MINOR': 'WARNING', 'MAJOR': "IMMEDIATE",
+                       'CRITICAL': 'CRITICAL', 'INDETERMINATE': 'UNKNOWN'}
+PERIOD_MSEC = {'HR': 3600000, 'DAY': 86400000, 'WEEK': 604800000, 'MONTH': 2678400000, 'YEAR': 31536000000}
+
+# To Do - Add actual webhook url & certificate
+# SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/www.vrops_webservice.com.cert'
+# webhook_url = "https://mano-dev-1:8080/notify/" #for testing
+webhook_url = "https://" + getfqdn() + ":8080/notify/"
+SSL_CERTIFICATE_FILE_NAME = ('vROPs_Webservice/SSL_certificate/' + getfqdn() + ".cert")
+# SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/10.172.137.214.cert' #for testing
+
+MODULE_DIR = os.path.dirname(__file__)
+CONFIG_FILE_NAME = 'vrops_config.xml'
+CONFIG_FILE_PATH = os.path.join(MODULE_DIR, CONFIG_FILE_NAME)
+SSL_CERTIFICATE_FILE_PATH = os.path.join(MODULE_DIR, SSL_CERTIFICATE_FILE_NAME)
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+
+logger = logging.getLogger(__name__)
+
+
+class MonPlugin:
+    """MON Plugin class for vROPs telemetry plugin
+    """
+
+    def __init__(self, access_config=None):
+        """Constructor of MON plugin
+        Params:
+            'access_config': dictionary with VIM access information based on VIM type.
+            This contains a consolidated version of the VIM & monitoring tool config provided at creation,
+            plus the particular VIM config provided at attachment.
+            For VIM type: 'vmware',
+            access_config - {'vrops_site':<>, 'vrops_user':<>, 'vrops_password':<>,
+                            'vcloud-site':<>,'admin_username':<>,'admin_password':<>,
+                            'nsx_manager':<>,'nsx_user':<>,'nsx_password':<>,
+                            'vcenter_ip':<>,'vcenter_port':<>,'vcenter_user':<>,'vcenter_password':<>,
+                            'vim_tenant_name':<>,'orgname':<>}
+
+        #To Do
+        Returns: Raise an exception if some needed parameter is missing, but it must not do any connectivity
+            check against the VIM
+        """
+
+        if access_config is None:
+            logger.error("VIM Access Configuration not provided")
+            raise KeyError("VIM Access Configuration not provided")
+
+        self.database_manager = DatabaseManager()
+
+        self.access_config = access_config
+        if not bool(access_config):
+            logger.error("VIM Account details are not added. Please add a VIM account")
+            raise KeyError("VIM Account details are not added. Please add a VIM account")
+
+        try:
+            self.vrops_site = access_config['vrops_site']
+            self.vrops_user = access_config['vrops_user']
+            self.vrops_password = access_config['vrops_password']
+            self.vcloud_site = access_config['vim_url']
+            self.admin_username = access_config['admin_username']
+            self.admin_password = access_config['admin_password']
+            # self.tenant_id = access_config['tenant_id']
+            self.vim_uuid = access_config['vim_uuid']
+
+        except KeyError as exp:
+            logger.error("Required VIM account details not provided: {}".format(exp))
+            raise KeyError("Required VIM account details not provided: {}".format(exp))
+
+    def configure_alarm(self, config_dict={}):
+        """Configures or creates a new alarm using the input parameters in config_dict
+        Params:
+        "alarm_name": Alarm name in string format
+        "description": Description of alarm in string format
+        "resource_uuid": Resource UUID for which alarm needs to be configured. in string format
+        "Resource type": String resource type: 'VDU' or 'host'
+        "Severity": 'WARNING', 'MINOR', 'MAJOR', 'CRITICAL'
+        "metric_name": Metric key in string format
+        "operation": One of ('GE', 'LE', 'GT', 'LT', 'EQ')
+        "threshold_value": Defines the threshold (up to 2 fraction digits) that,
+                            if crossed, will trigger the alarm.
+        "statistic": AVERAGE, MINIMUM, MAXIMUM, COUNT, SUM
+
+        Default parameters for each alarm are read from the plugin specific config file.
+        Dict of default parameters is as follows:
+        default_params keys = {'cancel_cycles','wait_cycles','resource_kind','adapter_kind',
+                               'alarm_type','alarm_subType','impact'}
+
+        Returns the UUID of created alarm or None
+        """
+        # 1) get alarm & metrics parameters from plugin specific file
+        def_a_params = self.get_default_Params(config_dict['alarm_name'])
+        if not def_a_params:
+            logger.warning("Alarm not supported: {}".format(config_dict['alarm_name']))
+            return None
+        metric_key_params = self.get_default_Params(config_dict['metric_name'])
+        if not metric_key_params:
+            logger.warning("Metric not supported: {}".format(config_dict['metric_name']))
+            return None
+
+        # 1.2) Check if alarm definition already exists
+        vrops_alarm_name = def_a_params['vrops_alarm'] + '-' + config_dict['resource_uuid']
+        alert_def_list = self.get_alarm_defination_by_name(vrops_alarm_name)
+        if alert_def_list:
+            logger.warning("Alarm already exists: {}. Try updating by update_alarm_request"
+                           .format(vrops_alarm_name))
+            return None
+
+        # 2) create symptom definition
+        symptom_params = {'cancel_cycles': (def_a_params['cancel_period'] / 300) * def_a_params['cancel_cycles'],
+                          'wait_cycles': (def_a_params['period'] / 300) * def_a_params['evaluation'],
+                          'resource_kind_key': def_a_params['resource_kind'],
+                          'adapter_kind_key': def_a_params['adapter_kind'],
+                          'symptom_name': vrops_alarm_name,
+                          'severity': severity_mano2vrops[config_dict['severity'].upper()],
+                          'metric_key': metric_key_params['metric_key'],
+                          'operation': OPERATION_MAPPING[config_dict['operation']],
+                          'threshold_value': config_dict['threshold_value']}
+
+        symptom_uuid = self.create_symptom(symptom_params)
+        if symptom_uuid is not None:
+            logger.info("Symptom defined: {} with ID: {}".format(symptom_params['symptom_name'], symptom_uuid))
+        else:
+            logger.warning("Failed to create Symptom: {}".format(symptom_params['symptom_name']))
+            return None
+        # 3) create alert definition
+        # To Do - Get type & subtypes for all 5 alarms
+        alarm_params = {'name': vrops_alarm_name,
+                        'description': config_dict['description']
+                        if 'description' in config_dict and config_dict['description'] is not None else config_dict[
+                            'alarm_name'],
+                        'adapterKindKey': def_a_params['adapter_kind'],
+                        'resourceKindKey': def_a_params['resource_kind'],
+                        'waitCycles': 1, 'cancelCycles': 1,
+                        'type': def_a_params['alarm_type'], 'subType': def_a_params['alarm_subType'],
+                        'severity': severity_mano2vrops[config_dict['severity'].upper()],
+                        'symptomDefinitionId': symptom_uuid,
+                        'impact': def_a_params['impact']}
+
+        alarm_def = self.create_alarm_definition(alarm_params)
+        if alarm_def is None:
+            logger.warning("Failed to create Alert: {}".format(alarm_params['name']))
+            return None
+
+        logger.info("Alarm defined: {} with ID: {}".format(alarm_params['name'], alarm_def))
+
+        # 4) Find vm_moref_id from vApp uuid in vCD
+        vm_moref_id = self.get_vm_moref_id(config_dict['resource_uuid'])
+        if vm_moref_id is None:
+            logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(config_dict['resource_uuid']))
+            return None
+
+        # 5) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        resource_id = self.get_vm_resource_id(vm_moref_id)
+        if resource_id is None:
+            logger.warning("Failed to find resource in vROPs: {}".format(config_dict['resource_uuid']))
+            return None
+
+        # 6) Configure alarm notification for a particular VM using its resource_id
+        notification_id = self.create_alarm_notification_rule(vrops_alarm_name, alarm_def, resource_id)
+        if notification_id is None:
+            return None
+        else:
+            alarm_def_uuid = alarm_def.split('-', 1)[1]
+            logger.info("Alarm definition created with notification: {} with ID: {}"
+                        .format(alarm_params['name'], alarm_def_uuid))
+            self.database_manager.save_alarm(alarm_params['name'],
+                                             self.vim_uuid,
+                                             config_dict['threshold_value'],
+                                             config_dict['operation'],
+                                             config_dict['metric_name'].lower(),
+                                             config_dict['vdu_name'].lower(),
+                                             config_dict['vnf_member_index'].lower(),
+                                             config_dict['ns_id'].lower()
+                                             )
+
+            # Return alarm definition UUID by removing 'AlertDefinition' from UUID
+            return (alarm_def_uuid)
+
+    def get_default_Params(self, metric_alarm_name):
+        """
+        Read the default config parameters from the plugin-specific file stored alongside the plugin.
+        Params:
+            metric_alarm_name: Name of the alarm whose config parameters are to be read from the config file.
+        """
+        a_params = {}
+        try:
+            source = open(CONFIG_FILE_PATH, 'r')
+        except IOError as exp:
+            msg = ("Could not read Config file: {},  nException: {}"
+                   .format(CONFIG_FILE_PATH, exp))
+            logger.error(msg)
+            raise IOError(msg)
+
+        tree = XmlElementTree.parse(source)
+        alarms = tree.getroot()
+        for alarm in alarms:
+            if alarm.tag.lower() == metric_alarm_name.lower():
+                for param in alarm:
+                    if param.tag in ("period", "evaluation", "cancel_period", "alarm_type",
+                                     "cancel_cycles", "alarm_subType"):
+                        a_params[param.tag] = int(param.text)
+                    elif param.tag in ("enabled", "repeat"):
+                        if param.text.lower() == "true":
+                            a_params[param.tag] = True
+                        else:
+                            a_params[param.tag] = False
+                    else:
+                        a_params[param.tag] = param.text
+        source.close()
+        return a_params
+
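For orientation, this is the kind of dict get_default_Params() is expected to yield for a supported alarm (the keys are the ones consumed by configure_alarm() above; every value below is a placeholder, the real ones live in vrops_config.xml):

    default_params = {
        'vrops_alarm': 'Avg_Mem_Usage_Above_Thr',   # placeholder name
        'period': 300, 'evaluation': 1,
        'cancel_period': 300, 'cancel_cycles': 1,
        'resource_kind': 'VirtualMachine', 'adapter_kind': 'VMWARE',
        'alarm_type': 16, 'alarm_subType': 19,
        'impact': 'risk', 'enabled': True, 'repeat': False,
    }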
+    def create_symptom(self, symptom_params):
+        """Create Symptom definition for an alarm
+        Params:
+        symptom_params: Dict of parameters required for defining a symptom as follows
+            cancel_cycles
+            wait_cycles
+            resource_kind_key = "VirtualMachine"
+            adapter_kind_key = "VMWARE"
+            symptom_name = Test_Memory_Usage_TooHigh
+            severity
+            metric_key
+            operation = GT_EQ
+            threshold_value = 85
+        Returns the uuid of Symptom definition
+        """
+        symptom_id = None
+
+        try:
+            api_url = '/suite-api/api/symptomdefinitions'
+            headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+            data = {
+                "id": None,
+                "name": symptom_params['symptom_name'],
+                "adapterKindKey": symptom_params['adapter_kind_key'],
+                "resourceKindKey": symptom_params['resource_kind_key'],
+                "waitCycles": symptom_params['wait_cycles'],
+                "cancelCycles": symptom_params['cancel_cycles'],
+                "state": {
+                    "severity": symptom_params['severity'],
+                    "condition": {
+                        "type": "CONDITION_HT",
+                        "key": symptom_params['metric_key'],
+                        "operator": symptom_params['operation'],
+                        "value": symptom_params['threshold_value'],
+                        "valueType": "NUMERIC",
+                        "instanced": False,
+                        "thresholdType": "STATIC"
+                    }
+                }
+            }
+
+            resp = requests.post(self.vrops_site + api_url,
+                                 auth=(self.vrops_user, self.vrops_password),
+                                 headers=headers,
+                                 verify=False,
+                                 data=json.dumps(data))
+
+            if resp.status_code != 201:
+                logger.warning("Failed to create Symptom definition: {}, response {}"
+                               .format(symptom_params['symptom_name'], resp.content))
+                return None
+
+            resp_data = json.loads(resp.content)
+            if resp_data.get('id') is not None:
+                symptom_id = resp_data['id']
+
+            return symptom_id
+
+        except Exception as exp:
+            logger.warning("Error creating symptom definition : {} n{}"
+                           .format(exp, traceback.format_exc()))
+
+    def create_alarm_definition(self, alarm_params):
+        """
+        Create an alarm definition in vROPs
+        Params:
+            'name': Alarm Name,
+            'description':Alarm description,
+            'adapterKindKey': Adapter type in vROPs "VMWARE",
+            'resourceKindKey':Resource type in vROPs "VirtualMachine",
+            'waitCycles': No of wait cycles,
+            'cancelCycles': No of cancel cycles,
+            'type': Alarm type,
+            'subType': Alarm subtype,
+            'severity': Severity in vROPs "CRITICAL",
+            'symptomDefinitionId':symptom Definition uuid,
+            'impact': impact 'risk'
+        Returns:
+            'alarm_uuid': returns alarm uuid
+        """
+
+        alarm_uuid = None
+
+        try:
+            api_url = '/suite-api/api/alertdefinitions'
+            headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+            data = {
+                "name": alarm_params['name'],
+                "description": alarm_params['description'],
+                "adapterKindKey": alarm_params['adapterKindKey'],
+                "resourceKindKey": alarm_params['resourceKindKey'],
+                "waitCycles": 1,
+                "cancelCycles": 1,
+                "type": alarm_params['type'],
+                "subType": alarm_params['subType'],
+                "states": [
+                    {
+                        "severity": alarm_params['severity'],
+                        "base-symptom-set":
+                            {
+                                "type": "SYMPTOM_SET",
+                                "relation": "SELF",
+                                "aggregation": "ALL",
+                                "symptomSetOperator": "AND",
+                                "symptomDefinitionIds": [alarm_params['symptomDefinitionId']]
+                            },
+                        "impact": {
+                            "impactType": "BADGE",
+                            "detail": alarm_params['impact']
+                        }
+                    }
+                ]
+            }
+
+            resp = requests.post(self.vrops_site + api_url,
+                                 auth=(self.vrops_user, self.vrops_password),
+                                 headers=headers,
+                                 verify=False,
+                                 data=json.dumps(data))
+
+            if resp.status_code != 201:
+                logger.warning("Failed to create Alarm definition: {}, response {}"
+                               .format(alarm_params['name'], resp.content))
+                return None
+
+            resp_data = json.loads(resp.content)
+            if resp_data.get('id') is not None:
+                alarm_uuid = resp_data['id']
+
+            return alarm_uuid
+
+        except Exception as exp:
+            logger.warning("Error creating alarm definition : {} n{}".format(exp, traceback.format_exc()))
+
+    def configure_rest_plugin(self):
+        """
+        Creates REST Plug-in for vROPs outbound alerts
+
+        Returns Plugin ID
+        """
+        plugin_id = None
+        plugin_name = 'MON_module_REST_Plugin'
+        plugin_id = self.check_if_plugin_configured(plugin_name)
+
+        # If the REST plugin is already configured, reuse it; otherwise create it
+        if plugin_id is not None:
+            return plugin_id
+        else:
+            try:
+                cert_file_string = open(SSL_CERTIFICATE_FILE_PATH, "rb").read()
+            except IOError as exp:
+                msg = ("Could not read SSL certificate file: {}".format(SSL_CERTIFICATE_FILE_PATH))
+                logger.error(msg)
+                raise IOError(msg)
+            cert = load_certificate(FILETYPE_PEM, cert_file_string)
+            certificate = cert.digest("sha1")
+            api_url = '/suite-api/api/alertplugins'
+            headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+            data = {
+                "pluginTypeId": "RestPlugin",
+                "name": plugin_name,
+                "configValues": [
+                    {
+                        "name": "Url",
+                        "value": webhook_url
+                    },
+                    {
+                        "name": "Content-type",
+                        "value": "application/json"
+                    },
+                    {
+                        "name": "Certificate",
+                        "value": certificate
+                    },
+                    {
+                        "name": "ConnectionCount",
+                        "value": "20"
+                    }
+                ]
+            }
+
+            resp = requests.post(self.vrops_site + api_url,
+                                 auth=(self.vrops_user, self.vrops_password),
+                                 headers=headers,
+                                 verify=False,
+                                 data=json.dumps(data))
+
+            if resp.status_code != 201:
+                logger.warning("Failed to create REST Plugin: {} for url: {},\nresponse code: {},"
+                               "\nresponse content: {}".format(plugin_name, webhook_url,
+                                                               resp.status_code, resp.content))
+                return None
+
+            resp_data = json.loads(resp.content)
+            if resp_data.get('pluginId') is not None:
+                plugin_id = resp_data['pluginId']
+
+            if plugin_id is None:
+                logger.warning("Failed to get REST Plugin ID for {}, url: {}".format(plugin_name, webhook_url))
+                return None
+            else:
+                logger.info(
+                    "Created REST Plugin: {} with ID : {} for url: {}".format(plugin_name, plugin_id, webhook_url))
+                status = self.enable_rest_plugin(plugin_id, plugin_name)
+                if status is False:
+                    logger.warning(
+                        "Failed to enable created REST Plugin: {} for url: {}".format(plugin_name, webhook_url))
+                    return None
+                else:
+                    logger.info("Enabled REST Plugin: {} for url: {}".format(plugin_name, webhook_url))
+                    return plugin_id
+
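For reference, a minimal sketch (not part of this patch) of how the SHA-1 fingerprint passed as the "Certificate" config value can be produced with pyOpenSSL; cert_path is a hypothetical stand-in for SSL_CERTIFICATE_FILE_PATH:

    from OpenSSL.crypto import FILETYPE_PEM, load_certificate

    def sha1_fingerprint(cert_path):
        # Read a PEM-encoded certificate and return its SHA-1 digest
        # (e.g. 'AB:CD:...'), which the vROPs REST alert plugin expects.
        with open(cert_path, "rb") as cert_file:
            cert = load_certificate(FILETYPE_PEM, cert_file.read())
        return cert.digest("sha1").decode("utf-8")
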
+    def check_if_plugin_configured(self, plugin_name):
+        """Check if the REST plugin is already created
+        Returns: plugin_id: if already created, None: if needs to be created
+        """
+        plugin_id = None
+        # Find the REST Plugin id details for - MON_module_REST_Plugin
+        api_url = '/suite-api/api/alertplugins'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("Failed to get alarm plugin details via REST API\nResponse code: {}\nResponse content: {}"
+                           .format(resp.status_code, resp.content))
+            return None
+
+        # Look for specific plugin & parse pluginId for 'MON_module_REST_Plugin'
+        plugins_list = json.loads(resp.content)
+        if plugins_list.get('notificationPluginInstances') is not None:
+            for notify_plugin in plugins_list['notificationPluginInstances']:
+                if notify_plugin.get('name') is not None and notify_plugin['name'] == plugin_name:
+                    plugin_id = notify_plugin.get('pluginId')
+
+        if plugin_id is None:
+            logger.warning("REST plugin {} not found".format(plugin_name))
+            return None
+        else:
+            logger.info("Found REST Plugin: {}".format(plugin_name))
+            return plugin_id
+
+    def enable_rest_plugin(self, plugin_id, plugin_name):
+        """
+        Enable the REST plugin using plugin_id
+        Params: plugin_id: plugin ID string that is to be enabled
+        Returns: status (Boolean) - True for success, False for failure
+        """
+
+        if plugin_id is None or plugin_name is None:
+            logger.debug("enable_rest_plugin() : Plugin ID or plugin_name not provided for {} plugin"
+                         .format(plugin_name))
+            return False
+
+        try:
+            api_url = "/suite-api/api/alertplugins/{}/enable/True".format(plugin_id)
+
+            resp = requests.put(self.vrops_site + api_url,
+                                auth=(self.vrops_user, self.vrops_password),
+                                verify=False)
+
+            if resp.status_code != 204:
+                logger.warning("Failed to enable REST plugin {}.\nResponse code: {}\nResponse content: {}"
+                               .format(plugin_name, resp.status_code, resp.content))
+                return False
+
+            logger.info("Enabled REST plugin {}.".format(plugin_name))
+            return True
+
+        except Exception as exp:
+            logger.warning("Error enabling REST plugin {}: Exception: {}\n{}"
+                           .format(plugin_name, exp, traceback.format_exc()))
+            return False
+
+    def create_alarm_notification_rule(self, alarm_name, alarm_id, resource_id):
+        """
+        Create notification rule for each alarm
+        Params:
+            alarm_name
+            alarm_id
+            resource_id
+
+        Returns:
+            notification_id: notification_id or None
+        """
+        notification_name = 'notify_' + alarm_name
+        notification_id = None
+        plugin_name = 'MON_module_REST_Plugin'
+
+        # 1) Find the REST Plugin id details for - MON_module_REST_Plugin
+        plugin_id = self.check_if_plugin_configured(plugin_name)
+        if plugin_id is None:
+            logger.warning("Failed to get REST plugin_id for : {}".format('MON_module_REST_Plugin'))
+            return None
+
+        # 2) Create Alarm notification rule
+        api_url = '/suite-api/api/notifications/rules'
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+        data = {
+            "name": notification_name,
+            "pluginId": plugin_id,
+            "resourceFilter": {
+                "matchResourceIdOnly": True,
+                "resourceId": resource_id
+            },
+            "alertDefinitionIdFilters": {
+                "values": [alarm_id]
+            }
+        }
+
+        resp = requests.post(self.vrops_site + api_url,
+                             auth=(self.vrops_user, self.vrops_password),
+                             headers=headers,
+                             verify=False,
+                             data=json.dumps(data))
+
+        if resp.status_code != 201:
+            logger.warning("Failed to create Alarm notification rule {} for {} alarm."
+                           "\nResponse code: {}\nResponse content: {}"
+                           .format(notification_name, alarm_name, resp.status_code, resp.content))
+            return None
+
+        # parse notification id from response
+        resp_data = json.loads(resp.content)
+        if resp_data.get('id') is not None:
+            notification_id = resp_data['id']
+
+        logger.info("Created Alarm notification rule {} for {} alarm.".format(notification_name, alarm_name))
+        return notification_id
+
+    def get_vm_moref_id(self, vapp_uuid):
+        """
+        Get the moref_id of given VM
+        """
+        try:
+            if vapp_uuid:
+                vm_details = self.get_vapp_details_rest(vapp_uuid)
+                if vm_details and "vm_vcenter_info" in vm_details:
+                    vm_moref_id = vm_details["vm_vcenter_info"].get("vm_moref_id", None)
+                    logger.info("Found vm_moref_id: {} for vApp UUID: {}".format(vm_moref_id, vapp_uuid))
+                    return vm_moref_id
+
+        except Exception as exp:
+            logger.warning("Error occurred while getting VM moref ID for VM: {}\n{}"
+                           .format(exp, traceback.format_exc()))
+
+    def get_vapp_details_rest(self, vapp_uuid=None):
+        """
+        Method retrieve vapp detail from vCloud director
+
+        Args:
+            vapp_uuid - is vapp identifier.
+
+            Returns:
+                Returns VM MOref ID or return None
+        """
+
+        parsed_respond = {}
+        vca = None
+
+        if vapp_uuid is None:
+            return parsed_respond
+
+        vca = self.connect_as_admin()
+        if not vca:
+            logger.warning("Failed to connect to vCD")
+            return parsed_respond
+
+        url_list = [self.vcloud_site, '/api/vApp/vapp-', vapp_uuid]
+        get_vapp_restcall = ''.join(url_list)
+
+        if vca._session:
+            headers = {'Accept': 'application/*+xml;version=' + API_VERSION,
+                       'x-vcloud-authorization': vca._session.headers['x-vcloud-authorization']}
+            response = requests.get(get_vapp_restcall,
+                                    headers=headers,
+                                    verify=False)
+
+            if response.status_code != 200:
+                logger.warning("REST API call {} failed. Response code: {}, content: {}"
+                               .format(get_vapp_restcall, response.status_code, response.content))
+                return parsed_respond
+
+            try:
+                xmlroot_respond = XmlElementTree.fromstring(response.content)
+
+                namespaces = {'vm': 'http://www.vmware.com/vcloud/v1.5',
+                              "vmext": "http://www.vmware.com/vcloud/extension/v1.5",
+                              "xmlns": "http://www.vmware.com/vcloud/v1.5"
+                              }
+
+                # parse children section for other attrib
+                children_section = xmlroot_respond.find('vm:Children/', namespaces)
+                if children_section is not None:
+                    vCloud_extension_section = children_section.find('xmlns:VCloudExtension', namespaces)
+                    if vCloud_extension_section is not None:
+                        vm_vcenter_info = {}
+                        vim_info = vCloud_extension_section.find('vmext:VmVimInfo', namespaces)
+                        vmext = vim_info.find('vmext:VmVimObjectRef', namespaces)
+                        if vmext is not None:
+                            vm_vcenter_info["vm_moref_id"] = vmext.find('vmext:MoRef', namespaces).text
+                        parsed_respond["vm_vcenter_info"] = vm_vcenter_info
+
+            except Exception as exp:
+                logger.warning("Error occurred calling REST API for getting vApp details: {}\n{}"
+                               .format(exp, traceback.format_exc()))
+
+        return parsed_respond
+
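Illustrative only: a self-contained sketch of the namespace-aware lookups used above, run against a trimmed, hypothetical vApp fragment (the real vCloud Director response carries many more elements):

    from xml.etree import ElementTree as XmlElementTree

    sample = (
        '<VApp xmlns="http://www.vmware.com/vcloud/v1.5"'
        ' xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5">'
        '<Children><Vm><VCloudExtension>'
        '<vmext:VmVimInfo><vmext:VmVimObjectRef>'
        '<vmext:MoRef>vm-1234</vmext:MoRef>'
        '</vmext:VmVimObjectRef></vmext:VmVimInfo>'
        '</VCloudExtension></Vm></Children></VApp>'
    )
    namespaces = {'vm': 'http://www.vmware.com/vcloud/v1.5',
                  'vmext': 'http://www.vmware.com/vcloud/extension/v1.5',
                  'xmlns': 'http://www.vmware.com/vcloud/v1.5'}
    root = XmlElementTree.fromstring(sample)
    vm = root.find('vm:Children/', namespaces)          # first child of <Children>, i.e. <Vm>
    ext = vm.find('xmlns:VCloudExtension', namespaces)  # element in the default namespace
    moref = ext.find('vmext:VmVimInfo/vmext:VmVimObjectRef/vmext:MoRef', namespaces).text
    print(moref)  # -> vm-1234
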
+    def connect_as_admin(self):
+        """ Method connect as pvdc admin user to vCloud director.
+            There are certain action that can be done only by provider vdc admin user.
+            Organization creation / provider network creation etc.
+
+            Returns:
+                The return vca object that letter can be used to connect to vcloud direct as admin for provider vdc
+        """
+
+        logger.debug("Logging into vCD org as admin.")
+
+        try:
+            host = self.vcloud_site
+            org = 'System'
+            client_as_admin = Client(host, verify_ssl_certs=False)
+            client_as_admin.set_credentials(BasicLoginCredentials(self.admin_username, org,
+                                                                  self.admin_password))
+            return client_as_admin
+
+        except Exception as e:
+            logger.warning("Failed to connect to vCloud Director as {}: {}"
+                           .format(self.admin_username, e))
+
+    def get_vm_resource_id(self, vm_moref_id):
+        """ Find resource ID in vROPs using vm_moref_id
+        """
+        if vm_moref_id is None:
+            return None
+
+        api_url = '/suite-api/api/resources?resourceKind=VirtualMachine'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("Failed to get resource details from vROPs for {}"
+                           "\nResponse code: {}\nResponse content: {}"
+                           .format(vm_moref_id, resp.status_code, resp.content))
+            return None
+
+        vm_resource_id = None
+        try:
+            resp_data = json.loads(resp.content)
+            if resp_data.get('resourceList') is not None:
+                resource_list = resp_data.get('resourceList')
+                for resource in resource_list:
+                    if resource.get('resourceKey') is not None:
+                        resource_details = resource['resourceKey']
+                        if resource_details.get('resourceIdentifiers') is not None:
+                            resource_identifiers = resource_details['resourceIdentifiers']
+                            for resource_identifier in resource_identifiers:
+                                if resource_identifier['identifierType']['name'] == 'VMEntityObjectID':
+                                    if resource_identifier.get('value') is not None and \
+                                            resource_identifier['value'] == vm_moref_id:
+                                        vm_resource_id = resource['identifier']
+                                        logger.info("Found VM resource ID: {} for vm_moref_id: {}"
+                                                    .format(vm_resource_id, vm_moref_id))
+
+        except Exception as exp:
+            logger.warning("get_vm_resource_id: Error in parsing {}\n{}"
+                           .format(exp, traceback.format_exc()))
+
+        return vm_resource_id
+
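Illustrative only: a hypothetical, heavily trimmed /suite-api/api/resources response showing the fields the loop above matches (VMEntityObjectID against vm_moref_id) before returning the top-level 'identifier' as the vROPs resource ID:

    sample_resp = {
        "resourceList": [
            {
                "identifier": "vrops-res-0001",            # returned as vm_resource_id
                "resourceKey": {
                    "name": "my-vm",
                    "resourceIdentifiers": [
                        {
                            "identifierType": {"name": "VMEntityObjectID"},
                            "value": "vm-1234"             # compared against vm_moref_id
                        }
                    ]
                }
            }
        ]
    }
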
+    def get_metrics_data(self, metric={}):
+        """Get an individual metric's data of a resource.
+        Params:
+            'metric_name': Normalized name of metric (string)
+            'resource_uuid': Resource UUID (string)
+            'period': Time period in Period Unit for which metrics data to be collected from
+                        Monitoring tool from now.
+            'period_unit': Period measurement unit can be one of 'HR', 'DAY', 'MONTH', 'YEAR'
+
+        Return a dict that contains:
+            'metric_name': Normalized name of metric (string)
+            'resource_uuid': Resource UUID (string)
+            'tenant_id': Tenant ID in which the resource is present (string)
+            'metrics_data': Dictionary containing time_series & metrics_series data.
+                'time_series': List of individual time stamp values in msec
+                'metrics_series': List of individual metrics data values
+        Returns the partially filled dict (empty 'metrics_data') when the metric or resource cannot be resolved
+        """
+        return_data = {}
+        return_data['schema_version'] = "1.0"
+        return_data['schema_type'] = 'read_metric_data_response'
+        return_data['vim_uuid'] = metric['vim_uuid']
+        return_data['metric_name'] = metric['metric_name']
+        # To do - No metric_uuid in vROPs, thus returning '0'
+        return_data['metric_uuid'] = '0'
+        return_data['correlation_id'] = metric['correlation_id']
+        return_data['resource_uuid'] = metric['resource_uuid']
+        return_data['metrics_data'] = {'time_series': [], 'metrics_series': []}
+        # To do - Need confirmation about uuid & id
+        ##if 'tenant_uuid' in metric and metric['tenant_uuid'] is not None:
+        ##    return_data['tenant_uuid'] = metric['tenant_uuid']
+        ##else:
+        ##    return_data['tenant_uuid'] = None
+        return_data['unit'] = None
+        # return_data['tenant_id'] = self.tenant_id
+        # logger.warning("return_data: {}".format(return_data))
+
+        # 1) Get metric details from plugin specific file & format it into vROPs metrics
+        metric_key_params = self.get_default_Params(metric['metric_name'])
+
+        if not metric_key_params:
+            logger.warning("Metric not supported: {}".format(metric['metric_name']))
+            # To Do: Return message
+            return return_data
+
+        return_data['unit'] = metric_key_params['unit']
+
+        # 2) Find the resource id in vROPs based on OSM resource_uuid
+        # 2.a) Find vm_moref_id from vApp uuid in vCD
+        vm_moref_id = self.get_vm_moref_id(metric['resource_uuid'])
+        if vm_moref_id is None:
+            logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(metric['resource_uuid']))
+            return return_data
+        # 2.b) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        resource_id = self.get_vm_resource_id(vm_moref_id)
+        if resource_id is None:
+            logger.warning("Failed to find resource in vROPs: {}".format(metric['resource_uuid']))
+            return return_data
+
+        # 3) Calculate begin & end time for period & period unit
+        end_time = int(round(time.time() * 1000))
+        if metric['collection_unit'] == 'YR':
+            time_diff = PERIOD_MSEC[metric['collection_unit']]
+        else:
+            time_diff = metric['collection_period'] * PERIOD_MSEC[metric['collection_unit']]
+        begin_time = end_time - time_diff
+
+        # 4) Get the metrics data
+        logger.info("metric_key_params['metric_key'] = {}".format(metric_key_params['metric_key']))
+        logger.info("end_time: {}, begin_time: {}".format(end_time, begin_time))
+
+        url_list = ['/suite-api/api/resources/', resource_id, '/stats?statKey=',
+                    metric_key_params['metric_key'], '&begin=', str(begin_time), '&end=', str(end_time)]
+        api_url = ''.join(url_list)
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning(
+                "Failed to retrieve metric data from vROPs for {}\nResponse code: {}\nResponse content: {}"
+                .format(metric['metric_name'], resp.status_code, resp.content))
+            return return_data
+
+        # 5) Convert to required format
+        metrics_data = {}
+        json_data = json.loads(resp.content)
+        for resp_key, resp_val in six.iteritems(json_data):
+            if resp_key == 'values':
+                data = json_data['values'][0]
+                for data_k, data_v in six.iteritems(data):
+                    if data_k == 'stat-list':
+                        stat_list = data_v
+                        for stat_list_k, stat_list_v in six.iteritems(stat_list):
+                            for stat_keys, stat_vals in six.iteritems(stat_list_v[0]):
+                                if stat_keys == 'timestamps':
+                                    metrics_data['time_series'] = stat_list_v[0]['timestamps']
+                                if stat_keys == 'data':
+                                    metrics_data['metrics_series'] = stat_list_v[0]['data']
+
+        return_data['metrics_data'] = metrics_data
+
+        return return_data
+
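Illustrative only: a hypothetical, trimmed /stats response showing where the conversion loop above (step 5) picks up 'timestamps' and 'data':

    sample_stats = {
        "values": [
            {
                "resourceId": "vrops-res-0001",
                "stat-list": {
                    "stat": [
                        {
                            "timestamps": [1542805500000, 1542805800000],  # msec -> 'time_series'
                            "statKey": {"key": "cpu|usage_average"},
                            "data": [11.2, 12.7]                           # -> 'metrics_series'
                        }
                    ]
                }
            }
        ]
    }
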
+    def update_alarm_configuration(self, new_alarm_config):
+        """Update alarm configuration (i.e. Symptom & alarm) as per request
+        """
+        if new_alarm_config.get('alarm_uuid') is None:
+            logger.warning("alarm_uuid is required to update an Alarm")
+            return None
+        # 1) Get Alarm details from it's uuid & find the symptom definition
+        alarm_details_json, alarm_details = self.get_alarm_defination_details(new_alarm_config['alarm_uuid'])
+        if alarm_details_json is None:
+            return None
+
+        try:
+            # 2) Update the symptom definition
+            if alarm_details['alarm_id'] is not None and alarm_details['symptom_definition_id'] is not None:
+                symptom_definition_id = alarm_details['symptom_definition_id']
+            else:
+                logger.info("Symptom Definition ID not found for {}".format(new_alarm_config['alarm_uuid']))
+                return None
+
+            symptom_uuid = self.update_symptom_defination(symptom_definition_id, new_alarm_config)
+
+            # 3) Update the alarm definition & Return UUID if successful update
+            if symptom_uuid is None:
+                logger.info("Symptom Definition details not found for {}"
+                            .format(new_alarm_config['alarm_uuid']))
+                return None
+            else:
+                alarm_uuid = self.reconfigure_alarm(alarm_details_json, new_alarm_config)
+                if alarm_uuid is None:
+                    return None
+                else:
+                    return alarm_uuid
+        except Exception:
+            logger.error("Exception while updating alarm: {}".format(traceback.format_exc()))
+
+    def get_alarm_defination_details(self, alarm_uuid):
+        """Get alarm details based on alarm UUID
+        """
+        if alarm_uuid is None:
+            logger.warning("get_alarm_defination_details: Alarm UUID not provided")
+            return None, None
+
+        alarm_details = {}
+        json_data = {}
+        api_url = '/suite-api/api/alertdefinitions/AlertDefinition-'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url + alarm_uuid,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("Alarm to be updated not found: {}\nResponse code: {}\nResponse content: {}"
+                           .format(alarm_uuid, resp.status_code, resp.content))
+            return None, None
+
+        try:
+            json_data = json.loads(resp.content)
+            if json_data['id'] is not None:
+                alarm_details['alarm_id'] = json_data['id']
+                alarm_details['alarm_name'] = json_data['name']
+                alarm_details['adapter_kind'] = json_data['adapterKindKey']
+                alarm_details['resource_kind'] = json_data['resourceKindKey']
+                alarm_details['type'] = json_data['type']
+                alarm_details['sub_type'] = json_data['subType']
+                alarm_details['symptom_definition_id'] = \
+                    json_data['states'][0]['base-symptom-set']['symptomDefinitionIds'][0]
+        except Exception as exp:
+            logger.warning("Exception while retrieving alarm definition details: {}".format(exp))
+            return None, None
+
+        return json_data, alarm_details
+
+    def get_alarm_defination_by_name(self, alarm_name):
+        """Get alarm details based on alarm name
+        """
+        status = False
+        alert_match_list = []
+
+        if alarm_name is None:
+            logger.warning("get_alarm_defination_by_name: Alarm name not provided")
+            return alert_match_list
+
+        json_data = {}
+        api_url = '/suite-api/api/alertdefinitions'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("get_alarm_defination_by_name: Error in response for {}\nResponse code: {}"
+                           "\nResponse content: {}".format(alarm_name, resp.status_code, resp.content))
+            return alert_match_list
+
+        try:
+            json_data = json.loads(resp.content)
+            if json_data['alertDefinitions'] is not None:
+                alerts_list = json_data['alertDefinitions']
+                alert_match_list = list(filter(lambda alert: alert['name'] == alarm_name, alerts_list))
+                status = False if not alert_match_list else True
+                # logger.debug("Found alert_match_list: {}for larm_name: {}, nstatus: {}".format(alert_match_list, alarm_name,status))
+
+            return alert_match_list
+
+        except Exception as exp:
+            logger.warning("Exception while searching alarm definition: {}".format(exp))
+            return alert_match_list
+
+    def update_symptom_defination(self, symptom_uuid, new_alarm_config):
+        """Update symptom definition based on new alarm input configuration
+        """
+        # 1) Get symptom definition details
+        symptom_details = self.get_symptom_defination_details(symptom_uuid)
+        # print " n nsymptom_details: {}".format(symptom_details)
+        if symptom_details is None:
+            return None
+
+        if 'severity' in new_alarm_config and new_alarm_config['severity'] is not None:
+            symptom_details['state']['severity'] = severity_mano2vrops[new_alarm_config['severity']]
+        if 'operation' in new_alarm_config and new_alarm_config['operation'] is not None:
+            symptom_details['state']['condition']['operator'] = OPERATION_MAPPING[new_alarm_config['operation']]
+        if 'threshold_value' in new_alarm_config and new_alarm_config['threshold_value'] is not None:
+            symptom_details['state']['condition']['value'] = new_alarm_config['threshold_value']
+        # Find vrops metric key from metric_name, if required
+        """
+        if 'metric_name' in new_alarm_config and new_alarm_config['metric_name'] is not None:
+            metric_key_params = self.get_default_Params(new_alarm_config['metric_name'])
+            if not metric_key_params:
+                logger.warning("Metric not supported: {}".format(config_dict['metric_name']))
+                return None
+            symptom_details['state']['condition']['key'] = metric_key_params['metric_key']
+        """
+        logger.info("Fetched Symptom details : {}".format(symptom_details))
+
+        api_url = '/suite-api/api/symptomdefinitions'
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+        data = json.dumps(symptom_details)
+        resp = requests.put(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            headers=headers,
+                            verify=False,
+                            data=data)
+
+        if resp.status_code != 200:
+            logger.warning("Failed to update Symptom definition: {}, response {}"
+                           .format(symptom_uuid, resp.content))
+            return None
+
+        if symptom_uuid is not None:
+            logger.info("Symptom definition updated {} for alarm: {}"
+                        .format(symptom_uuid, new_alarm_config['alarm_uuid']))
+            return symptom_uuid
+        else:
+            logger.warning("Failed to update Symptom Definition {} for : {}"
+                           .format(symptom_uuid, new_alarm_config['alarm_uuid']))
+            return None
+
+    def get_symptom_defination_details(self, symptom_uuid):
+        """Get symptom definition details
+        """
+        symptom_details = {}
+        if symptom_uuid is None:
+            logger.warning("get_symptom_defination_details: Symptom UUID not provided")
+            return None
+
+        api_url = '/suite-api/api/symptomdefinitions/'
+        headers = {'Accept': 'application/json'}
+
+        resp = requests.get(self.vrops_site + api_url + symptom_uuid,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("Symptom definition not found {}\nResponse code: {}\nResponse content: {}"
+                           .format(symptom_uuid, resp.status_code, resp.content))
+            return None
+
+        symptom_details = json.loads(resp.content)
+        # print "New symptom Details: {}".format(symptom_details)
+        return symptom_details
+
+    def reconfigure_alarm(self, alarm_details_json, new_alarm_config):
+        """Reconfigure alarm definition as per input
+        """
+        if 'severity' in new_alarm_config and new_alarm_config['severity'] is not None:
+            alarm_details_json['states'][0]['severity'] = new_alarm_config['severity']
+        if 'description' in new_alarm_config and new_alarm_config['description'] is not None:
+            alarm_details_json['description'] = new_alarm_config['description']
+
+        api_url = '/suite-api/api/alertdefinitions'
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+        data = json.dumps(alarm_details_json)
+        resp = requests.put(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            headers=headers,
+                            verify=False,
+                            data=data)
+
+        if resp.status_code != 200:
+            logger.warning("Failed to update Alarm definition: {}, response code {}, response content: {}"
+                           .format(alarm_details_json['id'], resp.status_code, resp.content))
+            return None
+        else:
+            parsed_alarm_details = json.loads(resp.content)
+            alarm_def_uuid = parsed_alarm_details['id'].split('-', 1)[1]
+            logger.info("Successfully updated Alarm definition: {}".format(alarm_def_uuid))
+            return alarm_def_uuid
+
+    def delete_alarm_configuration(self, delete_alarm_req_dict):
+        """Delete complete alarm configuration
+        """
+        if delete_alarm_req_dict['alarm_uuid'] is None:
+            logger.info("delete_alarm_configuration: Alarm UUID not provided")
+            return None
+        # 1)Get alarm & symptom definition details
+        alarm_details_json, alarm_details = self.get_alarm_defination_details(delete_alarm_req_dict['alarm_uuid'])
+        if alarm_details is None or alarm_details_json is None:
+            return None
+
+        # 2) Delete alarm notification
+        rule_id = self.delete_notification_rule(alarm_details['alarm_name'])
+        if rule_id is None:
+            return None
+
+        # 3) Delete alarm configuration
+        alarm_id = self.delete_alarm_defination(alarm_details['alarm_id'])
+        if alarm_id is None:
+            return None
+
+        # 4) Delete alarm symptom
+        symptom_id = self.delete_symptom_definition(alarm_details['symptom_definition_id'])
+        if symptom_id is None:
+            return None
+        else:
+            logger.info("Completed deleting alarm configuration: {}"
+                        .format(delete_alarm_req_dict['alarm_uuid']))
+            return delete_alarm_req_dict['alarm_uuid']
+
+    def delete_notification_rule(self, alarm_name):
+        """Deleted notification rule defined for a particular alarm
+        """
+        rule_id = self.get_notification_rule_id_by_alarm_name(alarm_name)
+        if rule_id is None:
+            return None
+        else:
+            api_url = '/suite-api/api/notifications/rules/'
+            headers = {'Accept': 'application/json'}
+            resp = requests.delete(self.vrops_site + api_url + rule_id,
+                                   auth=(self.vrops_user, self.vrops_password),
+                                   verify=False, headers=headers)
+            if resp.status_code != 204:
+                logger.warning("Failed to delete notification rules for {}".format(alarm_name))
+                return None
+            else:
+                logger.info("Deleted notification rules for {}".format(alarm_name))
+                return rule_id
+
+    def get_notification_rule_id_by_alarm_name(self, alarm_name):
+        """Find created Alarm notification rule id by alarm name
+        """
+        alarm_notify_id = 'notify_' + alarm_name
+        api_url = '/suite-api/api/notifications/rules'
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+        resp = requests.get(self.vrops_site + api_url,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("Failed to get notification rules details for {}"
+                           .format(alarm_name))
+            return None
+
+        notifications = json.loads(resp.content)
+        if notifications is not None and 'notification-rule' in notifications:
+            notifications_list = notifications['notification-rule']
+            for rule in notifications_list:
+                if rule['name'] is not None and rule['name'] == alarm_notify_id:
+                    notification_id = rule['id']
+                    logger.info("Found Notification id to be deleted: {} for {}"
+                                .format(notification_id, alarm_name))
+                    return notification_id
+
+            logger.warning("Notification id to be deleted not found for {}"
+                           .format(alarm_name))
+            return None
+
+    def delete_alarm_defination(self, alarm_id):
+        """Delete created Alarm definition
+        """
+        api_url = '/suite-api/api/alertdefinitions/'
+        headers = {'Accept': 'application/json'}
+        resp = requests.delete(self.vrops_site + api_url + alarm_id,
+                               auth=(self.vrops_user, self.vrops_password),
+                               verify=False, headers=headers)
+        if resp.status_code != 204:
+            logger.warning("Failed to delete alarm definition {}".format(alarm_id))
+            return None
+        else:
+            logger.info("Deleted alarm definition {}".format(alarm_id))
+            return alarm_id
+
+    def delete_symptom_definition(self, symptom_id):
+        """Delete symptom definition
+        """
+        api_url = '/suite-api/api/symptomdefinitions/'
+        headers = {'Accept': 'application/json'}
+        resp = requests.delete(self.vrops_site + api_url + symptom_id,
+                               auth=(self.vrops_user, self.vrops_password),
+                               verify=False, headers=headers)
+        if resp.status_code != 204:
+            logger.warning("Failed to delete symptom definition {}".format(symptom_id))
+            return None
+        else:
+            logger.info("Deleted symptom definition {}".format(symptom_id))
+            return symptom_id
+
+    def verify_metric_support(self, metric_info):
+        """Verify, if Metric is supported by vROPs plugin, verify metric unit & return status
+            Returns:
+            status: True if supported, False if not supported
+        """
+        status = False
+        if 'metric_name' not in metric_info:
+            logger.debug("Metric name not provided: {}".format(metric_info))
+            return status
+        metric_key_params = self.get_default_Params(metric_info['metric_name'].lower())
+        if not metric_key_params:
+            logger.warning("Metric not supported: {}".format(metric_info['metric_name']))
+            return status
+        else:
+            # If Metric is supported, verify optional metric unit & return status
+            if 'metric_unit' in metric_info:
+                if metric_key_params.get('unit') == metric_info['metric_unit']:
+                    logger.info("Metric is supported with unit: {}".format(metric_info['metric_name']))
+                    status = True
+                else:
+                    logger.debug("Metric supported but there is unit mismatch for: {}."
+                                 "Supported unit: {}"
+                                 .format(metric_info['metric_name'], metric_key_params['unit']))
+                    status = True
+        return status
+
+    def get_triggered_alarms_list(self, list_alarm_input):
+        """Get list of triggered alarms on a resource based on alarm input request.
+        """
+        # TODO: add filtering of alarms based on severity & alarm name
+
+        triggered_alarms_list = []
+        if list_alarm_input.get('resource_uuid') is None:
+            logger.warning("Resource UUID is required to get triggered alarms list")
+            return triggered_alarms_list
+
+        # 1)Find vROPs resource ID using RO resource UUID
+        vrops_resource_id = self.get_vrops_resourceid_from_ro_uuid(list_alarm_input['resource_uuid'])
+        if vrops_resource_id is None:
+            return triggered_alarms_list
+
+        # 2)Get triggered alarms on particular resource
+        triggered_alarms_list = self.get_triggered_alarms_on_resource(list_alarm_input['resource_uuid'],
+                                                                      vrops_resource_id)
+        return triggered_alarms_list
+
+    def get_vrops_resourceid_from_ro_uuid(self, ro_resource_uuid):
+        """Fetch vROPs resource ID using resource UUID from RO/SO
+        """
+        # 1) Find vm_moref_id from vApp uuid in vCD
+        vm_moref_id = self.get_vm_moref_id(ro_resource_uuid)
+        if vm_moref_id is None:
+            logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(ro_resource_uuid))
+            return None
+
+        # 2) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        vrops_resource_id = self.get_vm_resource_id(vm_moref_id)
+        if vrops_resource_id is None:
+            logger.warning("Failed to find resource in vROPs: {}".format(ro_resource_uuid))
+            return None
+        return vrops_resource_id
+
+    def get_triggered_alarms_on_resource(self, ro_resource_uuid, vrops_resource_id):
+        """Get triggered alarms on particular resource & return list of dictionary of alarms
+        """
+        resource_alarms = []
+        api_url = '/suite-api/api/alerts?resourceId='
+        headers = {'Accept': 'application/json'}
+        resp = requests.get(self.vrops_site + api_url + vrops_resource_id,
+                            auth=(self.vrops_user, self.vrops_password),
+                            verify=False, headers=headers)
+
+        if resp.status_code != 200:
+            logger.warning("Failed to get triggered alarms for {}"
+                           .format(ro_resource_uuid))
+            return None
+
+        all_alerts = json.loads(resp.content)
+        if 'alerts' in all_alerts:
+            if not all_alerts['alerts']:
+                logger.info("No alarms present on resource {}".format(ro_resource_uuid))
+                return resource_alarms
+            all_alerts_list = all_alerts['alerts']
+            for alarm in all_alerts_list:
+                # logger.info("Triggered Alarm {}".format(alarm))
+                if alarm['alertDefinitionName'] is not None and \
+                        len(alarm['alertDefinitionName'].split('-', 1)) == 2:
+                    if alarm['alertDefinitionName'].split('-', 1)[1] == ro_resource_uuid:
+                        alarm_instance = {'alarm_uuid': alarm['alertDefinitionId'].split('-', 1)[1],
+                                          'resource_uuid': ro_resource_uuid,
+                                          'alarm_instance_uuid': alarm['alertId'],
+                                          'vim_type': 'VMware'}
+                        # find severity of alarm
+                        severity = None
+                        for key, value in six.iteritems(severity_mano2vrops):
+                            if value == alarm['alertLevel']:
+                                severity = key
+                        if severity is None:
+                            severity = 'INDETERMINATE'
+                        alarm_instance['severity'] = severity
+                        alarm_instance['status'] = alarm['status']
+                        alarm_instance['start_date'] = self.convert_date_time(alarm['startTimeUTC'])
+                        alarm_instance['update_date'] = self.convert_date_time(alarm['updateTimeUTC'])
+                        alarm_instance['cancel_date'] = self.convert_date_time(alarm['cancelTimeUTC'])
+                        logger.info("Triggered Alarm on resource {}".format(alarm_instance))
+                        resource_alarms.append(alarm_instance)
+        if not resource_alarms:
+            logger.info("No alarms present on resource {}".format(ro_resource_uuid))
+        return resource_alarms
+
+    def convert_date_time(self, date_time):
+        """Convert the input UTC time in msec to OSM date time format
+        """
+        date_time_formatted = '0000-00-00T00:00:00'
+        if date_time != 0:
+            complete_datetime = datetime.datetime.fromtimestamp(date_time / 1000.0, tz=pytz.utc).isoformat('T')
+            date_time_formatted = complete_datetime.split('.', 1)[0]
+        return date_time_formatted
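A minimal usage sketch for the class above, assuming an access_config dict with the vROPs/vCD fields assembled by get_vim_access_config() in plugin_receiver.py (below); metric names and values are illustrative:

    mon_plugin = MonPlugin(access_config)   # vrops_site, vrops_user, vrops_password, vim_url, ...
    mon_plugin.configure_rest_plugin()      # reuses MON_module_REST_Plugin if already present
    metric = {'vim_uuid': 'vim-0001',
              'metric_name': 'cpu_utilization',   # must be mapped in vrops_config.xml
              'resource_uuid': 'vdu-0001',        # vApp UUID in vCloud Director
              'correlation_id': 1,
              'collection_period': 1,
              'collection_unit': 'HR'}
    data = mon_plugin.get_metrics_data(metric)
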
diff --git a/osm_mon/plugins/vRealiseOps/plugin_receiver.py b/osm_mon/plugins/vRealiseOps/plugin_receiver.py
new file mode 100644 (file)
index 0000000..ce9e1f8
--- /dev/null
@@ -0,0 +1,435 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+"""
+Monitoring plugin receiver that consumes the request messages &
+responds using producer for vROPs
+"""
+
+import json
+import logging
+import os
+import sys
+import traceback
+from io import UnsupportedOperation
+
+import six
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.vRealiseOps.mon_plugin_vrops import MonPlugin
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
+
+from osm_mon.core.auth import AuthManager
+
+from xml.etree import ElementTree as XmlElementTree
+
+schema_version = "1.0"
+req_config_params = ('vrops_site', 'vrops_user', 'vrops_password',
+                     'vcloud-site', 'admin_username', 'admin_password',
+                     'vcenter_ip', 'vcenter_port', 'vcenter_user', 'vcenter_password',
+                     'vim_tenant_name', 'orgname')
+MODULE_DIR = os.path.dirname(__file__)
+CONFIG_FILE_NAME = 'vrops_config.xml'
+CONFIG_FILE_PATH = os.path.join(MODULE_DIR, CONFIG_FILE_NAME)
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+
+logger = logging.getLogger(__name__)
+
+
+class PluginReceiver:
+    """MON Plugin receiver receiving request messages & responding using producer for vROPs
+    telemetry plugin
+    """
+
+    def __init__(self):
+        """Constructor of PluginReceiver
+        """
+        self._cfg = Config.instance()
+
+    def handle_alarm_requests(self, key: str, values: dict, vim_uuid: str):
+        values['vim_uuid'] = vim_uuid
+        if key == "create_alarm_request":
+            config_alarm_info = values
+            alarm_uuid = self.create_alarm(config_alarm_info)
+            logger.info("Alarm created with alarm uuid: {}".format(alarm_uuid))
+            # Publish message using producer
+            return self.publish_create_alarm_status(alarm_uuid, config_alarm_info)
+        elif key == "update_alarm_request":
+            update_alarm_info = values
+            alarm_uuid = self.update_alarm(update_alarm_info)
+            logger.info("Alarm definition updated : alarm uuid: {}".format(alarm_uuid))
+            # Publish message using producer
+            return self.publish_update_alarm_status(alarm_uuid, update_alarm_info)
+        elif key == "delete_alarm_request":
+            delete_alarm_info = values
+            alarm_uuid = self.delete_alarm(delete_alarm_info)
+            logger.info("Alarm definition deleted : alarm uuid: {}".format(alarm_uuid))
+            # Publish message using producer
+            return self.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
+        elif key == "list_alarm_request":
+            request_input = values
+            triggered_alarm_list = self.list_alarms(request_input)
+            # Publish message using producer
+            return self.publish_list_alarm_response(triggered_alarm_list, request_input)
+        else:
+            raise UnsupportedOperation("Unknown key, no action will be performed")
+
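A hedged example of the message this handler expects for key == "create_alarm_request"; only 'correlation_id' is confirmed by the code below, the remaining fields inside 'alarm_create_request' are assumptions based on the update and list paths:

    values = {"alarm_create_request": {"correlation_id": 1,
                                       "alarm_name": "cpu_util_above_threshold",  # assumption
                                       "metric_name": "cpu_utilization",          # assumption
                                       "resource_uuid": "vdu-0001",               # assumption
                                       "severity": "CRITICAL",                    # assumption
                                       "operation": "GT",                         # assumption
                                       "threshold_value": 80}}                    # assumption
    response = PluginReceiver().handle_alarm_requests("create_alarm_request", values, "vim-0001")
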
+    def handle_metric_requests(self, key: str, values: dict, vim_uuid: str):
+        values['vim_uuid'] = vim_uuid
+        if key == "read_metric_data_request":
+            metric_request_info = values
+            access_config = self.get_vim_access_config(metric_request_info['vim_uuid'])
+            mon_plugin_obj = MonPlugin(access_config)
+            metrics_data = mon_plugin_obj.get_metrics_data(metric_request_info)
+            logger.info("Collected Metrics Data: {}".format(metrics_data))
+            # Publish message using producer
+            return self.publish_metrics_data_status(metrics_data)
+        elif key == "create_metric_request":
+            metric_info = values
+            metric_status = self.verify_metric(metric_info)
+            # Publish message using producer
+            return self.publish_create_metric_response(metric_info, metric_status)
+        elif key == "update_metric_request":
+            metric_info = values
+            metric_status = self.verify_metric(metric_info)
+            # Publish message using producer
+            return self.publish_update_metric_response(metric_info, metric_status)
+        elif key == "delete_metric_request":
+            metric_info = values
+            # Deleting Metric Data is not allowed. Publish status as False
+            logger.warning("Deleting Metric is not allowed by VMware vROPs plugin: {}"
+                           .format(metric_info['metric_name']))
+            # Publish message using producer
+            return self.publish_delete_metric_response(metric_info)
+
+        else:
+            raise UnsupportedOperation("Unknown key, no action will be performed")
+
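Similarly, a sketch of a read_metric_data_request body, using the keys consumed by MonPlugin.get_metrics_data(); 'vim_uuid' is injected by the handler itself:

    values = {"metric_name": "cpu_utilization",
              "resource_uuid": "vdu-0001",
              "correlation_id": 2,
              "collection_period": 1,
              "collection_unit": "HR"}
    response = PluginReceiver().handle_metric_requests("read_metric_data_request", values, "vim-0001")
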
+    def create_alarm(self, config_alarm_info):
+        """Create alarm using vROPs plugin
+        """
+        access_config = self.get_vim_access_config(config_alarm_info['vim_uuid'])
+        mon_plugin = MonPlugin(access_config)
+        mon_plugin.configure_rest_plugin()
+        alarm_uuid = mon_plugin.configure_alarm(config_alarm_info['alarm_create_request'])
+        return alarm_uuid
+
+    def publish_create_alarm_status(self, alarm_uuid, config_alarm_info):
+        """Publish create alarm status using producer
+        """
+        topic = 'alarm_response'
+        msg_key = 'create_alarm_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "create_alarm_response",
+                        "vim_uuid": config_alarm_info["vim_uuid"],
+                        "alarm_create_response":
+                            {"correlation_id": config_alarm_info["alarm_create_request"]["correlation_id"],
+                             "alarm_uuid": alarm_uuid,
+                             "status": True if alarm_uuid else False
+                             }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def update_alarm(self, update_alarm_info):
+        """Update already created alarm
+        """
+        access_config = self.get_vim_access_config(update_alarm_info['vim_uuid'])
+        mon_plugin = MonPlugin(access_config)
+        alarm_uuid = mon_plugin.update_alarm_configuration(update_alarm_info['alarm_update_request'])
+        return alarm_uuid
+
+    def publish_update_alarm_status(self, alarm_uuid, update_alarm_info):
+        """Publish update alarm status requests using producer
+        """
+        topic = 'alarm_response'
+        msg_key = 'update_alarm_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "update_alarm_response",
+                        "vim_uuid": update_alarm_info["vim_uuid"],
+                        "alarm_update_response":
+                            {"correlation_id": update_alarm_info["alarm_update_request"]["correlation_id"],
+                             "alarm_uuid": update_alarm_info["alarm_update_request"]["alarm_uuid"] \
+                                 if update_alarm_info["alarm_update_request"].get('alarm_uuid') is not None else None,
+                             "status": True if alarm_uuid else False
+                             }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def delete_alarm(self, delete_alarm_info):
+        """Delete alarm configuration
+        """
+        access_config = self.get_vim_access_config(delete_alarm_info['vim_uuid'])
+        mon_plugin = MonPlugin(access_config)
+        alarm_uuid = mon_plugin.delete_alarm_configuration(delete_alarm_info['alarm_delete_request'])
+        return alarm_uuid
+
+    def publish_delete_alarm_status(self, alarm_uuid, delete_alarm_info):
+        """Publish update alarm status requests using producer
+        """
+        topic = 'alarm_response'
+        msg_key = 'delete_alarm_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "delete_alarm_response",
+                        "vim_uuid": delete_alarm_info['vim_uuid'],
+                        "alarm_deletion_response":
+                            {"correlation_id": delete_alarm_info["alarm_delete_request"]["correlation_id"],
+                             "alarm_uuid": delete_alarm_info["alarm_delete_request"]["alarm_uuid"],
+                             "status": True if alarm_uuid else False
+                             }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def publish_metrics_data_status(self, metrics_data):
+        """Publish the requested metric data using producer
+        """
+        topic = 'metric_response'
+        msg_key = 'read_metric_data_response'
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, metrics_data))
+
+        return metrics_data
+
+    def verify_metric(self, metric_info):
+        """Verify if metric is supported or not
+        """
+        access_config = self.get_vim_access_config(metric_info['vim_uuid'])
+        mon_plugin = MonPlugin(access_config)
+        if 'metric_create_request' in metric_info:
+            metric_key_status = mon_plugin.verify_metric_support(metric_info['metric_create_request'])
+        else:
+            metric_key_status = mon_plugin.verify_metric_support(metric_info['metric_update_request'])
+        return metric_key_status
+
+    def publish_create_metric_response(self, metric_info, metric_status):
+        """Publish create metric response
+        """
+        topic = 'metric_response'
+        msg_key = 'create_metric_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "create_metric_response",
+                        ##"vim_uuid":metric_info['vim_uuid'],
+                        ##"correlation_id":metric_info['correlation_id'],
+                        "metric_create_response":
+                            {
+                                ##"metric_uuid":'0',
+                                ##"resource_uuid":metric_info['metric_create']['resource_uuid'],
+                                ##"vim_uuid":metric_info['vim_uuid'], #May be required. TODO - Confirm
+                                "correlation_id": metric_info['correlation_id'],
+                                "status": metric_status
+                            }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def publish_update_metric_response(self, metric_info, metric_status):
+        """Publish update metric response
+        """
+        topic = 'metric_response'
+        msg_key = 'update_metric_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "metric_update_response",
+                        "vim_uuid": metric_info['vim_uuid'],
+                        "metric_update_response":
+                            {
+                                "metric_uuid": '0',
+                                "correlation_id": metric_info['correlation_id'],
+                                "resource_uuid": metric_info['metric_create']['resource_uuid'],
+                                "status": metric_status
+                            }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def publish_delete_metric_response(self, metric_info):
+        """Publish delete metric response
+        """
+        topic = 'metric_response'
+        msg_key = 'delete_metric_response'
+        if 'tenant_uuid' in metric_info and metric_info['tenant_uuid'] is not None:
+            tenant_uuid = metric_info['tenant_uuid']
+        else:
+            tenant_uuid = None
+
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "delete_metric_response",
+                        "vim_uuid": metric_info['vim_uuid'],
+                        "correlation_id": metric_info['correlation_id'],
+                        "metric_name": metric_info['metric_name'],
+                        "metric_uuid": '0',
+                        "resource_uuid": metric_info['resource_uuid'],
+                        "tenant_uuid": tenant_uuid,
+                        "status": False
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def list_alarms(self, list_alarm_input):
+        """Collect list of triggered alarms based on input
+        """
+        access_config = self.get_vim_access_config(list_alarm_input['vim_uuid'])
+        mon_plugin = MonPlugin(access_config)
+        triggered_alarms = mon_plugin.get_triggered_alarms_list(list_alarm_input['alarm_list_request'])
+        return triggered_alarms
+
+    def publish_list_alarm_response(self, triggered_alarm_list, list_alarm_input):
+        """Publish list of triggered alarms
+        """
+        topic = 'alarm_response'
+        msg_key = 'list_alarm_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "list_alarm_response",
+                        "vim_type": "VMware",
+                        "vim_uuid": list_alarm_input['vim_uuid'],
+                        "correlation_id": list_alarm_input['alarm_list_request']['correlation_id'],
+                        "list_alarm_response": triggered_alarm_list
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
+
+    def update_access_credentials(self, access_info):
+        """Verify if all the required access config params are provided and
+           updates access config in default vrops config file
+        """
+        update_status = False
+        # Check if all the required config params are passed in request
+        if not all(keys in access_info for keys in req_config_params):
+            logger.debug("All required Access Config Parameters not provided")
+            logger.debug("List of required Access Config Parameters: {}".format(req_config_params))
+            logger.debug("List of given Access Config Parameters: {}".format(access_info))
+            return update_status
+
+        wr_status = self.write_access_config(access_info)
+        return wr_status  # True/False
+
+    def write_access_config(self, access_info):
+        """Write access configuration to vROPs config file.
+        """
+        wr_status = False
+        try:
+            tree = XmlElementTree.parse(CONFIG_FILE_PATH)
+            root = tree.getroot()
+            for config in root:
+                if config.tag == 'Access_Config':
+                    for param in config:
+                        for key, val in six.iteritems(access_info):
+                            if param.tag == key:
+                                param.text = val
+
+            tree.write(CONFIG_FILE_PATH)
+            wr_status = True
+        except Exception as exp:
+            logger.warning("Failed to update Access Config Parameters: {}".format(exp))
+
+        return wr_status
+
+    def publish_access_update_response(self, access_update_status, access_info_req):
+        """Publish access update response
+        """
+        topic = 'access_credentials'
+        msg_key = 'vim_access_credentials_response'
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "vim_access_credentials_response",
+                        "correlation_id": access_info_req['access_config']['correlation_id'],
+                        "status": access_update_status
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}" \
+                    .format(topic, msg_key, response_msg))
+        # Core Add producer
+        return response_msg
+
+    def get_vim_access_config(self, vim_uuid):
+        """Get VIM access configuration & account details from path: VIM_ACCOUNTS_FILE_PATH
+        """
+        vim_account = {}
+        auth_manager = AuthManager()
+        vim_account_details = auth_manager.get_credentials(vim_uuid)
+
+        try:
+            if vim_account_details is not None:
+                vim_account['name'] = vim_account_details.name
+                vim_account['vim_tenant_name'] = vim_account_details.tenant_name
+                vim_account['vim_type'] = vim_account_details.type
+                vim_account['vim_url'] = vim_account_details.url
+                vim_account['org_user'] = vim_account_details.user
+                vim_account['org_password'] = vim_account_details.password
+                vim_account['vim_uuid'] = vim_account_details.uuid
+
+                vim_config = json.loads(vim_account_details.config)
+                vim_account['admin_username'] = vim_config['admin_username']
+                vim_account['admin_password'] = vim_config['admin_password']
+                vim_account['vrops_site'] = vim_config['vrops_site']
+                vim_account['vrops_user'] = vim_config['vrops_user']
+                vim_account['vrops_password'] = vim_config['vrops_password']
+                vim_account['vcenter_ip'] = vim_config['vcenter_ip']
+                vim_account['vcenter_port'] = vim_config['vcenter_port']
+                vim_account['vcenter_user'] = vim_config['vcenter_user']
+                vim_account['vcenter_password'] = vim_config['vcenter_password']
+
+                # Optional keys: use .get() so a missing key does not raise KeyError
+                if vim_config.get('nsx_manager') is not None:
+                    vim_account['nsx_manager'] = vim_config['nsx_manager']
+                if vim_config.get('nsx_user') is not None:
+                    vim_account['nsx_user'] = vim_config['nsx_user']
+                if vim_config.get('nsx_password') is not None:
+                    vim_account['nsx_password'] = vim_config['nsx_password']
+                if vim_config.get('orgname') is not None:
+                    vim_account['orgname'] = vim_config['orgname']
+        except Exception as exp:
+            logger.error("VIM account details not sufficient: {}".format(exp))
+        return vim_account
+
+
+"""
+def main():
+    #log.basicConfig(filename='mon_vrops_log.log',level=log.DEBUG)
+    set_logger()
+    plugin_rcvr = PluginReceiver()
+    plugin_rcvr.consume()
+
+if __name__ == "__main__":
+    main()
+"""
diff --git a/osm_mon/plugins/vRealiseOps/vROPs_Webservice/__init__.py b/osm_mon/plugins/vRealiseOps/vROPs_Webservice/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/osm_mon/plugins/vRealiseOps/vROPs_Webservice/install.sh b/osm_mon/plugins/vRealiseOps/vROPs_Webservice/install.sh
new file mode 100755 (executable)
index 0000000..6d05dfc
--- /dev/null
@@ -0,0 +1,86 @@
+#!/usr/bin/env bash
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SSL_Cert_Dir="${BASEDIR}/SSL_certificate"
+THISHOST=$(hostname -f)
+Domain_Name="${THISHOST}"
+#Domain_Name="www.vrops_webservice.com"
+WebServiceFile="${BASEDIR}/vrops_webservice"
+
+echo '
+ #################################################################
+ #####             Installing Required Packages            #####
+ #################################################################'
+
+# Function to install packages using apt-get
+function install_packages(){
+    [ -x /usr/bin/apt-get ] && apt-get install -y $*
+
+    # Check each package was properly installed
+    for PACKAGE in $*
+    do
+        PACKAGE_INSTALLED="no"
+        [ -x /usr/bin/apt-get ] && dpkg -l $PACKAGE &>> /dev/null && PACKAGE_INSTALLED="yes"
+        if [ "$PACKAGE_INSTALLED" = "no" ]
+        then
+            echo "failed to install package '$PACKAGE'. Revise network connectivity and try again" >&2
+            exit 1
+        fi
+    done
+}
+
+apt-get update  # To get the latest package lists
+
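+# NOTE: _DISTRO is assumed to be provided by the calling environment (e.g. "Ubuntu", "CentOS" or "Red");
+# it is not defined anywhere in this script.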
+[ "$_DISTRO" == "Ubuntu" ] && install_packages "python3-yaml python3-bottle python3-jsonschema python3-requests libxml2-dev libxslt-dev python3-dev python3-pip openssl"
+[ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && install_packages "python3-jsonschema python3-requests libxslt-devel libxml2-devel python3-devel python3-pip openssl"
+# The only way to install python-bottle on CentOS 7 is with easy_install or pip
+[ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && easy_install -U bottle
+
+# Required for the VMware connector. TODO: move this to a separate opt-in install script
+pip3 install pip==9.0.3
+pip3 install cherrypy
+
+echo '
+ #################################################################
+ #####             Generate SSL Certificate                #####
+ #################################################################'
+# Create SSL certificate folder and files
+mkdir -p "${SSL_Cert_Dir}"
+
+openssl genrsa -out "${SSL_Cert_Dir}/${Domain_Name}".key 2048
+openssl req -new -x509 -key "${SSL_Cert_Dir}/${Domain_Name}".key -out "${SSL_Cert_Dir}/${Domain_Name}".cert -days 3650 -subj /CN="${Domain_Name}"
+
+echo '
+ #################################################################
+ #####             Start Web Service                      #####
+ #################################################################'
+
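+# Launch the notification web service in the background; it serves HTTPS on port 8080
+# (see the vrops_webservice script below)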
+nohup python3 "${WebServiceFile}" &
+
+echo '
+ #################################################################
+ #####              Done                                  #####
+ #################################################################'
+
+
diff --git a/osm_mon/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice b/osm_mon/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice
new file mode 100755 (executable)
index 0000000..d7cf33c
--- /dev/null
@@ -0,0 +1,282 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2016-2017 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+"""
+ Webservice for vRealize Operations (vROPs) to post/notify alarm details.
+
+"""
+from osm_mon.core.settings import Config
+
+__author__ = "Arpita Kate"
+__date__ = "$15-Sept-2017 16:09:29$"
+__version__ = '0.1'
+
+import json
+import logging
+import os
+import sys
+from datetime import datetime
+from socket import getfqdn
+
+import requests
+from bottle import (ServerAdapter, route, run, server_names, default_app,
+                    request, response)
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..', '..'))
+from osm_mon.core.message_bus.producer import Producer
+
+# from core.message_bus.producer import KafkaProducer
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
+from osm_mon.core.database import DatabaseManager
+
+try:
+    from cheroot.wsgi import Server as WSGIServer
+    from cheroot.ssl.pyopenssl import pyOpenSSLAdapter
+except ImportError:
+    from cherrypy.wsgiserver import CherryPyWSGIServer as WSGIServer
+    from cherrypy.wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter
+
+# Set Constants
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+CERT_DIR = os.path.join(BASE_DIR, "SSL_certificate")
+certificate_name = getfqdn() + ".cert"
+key_name = getfqdn() + ".key"
+CERTIFICATE = os.path.join(CERT_DIR, certificate_name)
+KEY = os.path.join(CERT_DIR, key_name)
+# CERTIFICATE = os.path.join(CERT_DIR, "www.vrops_webservice.com.cert")
+# KEY = os.path.join(CERT_DIR, "www.vrops_webservice.com.key")
+CONFIG_FILE = os.path.join(BASE_DIR, '../vrops_config.xml')
+# Severity Mapping from vROPs to OSM
+VROPS_SEVERITY_TO_OSM_MAPPING = {
+    "ALERT_CRITICALITY_LEVEL_CRITICAL": "CRITICAL",
+    "ALERT_CRITICALITY_LEVEL_WARNING": "WARNING",
+    "ALERT_CRITICALITY_LEVEL_IMMEDIATE": "MAJOR",
+    "ALERT_CRITICALITY_LEVEL_INFO": "INDETERMINATE",
+    "ALERT_CRITICALITY_LEVEL_AUTO": "INDETERMINATE",
+    "ALERT_CRITICALITY_LEVEL_UNKNOWN": "INDETERMINATE",
+    "ALERT_CRITICALITY_LEVEL_NONE": "INDETERMINATE"
+}
+
+# Set logger
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+
+logger = logging.getLogger(__name__)
+
+
+def format_datetime(str_date):
+    """
+        Method to format a datetime value
+        Args:
+            str_date - epoch timestamp in milliseconds, as a string
+        Returns:
+           formatted datetime string
+    """
+    date_format = "%Y-%m-%dT%H:%M:%S"
+    formatted_datetime = None
+    try:
+        datetime_obj = datetime.fromtimestamp(float(str_date) / 1000.)
+        formatted_datetime = datetime_obj.strftime(date_format)
+    except Exception as exp:
+        logger.error('Exception: {} occurred while converting date {} into format {}'.format(
+            exp, str_date, date_format))
+
+    return formatted_datetime
+
+
+def get_alarm_config(alarm_name):
+    """
+        Method to get VIM access configuration parameters for the given alarm
+        Args:
+            alarm_name - name of the alarm
+        Returns:
+            dictionary of config parameters
+    """
+    vim_account = {}
+    database_manager = DatabaseManager()
+    alarm = database_manager.get_alarm(alarm_name, 'VMware')
+    vim_account_details = alarm.credentials
+
+    try:
+        if vim_account_details is not None:
+            vim_account['name'] = vim_account_details.name
+            vim_account['vim_tenant_name'] = vim_account_details.tenant_name
+            vim_account['vim_type'] = vim_account_details.type
+            vim_account['vim_uuid'] = vim_account_details.uuid
+            vim_account['vim_url'] = vim_account_details.url
+            vim_account['org_user'] = vim_account_details.user
+            vim_account['org_password'] = vim_account_details.password
+
+            vim_config = json.loads(vim_account_details.config)
+            vim_account['tenant_id'] = vim_config['tenant_id']
+            vim_account['admin_username'] = vim_config['admin_username']
+            vim_account['admin_password'] = vim_config['admin_password']
+            vim_account['vrops_site'] = vim_config['vrops_site']
+            vim_account['vrops_user'] = vim_config['vrops_user']
+            vim_account['vrops_password'] = vim_config['vrops_password']
+            vim_account['vcenter_ip'] = vim_config['vcenter_ip']
+            vim_account['vcenter_port'] = vim_config['vcenter_port']
+            vim_account['vcenter_user'] = vim_config['vcenter_user']
+            vim_account['vcenter_password'] = vim_config['vcenter_password']
+            # Optional keys: use .get() so a missing key does not raise KeyError
+            if vim_config.get('nsx_manager') is not None:
+                vim_account['nsx_manager'] = vim_config['nsx_manager']
+            if vim_config.get('nsx_user') is not None:
+                vim_account['nsx_user'] = vim_config['nsx_user']
+            if vim_config.get('nsx_password') is not None:
+                vim_account['nsx_password'] = vim_config['nsx_password']
+            if vim_config.get('orgname') is not None:
+                vim_account['orgname'] = vim_config['orgname']
+    except Exception as exp:
+        logger.error("VIM account details not sufficient: {}".format(exp))
+    return vim_account
+
+
+def get_alarm_definationID(alarm_instance_uuid, access_config):
+    """
+         Method to get the alarm/alert definition ID
+            Args:
+                alarm_instance_uuid : UUID of the alarm instance
+                access_config : VIM access configuration (vROPs site and credentials)
+            Returns:
+                alarm definition ID
+    """
+    alarm_definationID = None
+    if alarm_instance_uuid:
+        try:
+            # access_config = get_alarm_config()
+            headers = {'Accept': 'application/json'}
+            api_url = '{}/suite-api/api/alerts/{}' \
+                .format(access_config.get('vrops_site'), alarm_instance_uuid)
+            api_response = requests.get(api_url,
+                                        auth=(access_config.get('vrops_user'),
+                                              access_config.get('vrops_password')),
+                                        verify=False,
+                                        headers=headers
+                                        )
+
+            if api_response.status_code == 200:
+                data = api_response.json()
+                if data.get("alertDefinitionId") is not None:
+                    alarm_definationID = '-'.join(data.get("alertDefinitionId").split('-')[1:])
+            else:
+                logger.error("Failed to get alert definition ID for alarm {}"
+                             .format(alarm_instance_uuid))
+        except Exception as exp:
+            logger.error("Exception occured while getting alert definition ID for alarm : {}"
+                         .format(exp, alarm_instance_uuid))
+
+    return alarm_definationID
+
+
+@route('/notify/<alarmID>', method='POST')
+def notify_alarm(alarmID):
+    """
+        Method notify alarm details by publishing message at Kafka message bus
+        Args:
+            alarmID - Name of alarm
+        Returns:
+           response code
+    """
+    logger.info("Request:{} from:{} {} {} " \
+                .format(request, request.remote_addr, request.method, request.url))
+    response.headers['Content-Type'] = 'application/json'
+    try:
+        postdata = json.loads(request.body.read())
+        notify_details = {}
+        vim_access_config = get_alarm_config(postdata.get('alertName'))
+        # Parse notify data
+        notify_details['vim_uuid'] = vim_access_config.get('vim_uuid')
+        notify_details['alarm_uuid'] = get_alarm_definationID(postdata.get('alertId'),
+                                                              vim_access_config)
+        notify_details['description'] = postdata.get('info')
+        notify_details['alarm_instance_uuid'] = alarmID
+        notify_details['resource_uuid'] = '-'.join(postdata.get('alertName').split('-')[1:])
+        notify_details['tenant_uuid'] = vim_access_config.get('tenant_id')
+        notify_details['vim_type'] = "VMware"
+        notify_details['severity'] = VROPS_SEVERITY_TO_OSM_MAPPING.get(postdata.get('criticality'),
+                                                                       'INDETERMINATE')
+        notify_details['status'] = postdata.get('status')
+        if postdata.get('startDate'):
+            notify_details['start_date_time'] = format_datetime(postdata.get('startDate'))
+        if postdata.get('updateDate'):
+            notify_details['update_date_time'] = format_datetime(postdata.get('updateDate'))
+        if postdata.get('cancelDate'):
+            notify_details['cancel_date_time'] = format_datetime(postdata.get('cancelDate'))
+
+        alarm_details = {'schema_version': 1.0,
+                         'schema_type': "notify_alarm",
+                         'notify_details': notify_details
+                         }
+        alarm_data = json.dumps(alarm_details)
+        logger.info("Alarm details: {}".format(alarm_data))
+
+        # Publish Alarm details
+        producer = Producer()
+        producer.send(topic='alarm_response', key='notify_alarm', value=alarm_data)
+        producer.flush()
+
+        # return 201 on Success
+        response.status = 201
+
+    except Exception as exp:
+        logger.error('Exception: {} occurred while notifying alarm {}.'.format(exp, alarmID))
+        # return 500 on Error
+        response.status = 500
+
+    return response
+
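+# Illustrative only (not part of the handler above): a vROPs outbound notification of the shape
+# this endpoint expects might look like the sketch below. The field names match the keys read in
+# notify_alarm(); the values are made-up placeholders.
+#
+#   POST https://<this-host>:8080/notify/<alarm_instance_uuid>
+#   {
+#       "alertName": "CPU_Utilization_Above_Thr-<resource_uuid>",
+#       "alertId": "<vrops_alert_instance_uuid>",
+#       "info": "CPU usage is above the configured threshold",
+#       "criticality": "ALERT_CRITICALITY_LEVEL_CRITICAL",
+#       "status": "ACTIVE",
+#       "startDate": "1511870400000"
+#   }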
+
+class SSLWebServer(ServerAdapter):
+    """
+    CherryPy web server with SSL support.
+    """
+
+    def run(self, handler):
+        """
+        Runs a CherryPy Server using the SSL certificate.
+        """
+        server = WSGIServer((self.host, self.port), handler)
+        server.ssl_adapter = pyOpenSSLAdapter(
+            certificate=CERTIFICATE,
+            private_key=KEY,
+            # certificate_chain="intermediate_cert.crt"
+        )
+
+        try:
+            server.start()
+            logger.info("Started vROPs Web Service")
+        except Exception as exp:
+            server.stop()
+            logger.error("Exception: {} Stopped vROPs Web Service".format(exp))
+
+
+if __name__ == "__main__":
+    # Start SSL Web Service
+    logger.info("Start vROPs Web Service")
+    app = default_app()
+    server_names['sslwebserver'] = SSLWebServer
+    run(app=app, host=getfqdn(), port=8080, server='sslwebserver')
diff --git a/osm_mon/plugins/vRealiseOps/vrops_config.xml b/osm_mon/plugins/vRealiseOps/vrops_config.xml
new file mode 100644 (file)
index 0000000..c2bf38c
--- /dev/null
@@ -0,0 +1,144 @@
+<alarmsDefaultConfig>
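+    <!-- Default alarm definitions, metric-key mappings and sample access credentials used by the
+         vROPs plugin; the Access_Config values below are placeholders that can be updated at
+         runtime via write_access_config() in plugin_receiver.py. -->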
+    <Average_Memory_Usage_Above_Threshold>
+        <vrops_alarm>Avg_Mem_Usage_Above_Thr</vrops_alarm>
+        <period>300</period>
+        <evaluation>2</evaluation>
+        <cancel_period>300</cancel_period>
+        <cancel_cycles>2</cancel_cycles>
+        <enabled>true</enabled>
+        <repeat>false</repeat>
+        <action>acknowledge</action>
+        <resource_kind>VirtualMachine</resource_kind>
+        <adapter_kind>VMWARE</adapter_kind>
+        <alarm_type>16</alarm_type>
+        <alarm_subType>19</alarm_subType>
+        <impact>risk</impact>
+        <unit>%</unit>
+    </Average_Memory_Usage_Above_Threshold>
+    <Read_Latency_Above_Threshold>
+        <vrops_alarm>RD_Latency_Above_Thr</vrops_alarm>
+        <period>300</period>
+        <evaluation>3</evaluation>
+        <cancel_period>300</cancel_period>
+        <cancel_cycles>3</cancel_cycles>
+        <enabled>true</enabled>
+        <repeat>false</repeat>
+        <action>acknowledge</action>
+        <resource_kind>VirtualMachine</resource_kind>
+        <adapter_kind>VMWARE</adapter_kind>
+        <alarm_type>18</alarm_type>
+        <alarm_subType>19</alarm_subType>
+        <impact>risk</impact>
+        <unit>msec</unit>
+    </Read_Latency_Above_Threshold>
+    <Write_Latency_Above_Threshold>
+        <vrops_alarm>WR_Latency_Above_Thr</vrops_alarm>
+        <period>300</period>
+        <evaluation>3</evaluation>
+        <cancel_period>300</cancel_period>
+        <cancel_cycles>3</cancel_cycles>
+        <enabled>true</enabled>
+        <repeat>false</repeat>
+        <action>acknowledge</action>
+        <resource_kind>VirtualMachine</resource_kind>
+        <adapter_kind>VMWARE</adapter_kind>
+        <alarm_type>18</alarm_type>
+        <alarm_subType>19</alarm_subType>
+        <impact>risk</impact>
+        <unit>msec</unit>
+    </Write_Latency_Above_Threshold>
+    <Net_Packets_Dropped>
+        <vrops_alarm>Net_Packets_Dropped</vrops_alarm>
+        <period>300</period>
+        <evaluation>1</evaluation>
+        <cancel_period>300</cancel_period>
+        <cancel_cycles>1</cancel_cycles>
+        <enabled>true</enabled>
+        <repeat>false</repeat>
+        <action>acknowledge</action>
+        <resource_kind>VirtualMachine</resource_kind>
+        <adapter_kind>VMWARE</adapter_kind>
+        <alarm_type>19</alarm_type>
+        <alarm_subType>19</alarm_subType>
+        <impact>risk</impact>
+        <unit>nos</unit>
+    </Net_Packets_Dropped>
+    <CPU_Utilization_Above_Threshold>
+        <vrops_alarm>CPU_Utilization_Above_Thr</vrops_alarm>
+        <period>300</period>
+        <evaluation>1</evaluation>
+        <cancel_period>300</cancel_period>
+        <cancel_cycles>1</cancel_cycles>
+        <enabled>true</enabled>
+        <repeat>false</repeat>
+        <action>acknowledge</action>
+        <resource_kind>VirtualMachine</resource_kind>
+        <adapter_kind>VMWARE</adapter_kind>
+        <alarm_type>16</alarm_type>
+        <alarm_subType>19</alarm_subType>
+        <impact>risk</impact>
+        <unit>%</unit>
+    </CPU_Utilization_Above_Threshold>
+    <average_memory_utilization>
+        <metric_key>mem|usage_average</metric_key>
+        <unit>%</unit>
+    </average_memory_utilization>
+    <cpu_utilization>
+        <metric_key>cpu|usage_average</metric_key>
+        <unit>%</unit>
+    </cpu_utilization>
+    <read_latency_0>
+        <metric_key>virtualDisk:scsi0:0|totalReadLatency_average</metric_key>
+        <unit>msec</unit>
+    </read_latency_0>
+    <write_latency_0>
+        <metric_key>virtualDisk:scsi0:0|totalWriteLatency_average</metric_key>
+        <unit>msec</unit>
+    </write_latency_0>
+    <read_latency_1>
+        <metric_key>virtualDisk:scsi0:1|totalReadLatency_average</metric_key>
+        <unit>msec</unit>
+    </read_latency_1>
+    <write_latency_1>
+        <metric_key>virtualDisk:scsi0:1|totalWriteLatency_average</metric_key>
+        <unit>msec</unit>
+    </write_latency_1>
+    <packets_dropped_0>
+        <metric_key>net:4000|dropped</metric_key>
+        <unit>nos</unit>
+    </packets_dropped_0>
+    <packets_dropped_1>
+        <metric_key>net:4001|dropped</metric_key>
+        <unit>nos</unit>
+    </packets_dropped_1>
+    <packets_dropped_2>
+        <metric_key>net:4002|dropped</metric_key>
+        <unit>nos</unit>
+    </packets_dropped_2>
+    <packets_received>
+        <metric_key>net:Aggregate of all instances|packetsRxPerSec</metric_key>
+        <unit>nos</unit>
+    </packets_received>
+    <packets_sent>
+        <metric_key>net:Aggregate of all instances|packetsTxPerSec</metric_key>
+        <unit>nos</unit>
+    </packets_sent>
+    <Access_Config>
+        <vrops_site>https://192.169.241.123</vrops_site>
+        <vrops_user>admin</vrops_user>
+        <vrops_password>vmware</vrops_password>
+        <vcloud-site>https://192.169.241.15</vcloud-site>
+        <admin_username>admin</admin_username>
+        <admin_password>vmware</admin_password>
+        <nsx_manager>https://192.169.241.181</nsx_manager>
+        <nsx_user>admin</nsx_user>
+        <nsx_password>VMware1!</nsx_password>
+        <vcenter_ip>192.169.241.13</vcenter_ip>
+        <vcenter_port>443</vcenter_port>
+        <vcenter_user>admin</vcenter_user>
+        <vcenter_password>vmware</vcenter_password>
+        <vim_tenant_name>Org2</vim_tenant_name>
+        <orgname>Org2</orgname>
+        <tenant_id>Org2</tenant_id>
+    </Access_Config>
+</alarmsDefaultConfig>
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/__init__.py b/osm_mon/tests/plugins/CloudWatch/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json
new file mode 100644 (file)
index 0000000..341b2bd
--- /dev/null
@@ -0,0 +1,11 @@
+{
+"schema_version": "1.0",
+"schema_type": "alarm_ack",
+"vim_type": "AWS",
+"ack_details":
+{
+"alarm_uuid": "CPU_Utilization_i-098da78cbd8304e17",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": ""
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json
new file mode 100644 (file)
index 0000000..ecf403e
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json
new file mode 100644 (file)
index 0000000..17c423d
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold1",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json
new file mode 100644 (file)
index 0000000..b2f5acb
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold",
+"resource_uuid": "i-09462760703837b26",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json
new file mode 100644 (file)
index 0000000..ecf403e
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/operation_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/operation_invalid.json
new file mode 100644 (file)
index 0000000..31e1e0b
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "Greaterthan",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/operation_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/operation_valid.json
new file mode 100644 (file)
index 0000000..adb789b
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/statistic_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/statistic_invalid.json
new file mode 100644 (file)
index 0000000..8c2e68d
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAX"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/statistic_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_alarm/statistic_valid.json
new file mode 100644 (file)
index 0000000..adb789b
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..0fe0dcb
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZ",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json
new file mode 100644 (file)
index 0000000..18cc23c
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZATION",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_invalid.json
new file mode 100644 (file)
index 0000000..e51a670
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e16",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid.json
new file mode 100644 (file)
index 0000000..a2cd4b5
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json
new file mode 100644 (file)
index 0000000..f465df7
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold1_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json
new file mode 100644 (file)
index 0000000..1fa6870
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-09462760703837b26",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json
new file mode 100644 (file)
index 0000000..6c35ab2
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold2_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json
new file mode 100644 (file)
index 0000000..716b039
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold4_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..f30ab87
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_metric_data_request",
+"metric_name": "CPU_UTILIATION",
+"metric_uuid": "",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "S0123",
+"vim_type": "AWS"
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json
new file mode 100644 (file)
index 0000000..ea3922b
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "S0123",
+"vim_type": "AWS"
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json
new file mode 100644 (file)
index 0000000..a4d02a3
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_alarm_request",
+"vim_type": "AWS",
+"alarm_list_request":
+{
+"correlation_id": "SO123",
+"resource_uuid": "",
+"alarm_name": "",
+"severity": ""
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json
new file mode 100644 (file)
index 0000000..d0f31f2
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_alarm_request",
+"vim_type": "AWS",
+"alarm_list_request":
+{
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17",
+"alarm_name": "",
+"severity": ""
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json
new file mode 100644 (file)
index 0000000..bf46579
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_alarm_request",
+"vim_type": "AWS",
+"alarm_list_request":
+{
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17",
+"alarm_name": "",
+"severity": "Critical"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..6108e77
--- /dev/null
@@ -0,0 +1,11 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_metrics_request",
+"vim_type": "AWS",
+"metrics_list_request":
+{
+"metric_name": "CPU_UTILZATION",
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json
new file mode 100644 (file)
index 0000000..b1bd9de
--- /dev/null
@@ -0,0 +1,11 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_metrics_request",
+"vim_type": "AWS",
+"metrics_list_request":
+{
+"metric_name": "CPU_UTILIZATION",
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json
new file mode 100644 (file)
index 0000000..815edf9
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3500" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json
new file mode 100644 (file)
index 0000000..dad9a24
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3600" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json
new file mode 100644 (file)
index 0000000..0ff4f0e
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTLIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3600" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json
new file mode 100644 (file)
index 0000000..dad9a24
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3600" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/name_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/name_invalid.json
new file mode 100644 (file)
index 0000000..fe171e4
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e13",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/name_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/name_valid.json
new file mode 100644 (file)
index 0000000..7070dff
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold4_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/operation_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/operation_invalid.json
new file mode 100644 (file)
index 0000000..0116228
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "Less",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/operation_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/operation_valid.json
new file mode 100644 (file)
index 0000000..5fb8eb6
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/statistic_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/statistic_invalid.json
new file mode 100644 (file)
index 0000000..991d844
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAX"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/statistic_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/statistic_valid.json
new file mode 100644 (file)
index 0000000..5fb8eb6
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json
new file mode 100644 (file)
index 0000000..581fb55
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold4",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..0fe0dcb
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZ",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json b/osm_mon/tests/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json
new file mode 100644 (file)
index 0000000..18cc23c
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZATION",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/tests/plugins/CloudWatch/unit_tests_alarms.py b/osm_mon/tests/plugins/CloudWatch/unit_tests_alarms.py
new file mode 100644 (file)
index 0000000..ae036cf
--- /dev/null
@@ -0,0 +1,408 @@
+from connection import Connection
+import unittest
+import sys
+import json
+import os
+import time
+from jsmin import jsmin
+sys.path.append("../../test/core/")
+from test_producer import KafkaProducer
+from kafka import KafkaConsumer
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except ImportError:
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+# Test Producer object to generate request
+
+producer = KafkaProducer('create_alarm_request')
+obj = Connection() 
+connections = obj.setEnvironment()
+connections_res = obj.connection_instance()
+cloudwatch_conn = connections_res['cloudwatch_connection'] 
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+'''Test E2E Flow : test cases are run one at a time.
+1) A common request is generated using the request function in test_producer.py (/test/core)
+2) The request is then consumed by the consumer (plugin)
+3) The response is sent back on the message bus in plugin_alarm.py using
+   the response functions in producer.py (/core/message-bus)
+4) The response is then consumed again by unit_tests_alarms.py
+   and the test assertions are applied to the response.
+'''
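+# These tests assume a Kafka broker reachable at localhost:9092 and valid AWS credentials
+# available to Connection().setEnvironment(); they exercise the live CloudWatch plugin
+# end-to-end rather than mocked responses.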
+
+class config_alarm_name_test(unittest.TestCase):
+   
+
+    def setUp(self):
+        pass
+    # Generate a create_alarm request with a new alarm name and a new instance id
+    def test_differentName_differentInstance(self):
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                self.assertTrue(info['alarm_create_response']['status'])
+                return        
+
+    # Generate a create_alarm request with a new alarm name and an existing instance id
+    def test_differentName_sameInstance(self):
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_sameInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete1.json",'delete_alarm_request','','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status'])   
+                return
+
+    # Generate a create_alarm request with an existing alarm name and a new instance id
+    def test_sameName_differentInstance(self): 
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_sameName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete2.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status']) 
+                return    
+
+    # Generate a create_alarm request with an existing alarm name and an existing instance id
+    def test_sameName_sameInstance(self):  
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_sameName_sameInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info, "---")
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
+                self.assertEqual(info, None)  
+                return        
+
+    # Generate a create_alarm request with a valid statistic
+    def test_statisticValid(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/statistic_valid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status']) 
+                return
+
+    # Generate a create_alarm request with an invalid statistic
+    def test_statisticValidNot(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/statistic_invalid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info, "---")
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
+                self.assertEqual(info, None)
+                return  
+
+    # Generate a create_alarm request with a valid operation
+    def test_operationValid(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/operation_valid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status']) 
+                return
+
+    # Generate a create_alarm request with an invalid operation
+    def test_operationValidNot(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/operation_invalid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                self.assertEqual(info,None) 
+                return                 
+                 
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+class update_alarm_name_test(unittest.TestCase):
+
+    # To generate a request for testing a valid alarm_id in an update alarm request
+    def test_nameValid(self):
+        producer.request("test_schemas/update_alarm/update_alarm_new_alarm.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/name_valid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete4.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_update_response']['status'])
+                return 
+    
+    # To generate a request for testing an invalid alarm_id in an update alarm request
+    def test_nameInvalid(self):
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/name_invalid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                self.assertEqual(info,None)
+                return
+
+    # To generate a request for testing a valid statistic in an update alarm request
+    def test_statisticValid(self):
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/statistic_valid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_update_response']['status'])
+                return
+
+    # To generate a request for testing an invalid statistic in an update alarm request
+    def test_statisticInvalid(self):
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/statistic_invalid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                self.assertEqual(info,None)
+                return            
+
+    # To generate a request for testing a valid operation in an update alarm request
+    def test_operationValid(self):
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/operation_valid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_update_response']['status'])
+                return
+              
+#--------------------------------------------------------------------------------------------------------------------------------------
+class delete_alarm_test(unittest.TestCase):
+
+    # To generate a request for testing a valid alarm_id in a delete alarm request
+    def test_nameValid(self):             
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "delete_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertTrue(info['alarm_deletion_response']['status'])
+                return
+
+    # To generate a request for testing an invalid alarm_id in a delete alarm request
+    def test_nameInvalid(self):              
+        time.sleep(2)
+        producer.request("test_schemas/delete_alarm/name_invalid.json",'delete_alarm_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "delete_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(info,None)
+                return             
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+class list_alarm_test(unittest.TestCase): 
+
+    # To generate an alarm list request with no filter arguments
+    def test_valid_no_arguments(self):
+        time.sleep(2)
+        producer.request("test_schemas/list_alarm/list_alarm_valid_no_arguments.json",'alarm_list_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "list_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return
+
+    # To generate an alarm list request with one filter argument
+    def test_valid_one_arguments(self):
+        time.sleep(2)
+        producer.request("test_schemas/list_alarm/list_alarm_valid_one_arguments.json",'alarm_list_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "list_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return
+
+    # To generate an alarm list request with two filter arguments
+    def test_valid_two_arguments(self):
+        time.sleep(2)
+        producer.request("test_schemas/list_alarm/list_alarm_valid_two_arguments.json",'alarm_list_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "list_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return
+
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+class alarm_details_test(unittest.TestCase):
+
+    # To generate a request for testing valid input fields in an acknowledge alarm request
+    def test_Valid(self):
+        time.sleep(2)
+        producer.request("test_schemas/alarm_details/acknowledge_alarm.json",'acknowledge_alarm', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "notify_alarm": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return                
+
+if __name__ == '__main__':
+
+    # Saving test results in a log file
+
+    log_file = 'log_file.txt'
+    f = open(log_file, "w")
+    runner = unittest.TextTestRunner(f)
+    unittest.main(testRunner=runner, exit=False)
+    f.close()
+
+    # For printing results on Console
+    # unittest.main()
diff --git a/osm_mon/tests/plugins/CloudWatch/unit_tests_metrics.py b/osm_mon/tests/plugins/CloudWatch/unit_tests_metrics.py
new file mode 100644 (file)
index 0000000..625e872
--- /dev/null
@@ -0,0 +1,208 @@
+from connection import Connection
+import unittest
+import sys
+import json
+import os
+import time
+from jsmin import jsmin
+sys.path.append("../../test/core/")
+from test_producer import KafkaProducer
+from kafka import KafkaConsumer
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except ImportError:
+    sys.exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+# Test producer object used to generate requests
+
+producer = KafkaProducer('')
+obj = Connection() 
+connections = obj.setEnvironment()
+connections_res = obj.connection_instance()
+cloudwatch_conn = connections_res['cloudwatch_connection'] 
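+# Note: Connection comes from the plugin's connection.py; setEnvironment() and
+# connection_instance() are expected to read AWS credentials from the environment and
+# return boto connection objects, including the CloudWatch connection used by the plugin.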
+
+# Consumer object to consume responses from the message bus
+server = {'server': 'localhost:9092', 'topic': 'metric_request'}
+_consumer = KafkaConsumer(bootstrap_servers=server['server'])
+_consumer.subscribe(['metric_response'])
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+'''Test E2E Flow : Test cases are executed one at a time.
+1) A common request is generated using the request function in test_producer.py (/core/message-bus)
+2) The request is then consumed by the consumer (plugin)
+3) The response is sent back on the message bus in plugin_metrics.py using
+   the response functions in producer.py (/core/message-bus)
+4) The response is then consumed again by unit_tests_metrics.py
+   and the test cases are applied to the response.
+'''
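+# Note: these tests assume a local Kafka broker on localhost:9092 and a running CloudWatch
+# plugin consuming the metric_request topic; without both, the consumer loops below block.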
+class test_create_metrics(unittest.TestCase):
+
+    def test_status_positive(self):
+        time.sleep(2)
+        # To generate a request for testing a valid metric_name in a create metric request
+        producer.request("create_metrics/create_metric_req_valid.json",'create_metric_request', '','metric_request')  
+
+        for message in _consumer:
+            if message.key == "create_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertTrue(resp['metric_create_response']['status'])
+                self.assertEqual(resp['metric_create_response']['metric_uuid'],0)
+                return 
+
+    def test_status_negative(self):
+        time.sleep(2)
+        # To generate a request for testing an invalid metric_name in a create metric request
+        producer.request("create_metrics/create_metric_req_invalid.json",'create_metric_request', '','metric_request')  
+
+        for message in _consumer:
+            if message.key == "create_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metric_create_response']['status'])
+                self.assertEqual(resp['metric_create_response']['metric_uuid'],None)
+                return 
+
+class test_metrics_data(unittest.TestCase):
+
+    def test_met_name_positive(self):
+        time.sleep(2)
+        # To generate a request for testing a valid metric_name in a read_metric_data_request
+        producer.request("read_metrics_data/read_metric_name_req_valid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertEqual(type(resp['metrics_data']),dict)
+                return 
+
+    def test_met_name_negative(self):
+        time.sleep(2)
+        # To generate a request for testing an invalid metric_name in a read_metric_data_request
+        producer.request("read_metrics_data/read_metric_name_req_invalid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metrics_data'])
+                return 
+
+    def test_coll_period_positive(self):
+        # To generate a request for testing a valid collection_period in a read_metric_data_request
+        # For AWS metric_data_stats the collection period should be a multiple of 60
+        time.sleep(2)
+        producer.request("read_metrics_data/read_coll_period_req_valid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertEqual(type(resp),dict)
+                return
+
+    def test_coll_period_negative(self):
+        time.sleep(2)
+        # To generate a request for testing an invalid collection_period in a read_metric_data_request
+        producer.request("read_metrics_data/read_coll_period_req_invalid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metrics_data'])
+                return
+
+class test_update_metrics(unittest.TestCase):
+
+    def test_upd_status_positive(self):
+        time.sleep(2)
+        # To generate a request for testing a valid metric_name in an update metric request
+        producer.request("update_metrics/update_metric_req_valid.json",'update_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "update_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertTrue(resp['metric_update_response']['status'])
+                self.assertEqual(resp['metric_update_response']['metric_uuid'],0)
+                return
+
+    def test_upd_status_negative(self):
+        time.sleep(2)
+        # To generate a request for testing an invalid metric_name in an update metric request
+        producer.request("update_metrics/update_metric_req_invalid.json",'update_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "update_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metric_update_response']['status'])
+                self.assertEqual(resp['metric_update_response']['metric_uuid'],None)
+                return
+
+class test_delete_metrics(unittest.TestCase):
+
+    def test_del_met_name_positive(self):
+        time.sleep(2)
+        # To generate a request for testing a valid metric_name in a delete metric request
+        producer.request("delete_metrics/delete_metric_req_valid.json",'delete_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "delete_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['status'])
+                return
+
+    def test_del_met_name_negative(self):
+        time.sleep(2)
+        # To generate a request for testing an invalid metric_name in a delete metric request
+        producer.request("delete_metrics/delete_metric_req_invalid.json",'delete_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "delete_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp)
+                return
+
+class test_list_metrics(unittest.TestCase):
+
+    def test_list_met_name_positive(self):
+        time.sleep(2)
+        # To generate a request for testing a valid metric_name in a list metric request
+        producer.request("list_metrics/list_metric_req_valid.json",'list_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "list_metrics_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertEqual(type(resp['metrics_list']),list)
+                return
+
+    def test_list_met_name_negative(self):
+        time.sleep(2)
+        # To generate a request for testing an invalid metric_name in a list metric request
+        producer.request("list_metrics/list_metric_req_invalid.json",'list_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "list_metrics_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metrics_list'])
+                return
+
+
+if __name__ == '__main__':
+
+    # Saving test results in a log file
+
+    log_file = 'log_file.txt'
+    f = open(log_file, "w")
+    runner = unittest.TextTestRunner(f)
+    unittest.main(testRunner=runner, exit=False)
+    f.close()
+
+    # For printing results on Console
+    # unittest.main()
+
diff --git a/osm_mon/tests/plugins/OpenStack/__init__.py b/osm_mon/tests/plugins/OpenStack/__init__.py
new file mode 100644 (file)
index 0000000..32eb94e
--- /dev/null
@@ -0,0 +1,21 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
diff --git a/osm_mon/tests/plugins/OpenStack/integration/__init__.py b/osm_mon/tests/plugins/OpenStack/integration/__init__.py
new file mode 100644 (file)
index 0000000..cd7731b
--- /dev/null
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
diff --git a/osm_mon/tests/plugins/OpenStack/integration/test_alarm_integration.py b/osm_mon/tests/plugins/OpenStack/integration/test_alarm_integration.py
new file mode 100644 (file)
index 0000000..fbec56c
--- /dev/null
@@ -0,0 +1,221 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+# __author__ = "Helena McGough"
+"""Test an end to end Openstack alarm requests."""
+
+import json
+import logging
+import unittest
+
+import mock
+from kafka import KafkaConsumer
+from kafka import KafkaProducer
+from kafka.errors import KafkaError
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager, VimCredentials
+from osm_mon.plugins.OpenStack import response
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
+
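+# These integration tests exercise the Kafka request/response path end to end: requests are
+# published with a real KafkaProducer and handled by OpenstackAlarmHandler, while all
+# OpenStack/Aodh interactions (auth token, endpoint, HTTP requests, DB writes) are mocked.
+# Only a local Kafka broker on localhost:9092 is needed; otherwise setUp() skips the tests.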
+@mock.patch.object(DatabaseManager, "save_alarm", mock.Mock())
+@mock.patch.object(Common, "get_auth_token", mock.Mock())
+@mock.patch.object(Common, "get_endpoint", mock.Mock())
+class AlarmIntegrationTest(unittest.TestCase):
+    def setUp(self):
+        try:
+            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
+                                          key_serializer=str.encode,
+                                          value_serializer=str.encode
+                                          )
+            self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
+                                              key_deserializer=bytes.decode,
+                                              value_deserializer=bytes.decode,
+                                              auto_offset_reset='earliest',
+                                              consumer_timeout_ms=60000)
+            self.req_consumer.subscribe(['alarm_request'])
+        except KafkaError:
+            self.skipTest('Kafka server not present.')
+        # Set up common and alarming class instances
+        self.alarms = alarm_handler.OpenstackAlarmHandler()
+        self.openstack_auth = Common()
+
+    def tearDown(self):
+        self.producer.close()
+        self.req_consumer.close()
+
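+    # Each test below follows the same pattern: publish a request message, let the handler
+    # consume it from the 'alarm_request' topic, and assert that OpenStackResponseBuilder
+    # was asked to generate the corresponding response.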
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "update_alarm")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_update_alarm_req(self, resp, update_alarm, get_creds, perf_req):
+        """Test Aodh update alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_update_request": {"correlation_id": 123,
+                                            "alarm_uuid": "alarm_id",
+                                            "metric_uuid": "metric_id"}}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('alarm_request', key="update_alarm_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "update_alarm_request":
+                # Mock a valid alarm update
+                update_alarm.return_value = "alarm_id"
+                self.alarms.handle_message(message, 'test_id')
+
+                # A response message is generated and sent via MON's producer
+                resp.assert_called_with(
+                    'update_alarm_response', alarm_id="alarm_id", cor_id=123,
+                    status=True)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "configure_alarm")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_create_alarm_req(self, resp, config_alarm, get_creds, perf_req):
+        """Test Aodh create alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_create_request": {"correlation_id": 123,
+                                            "alarm_name": "my_alarm",
+                                            "metric_name": "cpu_utilization",
+                                            "resource_uuid": "my_resource",
+                                            "severity": "WARNING",
+                                            "threshold_value": 60,
+                                            "operation": "GT",
+                                            "vdu_name": "vdu",
+                                            "vnf_member_index": "1",
+                                            "ns_id": "1"}}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+        self.producer.send('alarm_request', key="create_alarm_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "create_alarm_request":
+                # Mock a valid alarm creation
+                config_alarm.return_value = "alarm_id"
+                self.alarms.handle_message(message, 'test_id')
+
+                # A response message is generated and sent via MON's producer
+                resp.assert_called_with(
+                    'create_alarm_response', status=True, alarm_id="alarm_id",
+                    cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "list_alarms")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_list_alarm_req(self, resp, list_alarm, get_creds, perf_req):
+        """Test Aodh list alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_list_request": {"correlation_id": 123,
+                                          "resource_uuid": "resource_id", }}
+
+        self.producer.send('alarm_request', key="list_alarm_request",
+                           value=json.dumps(payload))
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps([])})
+        resp.return_value = ''
+
+        for message in self.req_consumer:
+            if message.key == "list_alarm_request":
+                # Mock an empty list generated by the request
+                list_alarm.return_value = []
+                self.alarms.handle_message(message, 'test_id')
+
+                # Response message is generated
+                resp.assert_called_with(
+                    'list_alarm_response', alarm_list=[],
+                    cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "delete_alarm")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_delete_alarm_req(self, resp, del_alarm, get_creds, perf_req):
+        """Test Aodh delete alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_delete_request": {"correlation_id": 123,
+                                            "alarm_uuid": "alarm_id", }}
+
+        self.producer.send('alarm_request', key="delete_alarm_request",
+                           value=json.dumps(payload))
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps([])})
+        resp.return_value = ''
+
+        for message in self.req_consumer:
+            if message.key == "delete_alarm_request":
+                self.alarms.handle_message(message, 'test_id')
+
+                # Response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'delete_alarm_response', alarm_id="alarm_id",
+                    status=True, cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "update_alarm_state")
+    def test_ack_alarm_req(self, ack_alarm, get_creds):
+        """Test Aodh acknowledge alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"ack_details": {"alarm_uuid": "alarm_id", }}
+
+        self.producer.send('alarm_request', key="acknowledge_alarm",
+                           value=json.dumps(payload))
+
+        get_creds.return_value = mock_creds
+        ack_alarm.return_value = True
+
+        for message in self.req_consumer:
+            if message.key == "acknowledge_alarm":
+                self.alarms.handle_message(message, 'test_id')
+                ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'alarm_id', True)
+                return
+
+        self.fail("No message received in consumer")
diff --git a/osm_mon/tests/plugins/OpenStack/integration/test_metric_integration.py b/osm_mon/tests/plugins/OpenStack/integration/test_metric_integration.py
new file mode 100644 (file)
index 0000000..578c8b1
--- /dev/null
@@ -0,0 +1,242 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+# __author__ = "Helena McGough"
+"""Test an end to end Openstack metric requests."""
+
+import json
+import logging
+import unittest
+
+import mock
+from kafka import KafkaConsumer
+from kafka import KafkaProducer
+from kafka.errors import KafkaError
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import VimCredentials
+from osm_mon.plugins.OpenStack import response
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
+
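+# As in the alarm integration tests, a real Kafka broker handles the request/response round
+# trip while the Gnocchi/OpenStack side (auth, endpoints, HTTP requests) is mocked.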
+@mock.patch.object(Common, "get_auth_token", mock.Mock())
+@mock.patch.object(Common, "get_endpoint", mock.Mock())
+class MetricIntegrationTest(unittest.TestCase):
+    def setUp(self):
+        # Set up common and alarming class instances
+        self.metric_req = metric_handler.OpenstackMetricHandler()
+        self.openstack_auth = Common()
+
+        try:
+            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
+                                          key_serializer=str.encode,
+                                          value_serializer=str.encode
+                                          )
+            self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
+                                              key_deserializer=bytes.decode,
+                                              value_deserializer=bytes.decode,
+                                              auto_offset_reset='earliest',
+                                              consumer_timeout_ms=60000)
+            self.req_consumer.subscribe(['metric_request'])
+        except KafkaError:
+            self.skipTest('Kafka server not present.')
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "configure_metric")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_create_metric_req(self, resp, config_metric, get_creds, perf_req):
+        """Test Gnocchi create metric request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"metric_create_request": {"correlation_id": 123,
+                                             "metric_name": "cpu_utilization",
+                                             "resource_uuid": "resource_id"}}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="create_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "create_metric_request":
+                # A valid metric is created
+                config_metric.return_value = "metric_id", "resource_id"
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'create_metric_response', status=True, cor_id=123,
+                    metric_id="metric_id", resource_id="resource_id")
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "delete_metric")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_delete_metric_req(self, resp, del_metric, get_creds, perf_req):
+        """Test Gnocchi delete metric request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"vim_type": "OpenSTACK",
+                   "vim_uuid": "1",
+                   "correlation_id": 123,
+                   "metric_name": "cpu_utilization",
+                   "resource_uuid": "resource_id"}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="delete_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "delete_metric_request":
+                # Metric has been deleted
+                del_metric.return_value = True
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'delete_metric_response', metric_id='1',
+                    metric_name="cpu_utilization", status=True, resource_id="resource_id",
+                    cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "read_metric_data")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_read_metric_data_req(self, resp, read_data, get_creds, perf_req):
+        """Test Gnocchi read metric data request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"vim_type": "OpenSTACK",
+                   "vim_uuid": "test_id",
+                   "correlation_id": 123,
+                   "metric_name": "cpu_utilization",
+                   "resource_uuid": "resource_id"}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="read_metric_data_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            # Check the vim desired by the message
+            if message.key == "read_metric_data_request":
+                # Mock empty lists generated by the request message
+                read_data.return_value = [], []
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'read_metric_data_response', metric_id='1',
+                    metric_name="cpu_utilization", resource_id="resource_id", cor_id=123, times=[],
+                    metrics=[], status=True)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "list_metrics")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_list_metrics_req(self, resp, list_metrics, get_creds, perf_req):
+        """Test Gnocchi list metrics request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"vim_type": "OpenSTACK",
+                   "vim_uuid": "1",
+                   "metrics_list_request":
+                       {"correlation_id": 123, }}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="list_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            # Check the vim desired by the message
+            if message.key == "list_metric_request":
+                # Mock an empty list generated by the request
+                list_metrics.return_value = []
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'list_metric_response', metric_list=[], cor_id=123, status=True)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_update_metrics_req(self, resp, get_id, get_creds, perf_req):
+        """Test Gnocchi update metric request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"metric_update_request": {"metric_name": "my_metric",
+                                             "correlation_id": 123,
+                                             "resource_uuid": "resource_id", }}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="update_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            # Check the vim desired by the message
+            if message.key == "update_metric_request":
+                # Gnocchi doesn't support metric updates
+                get_id.return_value = "metric_id"
+                self.metric_req.handle_request(message, 'test_id')
+
+                # Response message is generated and sent via MON's producer
+                # No metric update has taken place
+                resp.assert_called_with(
+                    'update_metric_response', status=False, cor_id=123,
+                    resource_id="resource_id", metric_id="metric_id")
+
+                return
+        self.fail("No message received in consumer")
diff --git a/osm_mon/tests/plugins/OpenStack/integration/test_notify_alarm.py b/osm_mon/tests/plugins/OpenStack/integration/test_notify_alarm.py
new file mode 100644 (file)
index 0000000..8aa2c9f
--- /dev/null
@@ -0,0 +1,183 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all common OpenStack methods."""
+
+from __future__ import unicode_literals
+import json
+import logging
+import socket
+import unittest
+from threading import Thread
+
+import mock
+import requests
+from kafka import KafkaProducer
+from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
+from six.moves.BaseHTTPServer import HTTPServer
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.Aodh.alarm_handler import OpenstackAlarmHandler
+from osm_mon.plugins.OpenStack.common import Common
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+log = logging.getLogger(__name__)
+
+# Create an instance of the common openstack class, producer and consumer
+openstack_auth = Common()
+
+# Mock a valid get_response for alarm details
+valid_get_resp = '{"gnocchi_resources_threshold_rule":\
+                  {"resource_id": "my_resource_id"}}'
+
+
+class MockResponse(object):
+    """Mock a response class for generating responses."""
+
+    def __init__(self, text):
+        """Initialise a mock response with a text attribute."""
+        self.text = text
+
+
+class MockNotifierHandler(BaseHTTPRequestHandler):
+    """Mock the NotifierHandler class for testing purposes."""
+
+    def _set_headers(self):
+        """Set the headers for a request."""
+        self.send_response(200)
+        self.send_header('Content-type', 'text/html')
+        self.end_headers()
+
+    def do_GET(self):
+        """Mock functionality for a GET request."""
+        self._set_headers()
+
+    def do_POST(self):
+        """Mock functionality for a POST request."""
+        self._set_headers()
+        content_length = int(self.headers['Content-Length'])
+        post_data = self.rfile.read(content_length)
+        try:
+            post_data = post_data.decode()
+        except AttributeError:
+            pass
+        self.notify_alarm(json.loads(post_data))
+
+    def notify_alarm(self, values):
+        """Mock the notify_alarm functionality to generate a valid response."""
+        cfg = Config.instance()
+        self._alarming = OpenstackAlarmHandler()
+        self._common = Common()
+        self._response = OpenStackResponseBuilder()
+        alarm_id = values['alarm_id']
+
+        auth_token = Common.get_auth_token('test_id')
+        endpoint = Common.get_endpoint('alarming', 'test_id')
+
+        # If authenticated generate and send response message
+        if auth_token is not None and endpoint is not None:
+            url = "{}/v2/alarms/%s".format(endpoint) % alarm_id
+
+            # Get the resource_id of the triggered alarm and the date
+            result = Common.perform_request(
+                url, auth_token, req_type="get")
+            alarm_details = json.loads(result.text)
+            gnocchi_rule = alarm_details['gnocchi_resources_threshold_rule']
+            resource_id = gnocchi_rule['resource_id']
+            # Mock a date for testing purposes
+            a_date = "dd-mm-yyyy 00:00"
+
+            # Process an alarm notification if resource_id is valid
+            if resource_id is not None:
+                # Try generate and send response
+                try:
+                    resp_message = self._response.generate_response(
+                        'notify_alarm',
+                        alarm_id=alarm_id,
+                        resource_id=resource_id,
+                        sev=values['severity'], date=a_date,
+                        state=values['current'], vim_type="OpenStack")
+                except Exception:
+                    log.exception("Error generating response")
+
+
+def get_free_port():
+    """Function to get a free port to run the test webserver on."""
+    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
+    s.bind(('localhost', 0))
+    address, port = s.getsockname()
+    s.close()
+    return port
+
+
+# Create the webserver, port and run it on its own thread
+mock_server_port = get_free_port()
+mock_server = HTTPServer(('localhost', mock_server_port), MockNotifierHandler)
+mock_server_thread = Thread(target=mock_server.serve_forever)
+mock_server_thread.setDaemon(True)
+mock_server_thread.start()
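+# The mock notifier webserver starts at import time on a free localhost port, so the tests
+# below can POST Aodh-style alarm notifications to it and exercise notify_alarm end to end.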
+
+
+def test_do_get():
+    """Integration test for get request on notifier webserver."""
+    url = 'http://localhost:{port}/users'.format(port=mock_server_port)
+
+    # Send a request to the mock API server and store the response.
+    response = requests.get(url)
+
+    # Confirm that the request-response cycle completed successfully.
+    assert response.ok
+
+
+class AlarmNotificationTest(unittest.TestCase):
+    @mock.patch.object(OpenStackResponseBuilder, "generate_response")
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(Common, "get_endpoint")
+    @mock.patch.object(Common, "get_auth_token")
+    def test_post_notify_alarm(self, auth, endpoint, perf_req, resp):
+        """Integration test for notify_alarm."""
+        url = 'http://localhost:{port}/users'.format(port=mock_server_port)
+        payload = {"severity": "critical",
+                   "alarm_name": "my_alarm",
+                   "current": "current_state",
+                   "alarm_id": "my_alarm_id",
+                   "reason": "Threshold has been broken",
+                   "reason_data": {"count": 1,
+                                   "most_recent": "null",
+                                   "type": "threshold",
+                                   "disposition": "unknown"},
+                   "previous": "previous_state"}
+
+        # Mock authenticate and request response for testing
+        auth.return_value = "my_auth_token"
+        endpoint.return_value = "my_endpoint"
+        perf_req.return_value = MockResponse(valid_get_resp)
+
+        # Generate a post request for testing
+        response = requests.post(url, json.dumps(payload))
+        self.assertEqual(response.status_code, 200)
+        # A response message is generated with the following details
+        resp.assert_called_with(
+            "notify_alarm", alarm_id="my_alarm_id", resource_id="my_resource_id",
+            sev="critical", date='dd-mm-yyyy 00:00', state="current_state",
+            vim_type="OpenStack")
diff --git a/osm_mon/tests/plugins/OpenStack/integration/test_vim_account.py b/osm_mon/tests/plugins/OpenStack/integration/test_vim_account.py
new file mode 100644 (file)
index 0000000..f8b92ed
--- /dev/null
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+
+"""Test an end to end Openstack vim_account requests."""
+
+import json
+import logging
+import unittest
+
+from peewee import SqliteDatabase
+
+from osm_mon.core import database
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager, VimCredentials, Alarm
+
+log = logging.getLogger(__name__)
+
+test_db = SqliteDatabase(':memory:')
+
+MODELS = [VimCredentials, Alarm]
+
+
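+# The peewee models are rebound to an in-memory SQLite database for each test, so no
+# external database is touched while exercising the AuthManager create/edit/delete flow.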
+class VimAccountTest(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+        database.db = test_db
+        test_db.bind(MODELS)
+        test_db.connect()
+        test_db.drop_tables(MODELS)
+        test_db.create_tables(MODELS)
+        self.auth_manager = AuthManager()
+        self.database_manager = DatabaseManager()
+        self.database_manager.create_tables()
+
+    def tearDown(self):
+        super().tearDown()
+        test_db.close()
+
+    def test_create_edit_delete_vim_account(self):
+        """Test vim_account creation message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        create_payload = {
+            "_id": "test_id",
+            "name": "test_name",
+            "vim_type": "openstack",
+            "vim_url": "auth_url",
+            "vim_user": "user",
+            "vim_password": "password",
+            "vim_tenant_name": "tenant",
+            "config":
+                {
+                    "foo": "bar"
+                }
+        }
+        self.auth_manager.store_auth_credentials(create_payload)
+
+        creds = self.auth_manager.get_credentials('test_id')
+
+        self.assertIsNotNone(creds)
+        self.assertEqual(creds.name, create_payload['name'])
+        self.assertEqual(json.loads(creds.config), create_payload['config'])
+
+        # Set-up message, producer and consumer for tests
+        edit_payload = {
+            "_id": "test_id",
+            "name": "test_name_edited",
+            "vim_type": "openstack",
+            "vim_url": "auth_url",
+            "vim_user": "user",
+            "vim_password": "password",
+            "vim_tenant_name": "tenant",
+            "config":
+                {
+                    "foo_edited": "bar_edited"
+                }
+        }
+
+        self.auth_manager.store_auth_credentials(edit_payload)
+
+        creds = self.auth_manager.get_credentials('test_id')
+
+        self.assertEqual(creds.name, edit_payload['name'])
+        self.assertEqual(json.loads(creds.config), edit_payload['config'])
+
+        delete_payload = {
+            "_id": "test_id"
+        }
+
+        self.auth_manager.delete_auth_credentials(delete_payload)
+
+        creds = self.auth_manager.get_credentials('test_id')
+        self.assertIsNone(creds)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/__init__.py b/osm_mon/tests/plugins/OpenStack/unit/__init__.py
new file mode 100644 (file)
index 0000000..cd7731b
--- /dev/null
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_alarm_req.py b/osm_mon/tests/plugins/OpenStack/unit/test_alarm_req.py
new file mode 100644 (file)
index 0000000..02cec8b
--- /dev/null
@@ -0,0 +1,150 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# **************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the 'License'); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all alarm request message keys."""
+
+import json
+import logging
+import unittest
+from io import UnsupportedOperation
+
+import mock
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import VimCredentials, DatabaseManager
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler as alarm_req
+from osm_mon.plugins.OpenStack.Aodh.alarm_handler import OpenstackAlarmHandler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
+
+class Message(object):
+    """A class to mock a message object value for alarm requests."""
+
+    def __init__(self):
+        """Initialize a mocked message instance."""
+        self.topic = 'alarm_request'
+        self.key = None
+        self.value = json.dumps({'mock_value': 'mock_details'})
+
+
+class TestAlarmKeys(unittest.TestCase):
+    """Integration test for alarm request keys."""
+
+    def setUp(self):
+        """Setup the tests for alarm request keys."""
+        super(TestAlarmKeys, self).setUp()
+        self.alarming = alarm_req.OpenstackAlarmHandler()
+        self.alarming.common = Common()
+
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(Common, 'get_endpoint')
+    @mock.patch.object(Common, 'get_auth_token')
+    def test_alarming_authentication(self, get_token, get_endpoint, get_creds):
+        """Test getting an auth_token and endpoint for alarm requests."""
+        # Credentials, auth token and endpoint must still be resolved before the unsupported (empty) message key is rejected
+        get_creds.return_value = mock_creds
+
+        with self.assertRaises(UnsupportedOperation):
+            self.alarming.handle_message('', {}, 'test_id')
+
+        get_token.assert_called_with('test_id', verify_ssl=True)
+        get_endpoint.assert_any_call('alarming', 'test_id', verify_ssl=True)
+
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'delete_alarm')
+    def test_delete_alarm_key(self, del_alarm, get_creds):
+        """Test the functionality for a delete alarm request."""
+        value = {'alarm_delete_request': {
+            'correlation_id': 1,
+            'alarm_uuid': 'my_alarm_id'
+        }}
+
+        get_creds.return_value = mock_creds
+        del_alarm.return_value = {}
+
+        # Call the alarming functionality and check delete request
+        self.alarming.handle_message('delete_alarm_request', value, 'test_id')
+        del_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id', True)
+
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'list_alarms')
+    def test_list_alarm_key(self, list_alarm, get_creds):
+        """Test the functionality for a list alarm request."""
+        value = {'alarm_list_request': {'correlation_id': 1}}
+
+        get_creds.return_value = mock_creds
+
+        list_alarm.return_value = []
+
+        # Call the alarming functionality and check list functionality
+        self.alarming.handle_message('list_alarm_request', value, 'test_id')
+        list_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1}, True)
+
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'update_alarm_state')
+    def test_ack_alarm_key(self, ack_alarm, get_creds):
+        """Test the functionality for an acknowledge alarm request."""
+        value = {'ack_details': {'alarm_uuid': 'my_alarm_id'}}
+
+        get_creds.return_value = mock_creds
+
+        # Call alarming functionality and check acknowledge functionality
+        self.alarming.handle_message('acknowledge_alarm_request', value, 'test_id')
+        ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id', True)
+
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(DatabaseManager, 'save_alarm', mock.Mock())
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'configure_alarm')
+    def test_config_alarm_key(self, config_alarm, get_creds, perf_req):
+        """Test the functionality for a create alarm request."""
+        value = {'alarm_create_request': {'correlation_id': 1, 'threshold_value': 50,
+                                          'operation': 'GT', 'metric_name': 'cpu_utilization',
+                                          'vdu_name': 'vdu',
+                                          'vnf_member_index': '1',
+                                          'ns_id': '1',
+                                          'resource_uuid': '123'}}
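+        # perform_request is stubbed to return an object exposing only a 'text'
+        # attribute listing the metrics on the resource, so the metric lookup
+        # performed before the alarm is configured can succeed.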
+        mock_perf_req_return_value = {"metrics": {"cpu_util": 123}}
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps(mock_perf_req_return_value, sort_keys=True)})
+        get_creds.return_value = mock_creds
+
+        # Call alarming functionality and check config alarm call
+        config_alarm.return_value = 'my_alarm_id'
+        self.alarming.handle_message('create_alarm_request', value, 'test_id')
+        config_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1, 'threshold_value': 50,
+                                                             'operation': 'GT',
+                                                             'metric_name': 'cpu_utilization',
+                                                             'vdu_name': 'vdu',
+                                                             'vnf_member_index': '1', 'ns_id': '1',
+                                                             'resource_uuid': '123'}, {}, True)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_alarming.py b/osm_mon/tests/plugins/OpenStack/unit/test_alarming.py
new file mode 100644 (file)
index 0000000..aeacd7b
--- /dev/null
@@ -0,0 +1,299 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# **************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all alarm request message keys."""
+
+import json
+import logging
+import unittest
+
+import mock
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler as alarm_req
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+auth_token = mock.ANY
+alarm_endpoint = "alarm_endpoint"
+metric_endpoint = "metric_endpoint"
+
+
+class Response(object):
+    """Mock a response message class."""
+
+    def __init__(self, result):
+        """Initialise the response text and status code."""
+        self.text = json.dumps(result)
+        self.status_code = "MOCK_STATUS_CODE"
+
+
+class TestAlarming(unittest.TestCase):
+    """Tests for alarming class functions."""
+
+    maxDiff = None
+
+    def setUp(self):
+        """Setup for tests."""
+        super(TestAlarming, self).setUp()
+        self.alarming = alarm_req.OpenstackAlarmHandler()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_config_invalid_alarm_req(self, perf_req):
+        """Test configuring an invalid alarm request."""
+        # Configuring with invalid metric name results in failure
+        values = {"alarm_name": "my_alarm",
+                  "metric_name": "my_metric",
+                  "resource_uuid": "my_r_id"}
+        with self.assertRaises(KeyError):
+            self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_not_called()
+        perf_req.reset_mock()
+
+        # Configuring with missing metric name results in failure
+        values = {"alarm_name": "disk_write_ops",
+                  "resource_uuid": "my_r_id"}
+
+        with self.assertRaises(KeyError):
+            self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_not_called()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_config_valid_alarm_req(self, perf_req):
+        """Test configuring a valid alarm."""
+        values = {"alarm_name": "disk_write_ops",
+                  "metric_name": "disk_write_ops",
+                  "resource_uuid": "my_r_id",
+                  "statistic": "AVERAGE",
+                  "threshold_value": 60,
+                  "operation": "GT"}
+
+        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
+
+        self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {}, True)
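+        # The OSM metric name "disk_write_ops" is expected to be translated into
+        # the Gnocchi metric "disk.write.requests" in the generated Aodh payload.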
+        payload = {"name": "disk_write_ops",
+                   "gnocchi_resources_threshold_rule": {"resource_type": "generic", "comparison_operator": "gt",
+                                                        "granularity": 300, "metric": "disk.write.requests",
+                                                        "aggregation_method": "mean", "threshold": 60,
+                                                        "resource_id": "my_r_id"},
+                   "alarm_actions": ["http://localhost:8662"], "repeat_actions": True, "state": "ok", "type": "gnocchi_resources_threshold",
+                   "severity": "critical"}
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/", auth_token,
+            req_type="post", payload=json.dumps(payload, sort_keys=True), verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_delete_alarm_req(self, perf_req):
+        """Test delete alarm request."""
+        self.alarming.delete_alarm(alarm_endpoint, auth_token, "my_alarm_id", True)
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/my_alarm_id", auth_token, req_type="delete", verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_invalid_list_alarm_req(self, perf_req):
+        """Test an invalid list alarm request."""
+        # Request will not be performed without a resource_id
+        list_details = {"mock_details": "invalid_details"}
+        with self.assertRaises(KeyError):
+            self.alarming.list_alarms(alarm_endpoint, auth_token, list_details, True)
+        perf_req.assert_not_called()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_valid_list_alarm_req(self, perf_req):
+        """Test valid list alarm request."""
+        # Minimum requirement for an alarm list is resource_id
+        list_details = {"resource_uuid": "mock_r_id", "alarm_name": "mock_alarm", "severity": "critical"}
+
+        mock_perf_req_return_value = [
+            {"alarm_id": "1", "name": "mock_alarm", "severity": "critical",
+             "gnocchi_resources_threshold_rule": {"resource_id": "mock_r_id"}}]
+        perf_req.return_value = type('obj', (object,),
+                                     {'text': json.dumps(mock_perf_req_return_value)})
+
+        alarm_list = self.alarming.list_alarms(alarm_endpoint, auth_token, list_details, True)
+
+        self.assertDictEqual(alarm_list[0], mock_perf_req_return_value[0])
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/", auth_token, req_type="get", verify_ssl=True)
+        perf_req.reset_mock()
+
+        # Check list with alarm_name defined
+        list_details = {"resource_uuid": "mock_r_id",
+                        "alarm_name": "mock_alarm",
+                        "severity": "critical"}
+        alarm_list = self.alarming.list_alarms(alarm_endpoint, auth_token, list_details, True)
+
+        self.assertDictEqual(alarm_list[0], mock_perf_req_return_value[0])
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/", auth_token, req_type="get", verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_ack_alarm_req(self, perf_req):
+        """Test update alarm state for acknowledge alarm request."""
+        resp = Response({})
+        perf_req.return_value = resp
+
+        self.alarming.update_alarm_state(alarm_endpoint, auth_token, "my_alarm_id", True)
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/my_alarm_id/state", auth_token, req_type="put",
+            payload=json.dumps("ok"), verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_update_alarm_invalid(self, perf_req):
+        """Test update alarm with invalid get response."""
+        values = {"alarm_uuid": "my_alarm_id"}
+
+        perf_req.return_value = type('obj', (object,), {'invalid_prop': 'Invalid response'})
+
+        with self.assertRaises(Exception):
+            self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_called_with(mock.ANY, auth_token, req_type="get")
+
+    @mock.patch.object(Common, "perform_request")
+    def test_update_alarm_invalid_payload(self, perf_req):
+        """Test update alarm with invalid payload."""
+        resp = Response({"name": "my_alarm",
+                         "state": "alarm",
+                         "gnocchi_resources_threshold_rule":
+                             {"resource_id": "my_resource_id",
+                              "metric": "my_metric"}})
+        perf_req.return_value = resp
+        values = {"alarm_uuid": "my_alarm_id"}
+
+        with self.assertRaises(Exception):
+            self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_called_with(mock.ANY, auth_token, req_type="get")
+        self.assertEqual(perf_req.call_count, 1)
+
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, "check_payload")
+    @mock.patch.object(Common, "perform_request")
+    def test_update_alarm_valid(self, perf_req, check_pay):
+        """Test valid update alarm request."""
+        resp = Response({"alarm_id": "1",
+                         "name": "my_alarm",
+                         "state": "alarm",
+                         "gnocchi_resources_threshold_rule":
+                             {"resource_id": "my_resource_id",
+                              "metric": "disk.write.requests"}})
+        perf_req.return_value = resp
+        values = {"alarm_uuid": "my_alarm_id"}
+
+        self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}, True)
+
+        check_pay.assert_called_with(values, "disk_write_ops", "my_resource_id",
+                                     "my_alarm", alarm_state="alarm")
+
+        self.assertEqual(perf_req.call_count, 2)
+        # Second call is the update request
+        perf_req.assert_called_with(
+            'alarm_endpoint/v2/alarms/my_alarm_id', auth_token,
+            req_type="put", payload=check_pay.return_value, verify_ssl=True)
+
+    @mock.patch.object(Config, "instance")
+    def test_check_valid_payload(self, cfg):
+        """Test the check payload function for a valid payload."""
+        values = {"severity": "warning",
+                  "statistic": "COUNT",
+                  "threshold_value": 12,
+                  "operation": "GT",
+                  "granularity": 300,
+                  "resource_type": "generic"}
+        cfg.return_value.OS_NOTIFIER_URI = "http://localhost:8662"
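+        # check_payload translates the OSM fields into their Aodh equivalents:
+        # statistic COUNT -> aggregation_method "count", operation GT ->
+        # comparison_operator "gt" and severity "warning" -> "low".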
+        payload = self.alarming.check_payload(
+            values, "disk_write_ops", "r_id", "alarm_name")
+
+        self.assertDictEqual(
+            json.loads(payload), {"name": "alarm_name",
+                                  "gnocchi_resources_threshold_rule":
+                                      {"resource_id": "r_id",
+                                       "metric": "disk.write.requests",
+                                       "comparison_operator": "gt",
+                                       "aggregation_method": "count",
+                                       "threshold": 12,
+                                       "granularity": 300,
+                                       "resource_type": "generic"},
+                                  "severity": "low",
+                                  "state": "ok",
+                                  "type": "gnocchi_resources_threshold",
+                                  "alarm_actions": ["http://localhost:8662"],
+                                  "repeat_actions": True})
+
+    @mock.patch.object(Config, "instance")
+    @mock.patch.object(Common, "perform_request")
+    def test_check_valid_state_payload(self, perform_req, cfg):
+        """Test the check payload function for a valid payload with state."""
+        values = {"severity": "warning",
+                  "statistic": "COUNT",
+                  "threshold_value": 12,
+                  "operation": "GT",
+                  "granularity": 300,
+                  "resource_type": "generic"}
+        cfg.return_value.OS_NOTIFIER_URI = "http://localhost:8662"
+        payload = self.alarming.check_payload(
+            values, "disk_write_ops", "r_id", "alarm_name", alarm_state="alarm")
+
+        self.assertEqual(
+            json.loads(payload), {"name": "alarm_name",
+                                  "gnocchi_resources_threshold_rule":
+                                      {"resource_id": "r_id",
+                                       "metric": "disk.write.requests",
+                                       "comparison_operator": "gt",
+                                       "aggregation_method": "count",
+                                       "threshold": 12,
+                                       "granularity": 300,
+                                       "resource_type": "generic"},
+                                  "severity": "low",
+                                  "state": "alarm",
+                                  "type": "gnocchi_resources_threshold",
+                                  "alarm_actions": ["http://localhost:8662"],
+                                  "repeat_actions": True})
+
+    def test_check_invalid_payload(self):
+        """Test the check payload function for an invalid payload."""
+        values = {"alarm_values": "mock_invalid_details"}
+        with self.assertRaises(Exception):
+            self.alarming.check_payload(values, "my_metric", "r_id", "alarm_name")
+
+    @mock.patch.object(Common, "perform_request")
+    def test_get_alarm_state(self, perf_req):
+        """Test the get alarm state function."""
+        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
+
+        self.alarming.get_alarm_state(alarm_endpoint, auth_token, "alarm_id")
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/alarm_id/state", auth_token, req_type="get")
+
+    @mock.patch.object(Common, "perform_request")
+    def test_check_for_metric(self, perf_req):
+        """Test the check for metric function."""
+        mock_perf_req_return_value = {"metrics": {"cpu_util": 123}}
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps(mock_perf_req_return_value)})
+
+        self.alarming.check_for_metric(auth_token, metric_endpoint, "cpu_utilization", "r_id", True)
+
+        perf_req.assert_called_with(
+            "metric_endpoint/v1/resource/generic/r_id", auth_token, req_type="get", verify_ssl=True)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_common.py b/osm_mon/tests/plugins/OpenStack/unit/test_common.py
new file mode 100644 (file)
index 0000000..e6c52fb
--- /dev/null
@@ -0,0 +1,119 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all common OpenStack methods."""
+
+import json
+import logging
+import unittest
+
+import mock
+import requests
+from keystoneclient.v3 import client
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import VimCredentials
+from osm_mon.plugins.OpenStack.common import Common
+
+__author__ = "Helena McGough"
+
+log = logging.getLogger(__name__)
+
+
+class Message(object):
+    """Mock a message for an access credentials request."""
+
+    def __init__(self):
+        """Initialise the topic and value of access_cred message."""
+        self.topic = "access_credentials"
+        self.value = json.dumps({"mock_value": "mock_details",
+                                 "vim_type": "OPENSTACK",
+                                 "access_config":
+                                     {"openstack_site": "my_site",
+                                      "user": "my_user",
+                                      "password": "my_password",
+                                      "vim_tenant_name": "my_tenant"}})
+
+
+class TestCommon(unittest.TestCase):
+    """Test the common class for OpenStack plugins."""
+
+    def setUp(self):
+        """Test Setup."""
+        super(TestCommon, self).setUp()
+        self.common = Common()
+        self.creds = VimCredentials()
+        self.creds.id = 'test_id'
+        self.creds.user = 'user'
+        self.creds.url = 'url'
+        self.creds.password = 'password'
+        self.creds.tenant_name = 'tenant_name'
+
+    @mock.patch.object(AuthManager, "get_credentials")
+    @mock.patch.object(client.Client, "get_raw_token_from_identity_service")
+    def test_get_auth_token(self, get_token, get_creds):
+        """Test generating a new authentication token."""
+        get_creds.return_value = self.creds
+        Common.get_auth_token('test_id')
+        get_creds.assert_called_with('test_id')
+        get_token.assert_called_with(auth_url='url', password='password', project_name='tenant_name', username='user',
+                                     project_domain_id='default', user_domain_id='default')
+
+    @mock.patch.object(requests, 'post')
+    def test_post_req(self, post):
+        """Testing a post request."""
+        Common.perform_request("url", "auth_token", req_type="post",
+                               payload="payload")
+
+        post.assert_called_with("url", data="payload", headers=mock.ANY,
+                                timeout=mock.ANY, verify=True)
+
+    @mock.patch.object(requests, 'get')
+    def test_get_req(self, get):
+        """Testing a get request."""
+        # Run the default GET request without any parameters
+        Common.perform_request("url", "auth_token", req_type="get")
+
+        get.assert_called_with("url", params=None, headers=mock.ANY,
+                               timeout=mock.ANY, verify=True)
+
+        # Test with some parameters specified
+        get.reset_mock()
+        Common.perform_request("url", "auth_token", req_type="get",
+                               params="some parameters")
+
+        get.assert_called_with("url", params="some parameters",
+                               headers=mock.ANY, timeout=mock.ANY, verify=True)
+
+    @mock.patch.object(requests, 'put')
+    def test_put_req(self, put):
+        """Testing a put request."""
+        Common.perform_request("url", "auth_token", req_type="put",
+                               payload="payload")
+        put.assert_called_with("url", data="payload", headers=mock.ANY,
+                               timeout=mock.ANY, verify=True)
+
+    @mock.patch.object(requests, 'delete')
+    def test_delete_req(self, delete):
+        """Testing a delete request."""
+        Common.perform_request("url", "auth_token", req_type="delete")
+
+        delete.assert_called_with("url", headers=mock.ANY, timeout=mock.ANY, verify=True)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_metric_calls.py b/osm_mon/tests/plugins/OpenStack/unit/test_metric_calls.py
new file mode 100644 (file)
index 0000000..b71ca72
--- /dev/null
@@ -0,0 +1,308 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all metric request message keys."""
+
+import json
+import logging
+import unittest
+
+import mock
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler as metric_req
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+# Mock auth_token and endpoint
+endpoint = mock.ANY
+auth_token = mock.ANY
+
+# Mock a valid metric list for some tests, and a resultant list
+metric_list = [{"name": "disk.write.requests",
+                "id": "metric_id",
+                "unit": "units",
+                "resource_id": "r_id"}]
+result_list = ["metric_id", "r_id", "units", "disk_write_ops"]
+
+
+class Response(object):
+    """Mock a response object for requests."""
+
+    def __init__(self):
+        """Initialise text and status code values."""
+        self.text = json.dumps([{"id": "test_id"}])
+        self.status_code = "STATUS_CODE"
+
+
+def perform_request_side_effect(*args, **kwargs):
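+    # Emulate the Gnocchi API: return an empty page once a pagination 'marker'
+    # is present in the URL, a generic resource holding a cpu_util metric for
+    # resource queries, and the default metric list otherwise.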
+    resp = Response()
+    if 'marker' in args[0]:
+        resp.text = json.dumps([])
+    if 'resource/generic' in args[0]:
+        resp.text = json.dumps({'metrics': {'cpu_util': 'test_id'}})
+    return resp
+
+
+class TestMetricCalls(unittest.TestCase):
+    """Integration test for metric request keys."""
+
+    def setUp(self):
+        """Setup the tests for metric request keys."""
+        super(TestMetricCalls, self).setUp()
+        self.metrics = metric_req.OpenstackMetricHandler()
+        self.metrics._common = Common()
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(Common, "perform_request")
+    def test_invalid_config_metric_req(
+            self, perf_req, get_metric):
+        """Test the configure metric function for an invalid metric."""
+        # Test invalid configuration for creating a metric
+        values = {"metric_details": "invalid_metric"}
+
+        with self.assertRaises(ValueError):
+            self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_not_called()
+
+        # Test with an invalid metric name, will not perform request
+        values = {"resource_uuid": "r_id"}
+
+        with self.assertRaises(ValueError):
+            self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_not_called()
+
+        # If metric exists, it won't be recreated
+        get_metric.return_value = "metric_id"
+
+        with self.assertRaises(ValueError):
+            self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_not_called()
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_valid_config_metric_req(
+            self, get_creds, perf_req, get_metric):
+        """Test the configure metric function for a valid metric."""
+        # Test valid configuration and payload for creating a metric
+        get_creds.return_value = type('obj', (object,), {'config': '{"insecure":true}'})
+        values = {"resource_uuid": "r_id",
+                  "metric_unit": "units",
+                  "metric_name": "cpu_util"}
+        get_metric.return_value = None
+        payload = {"id": "r_id",
+                   "metrics": {"cpu_util":
+                                   {"archive_policy_name": "high",
+                                    "name": "cpu_util",
+                                    "unit": "units"}}}
+
+        perf_req.return_value = type('obj', (object,), {'text': '{"metrics":{"cpu_util":1}, "id":1}'})
+
+        self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_called_with(
+            "<ANY>/v1/resource/generic", auth_token, req_type="post", verify_ssl=False,
+            payload=json.dumps(payload, sort_keys=True))
+
+    @mock.patch.object(Common, "perform_request")
+    def test_delete_metric_req(self, perf_req):
+        """Test the delete metric function."""
+        mock_response = Response()
+        mock_response.status_code = 200
+        perf_req.return_value = mock_response
+
+        self.metrics.delete_metric(endpoint, auth_token, "metric_id", verify_ssl=False)
+
+        perf_req.assert_called_with(
+            "<ANY>/v1/metric/metric_id", auth_token, req_type="delete", verify_ssl=False)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_delete_metric_invalid_status(self, perf_req):
+        """Test invalid response for delete request."""
+        perf_req.return_value = type('obj', (object,), {"status_code": "404"})
+
+        with self.assertRaises(ValueError):
+            self.metrics.delete_metric(endpoint, auth_token, "metric_id", verify_ssl=False)
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_complete_list_metric_req(self, perf_req, resp_list):
+        """Test the complete list metric function."""
+        # Test listing metrics without any configuration options
+        values = {}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric?sort=name:asc", auth_token, req_type="get", verify_ssl=False)
+        resp_list.assert_called_with([{u'id': u'test_id'}])
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_resource_list_metric_req(self, perf_req, resp_list):
+        """Test the resource list metric function."""
+        # Test listing metrics with a resource id specified
+        values = {"resource_uuid": "resource_id"}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric/test_id", auth_token, req_type="get", verify_ssl=False)
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_name_list_metric_req(self, perf_req, resp_list):
+        """Test the metric_name list metric function."""
+        # Test listing metrics with a metric_name specified
+        values = {"metric_name": "disk_write_bytes"}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric?sort=name:asc", auth_token, req_type="get", verify_ssl=False)
+        resp_list.assert_called_with(
+            [{u'id': u'test_id'}], metric_name="disk_write_bytes")
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_combined_list_metric_req(self, perf_req, resp_list):
+        """Test the combined resource and metric list metric function."""
+        # Test listing metrics with a resource id and metric name specified
+
+        values = {"resource_uuid": "resource_id",
+                  "metric_name": "cpu_utilization"}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric/test_id", auth_token, req_type="get", verify_ssl=False)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_get_metric_id(self, perf_req):
+        """Test get_metric_id function."""
+        mock_response = Response()
+        mock_response.text = json.dumps({'metrics': {'my_metric': 'id'}})
+        perf_req.return_value = mock_response
+        self.metrics.get_metric_id(endpoint, auth_token, "my_metric", "r_id", verify_ssl=False)
+
+        perf_req.assert_called_with(
+            "<ANY>/v1/resource/generic/r_id", auth_token, req_type="get", verify_ssl=False)
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(Common, "perform_request")
+    def test_valid_read_data_req(self, perf_req, get_metric):
+        """Test the read metric data function, for a valid call."""
+        values = {"metric_name": "disk_write_ops",
+                  "resource_uuid": "resource_id",
+                  "collection_unit": "DAY",
+                  "collection_period": 1}
+
+        perf_req.return_value = type('obj', (object,), {'text': '{"metric_data":"[]"}'})
+
+        get_metric.return_value = "metric_id"
+        self.metrics.read_metric_data(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_called_once()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_invalid_read_data_req(self, perf_req):
+        """Test the read metric data function for an invalid call."""
+        values = {}
+
+        with self.assertRaises(KeyError):
+            self.metrics.read_metric_data(endpoint, auth_token, values, verify_ssl=False)
+
+    def test_complete_response_list(self):
+        """Test the response list function for formatting metric lists."""
+        # Mock a list for testing purposes, with valid OSM metric
+        resp_list = self.metrics.response_list(metric_list)
+
+        # Check for the expected values in the resulting list
+        for expected in result_list:
+            self.assertIn(expected, resp_list[0].values())
+
+    def test_name_response_list(self):
+        """Test the response list with metric name configured."""
+        # Mock the metric name to test a metric name list
+        # Test with a name that is not in the list
+        invalid_name = "my_metric"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=invalid_name)
+
+        self.assertEqual(resp_list, [])
+
+        # Test with a name on the list
+        valid_name = "disk_write_ops"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=valid_name)
+
+        # Check for the expected values in the resulting list
+        for expected in result_list:
+            self.assertIn(expected, resp_list[0].values())
+
+    def test_resource_response_list(self):
+        """Test the response list with resource_id configured."""
+        # Mock a resource_id to test a resource list
+        # Test with resource not on the list
+        invalid_id = "mock_resource"
+        resp_list = self.metrics.response_list(metric_list, resource=invalid_id)
+
+        self.assertEqual(resp_list, [])
+
+        # Test with a resource on the list
+        valid_id = "r_id"
+        resp_list = self.metrics.response_list(metric_list, resource=valid_id)
+
+        # Check for the expected values in the resulting list
+        for expected in result_list:
+            self.assertIn(expected, resp_list[0].values())
+
+    def test_combined_response_list(self):
+        """Test the response list function with resource_id and metric_name."""
+        # Test for a combined resource and name list
+        # resource and name are on the list
+        valid_name = "disk_write_ops"
+        valid_id = "r_id"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=valid_name, resource=valid_id)
+
+        # Check for the expected values in the resulting list
+        for expected in result_list:
+            self.assertIn(expected, resp_list[0].values())
+
+        # resource not on list
+        invalid_id = "mock_resource"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=valid_name, resource=invalid_id)
+
+        self.assertEqual(resp_list, [])
+
+        # metric name not on list
+        invalid_name = "mock_metric"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=invalid_name, resource=valid_id)
+
+        self.assertEqual(resp_list, [])
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_metric_req.py b/osm_mon/tests/plugins/OpenStack/unit/test_metric_req.py
new file mode 100644 (file)
index 0000000..2fa31a6
--- /dev/null
@@ -0,0 +1,159 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all metric request message keys."""
+
+import json
+import logging
+import unittest
+
+import mock
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler as metric_req
+from osm_mon.plugins.OpenStack.Gnocchi.metric_handler import OpenstackMetricHandler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+
+class Response(object):
+    """Mock a response object for requests."""
+
+    def __init__(self):
+        """Initialise text and status code values."""
+        self.text = json.dumps([{"id": "test_id"}])
+        self.status_code = "STATUS_CODE"
+
+
+class Message(object):
+    """A class to mock a message object value for metric requests."""
+
+    def __init__(self):
+        """Initialize a mocked message instance."""
+        self.topic = "metric_request"
+        self.key = None
+        self.value = json.dumps({"mock_message": "message_details"})
+
+
+class TestMetricReq(unittest.TestCase):
+    """Integration test for metric request keys."""
+
+    def setUp(self):
+        """Setup the tests for metric request keys."""
+        super(TestMetricReq, self).setUp()
+        self.metrics = metric_req.OpenstackMetricHandler()
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "delete_metric")
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_delete_metric_key(self, get_creds, get_metric_id, del_metric):
+        """Test the functionality for a delete metric request."""
+        value = {"metric_name": "disk_write_ops", "resource_uuid": "my_r_id", "correlation_id": 1}
+
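+        # The stored credentials mark the VIM endpoint as insecure, so the
+        # handler is expected to call downstream methods with verify_ssl=False.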
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+        del_metric.return_value = True
+
+        # Call the metric functionality and check delete request
+        get_metric_id.return_value = "my_metric_id"
+        self.metrics.handle_request('delete_metric_request', value, 'test_id')
+        del_metric.assert_called_with(mock.ANY, mock.ANY, "my_metric_id", False)
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "list_metrics")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_list_metric_key(self, get_creds, list_metrics):
+        """Test the functionality for a list metric request."""
+        value = {"metrics_list_request": {"correlation_id": 1}}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+
+        list_metrics.return_value = []
+
+        # Call the metric functionality and check list functionality
+        self.metrics.handle_request('list_metric_request', value, 'test_id')
+        list_metrics.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 1}, False)
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(AuthManager, "get_credentials")
+    @mock.patch.object(Common, "perform_request")
+    def test_update_metric_key(self, perf_req, get_creds):
+        """Test the functionality for an update metric request."""
+        value = {"metric_update_request":
+                     {"correlation_id": 1,
+                      "metric_name": "my_metric",
+                      "resource_uuid": "my_r_id"}}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+
+        mock_response = Response()
+        mock_response.text = json.dumps({'metrics': {'my_metric': 'id'}})
+        perf_req.return_value = mock_response
+
+        # Call metric functionality and confirm no function is called
+        # Gnocchi does not support updating a metric configuration
+        self.metrics.handle_request('update_metric_request', value, 'test_id')
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(OpenstackMetricHandler, "configure_metric")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_config_metric_key(self, get_credentials, config_metric):
+        """Test the functionality for a create metric request."""
+        value = {"metric_create_request": {"correlation_id": 123}}
+        get_credentials.return_value = type('obj', (object,), {'config': '{"insecure":true}'})
+        # Call metric functionality and check config metric
+        config_metric.return_value = "metric_id", "resource_id"
+        self.metrics.handle_request('create_metric_request', value, 'test_id')
+        config_metric.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 123}, False)
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(OpenstackMetricHandler, "read_metric_data")
+    @mock.patch.object(AuthManager, "get_credentials")
+    @mock.patch.object(Common, "perform_request")
+    def test_read_data_key(self, perf_req, get_creds, read_data):
+        """Test the functionality for a read metric data request."""
+        value = {"correlation_id": 123, "metric_name": "cpu_utilization", "resource_uuid": "uuid"}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+
+        mock_response = Response()
+        mock_response.text = json.dumps({'metrics': {'cpu_util': 'id'}})
+        perf_req.return_value = mock_response
+
+        # Call metric functionality and check read data metrics
+        read_data.return_value = "time_stamps", "data_values"
+        self.metrics.handle_request('read_metric_data_request', value, 'test_id')
+        read_data.assert_called_with(
+            mock.ANY, mock.ANY, value, False)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_notifier.py b/osm_mon/tests/plugins/OpenStack/unit/test_notifier.py
new file mode 100644 (file)
index 0000000..a420c70
--- /dev/null
@@ -0,0 +1,133 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all common OpenStack methods."""
+
+import json
+import unittest
+
+import mock
+from kafka import KafkaProducer
+
+from osm_mon.core.database import DatabaseManager, Alarm
+from osm_mon.plugins.OpenStack.Aodh.notifier import NotifierHandler
+
+post_data = {"severity": "critical",
+             "alarm_name": "my_alarm",
+             "current": "current_state",
+             "alarm_id": "my_alarm_id",
+             "reason": "Threshold has been broken",
+             "reason_data": {"count": 1,
+                             "most_recent": "null",
+                             "type": "threshold",
+                             "disposition": "unknown"},
+             "previous": "previous_state"}
+
+
+class Response(object):
+    """Mock a response class for generating responses."""
+
+    def __init__(self, text):
+        """Initialise a mock response with a text attribute."""
+        self.text = text
+
+
+class RFile():
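+    """Mock the request body stream (rfile) read by the notifier handler."""
+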
+    def read(self, content_length):
+        return json.dumps(post_data, sort_keys=True)
+
+
+class MockNotifierHandler(NotifierHandler):
+    """Mock the NotifierHandler class for testing purposes."""
+
+    def __init__(self):
+        """Initialise mock NotifierHandler."""
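+        # Skip BaseHTTPRequestHandler.__init__ and provide only what do_POST
+        # needs: a Content-Length header and a readable request body.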
+        self.headers = {'Content-Length': '20'}
+        self.rfile = RFile()
+
+    def setup(self):
+        """Mock setup function."""
+        pass
+
+    def handle(self):
+        """Mock handle function."""
+        pass
+
+    def finish(self):
+        """Mock finish function."""
+        pass
+
+
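+# Patch KafkaProducer at class level so no test opens a real Kafka connection.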
+@mock.patch.object(KafkaProducer, "__init__", lambda *args, **kwargs: None)
+@mock.patch.object(KafkaProducer, "flush", mock.Mock())
+class TestNotifier(unittest.TestCase):
+    """Test the NotifierHandler class for requests from aodh."""
+
+    def setUp(self):
+        """Setup tests."""
+        super(TestNotifier, self).setUp()
+        self.handler = MockNotifierHandler()
+
+    @mock.patch.object(NotifierHandler, "_set_headers")
+    def test_do_GET(self, set_head):
+        """Tests do_GET. Validates _set_headers has been called."""
+        self.handler.do_GET()
+
+        set_head.assert_called_once()
+
+    @mock.patch.object(NotifierHandler, "notify_alarm")
+    @mock.patch.object(NotifierHandler, "_set_headers")
+    def test_do_POST(self, set_head, notify):
+        """Tests do_POST. Validates notify_alarm has been called."""
+        self.handler.do_POST()
+
+        set_head.assert_called_once()
+        notify.assert_called_with(post_data)
+
+    @mock.patch.object(NotifierHandler, "_publish_response")
+    @mock.patch.object(DatabaseManager, "get_alarm")
+    def test_notify_alarm_valid_alarm(
+            self, get_alarm, notify):
+        """
+        Tests notify_alarm when request from OpenStack references an existing alarm in the DB.
+        Validates KafkaProducer.notify_alarm has been called.
+        """
+        # Generate return values for valid notify_alarm operation
+        mock_alarm = Alarm()
+        get_alarm.return_value = mock_alarm
+
+        self.handler.notify_alarm(post_data)
+        notify.assert_called_with('notify_alarm', mock.ANY)
+
+    @mock.patch.object(NotifierHandler, "_publish_response")
+    @mock.patch.object(DatabaseManager, "get_alarm")
+    def test_notify_alarm_invalid_alarm(
+            self, get_alarm, notify):
+        """
+        Tests notify_alarm when the request from OpenStack references a non-existent alarm in the DB.
+        Validates Exception is thrown and KafkaProducer.notify_alarm has not been called.
+        """
+        # Simulate an alarm that is not present in the database
+        get_alarm.return_value = None
+
+        with self.assertRaises(Exception):
+            self.handler.notify_alarm(post_data)
+        notify.assert_not_called()
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_responses.py b/osm_mon/tests/plugins/OpenStack/unit/test_responses.py
new file mode 100644 (file)
index 0000000..1377bc0
--- /dev/null
@@ -0,0 +1,116 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Test that the correct responses are generated for each message."""
+
+import logging
+import unittest
+
+import mock
+
+from osm_mon.plugins.OpenStack import response as resp
+
+log = logging.getLogger(__name__)
+
+
+class TestOpenStackResponse(unittest.TestCase):
+    """Tests for responses generated by the OpenStack plugins."""
+
+    def setUp(self):
+        """Setup for testing OpenStack plugin responses."""
+        super(TestOpenStackResponse, self).setUp()
+        self.plugin_resp = resp.OpenStackResponseBuilder()
+
+    def test_invalid_key(self):
+        """Test if an invalid key is entered for a response."""
+        message = self.plugin_resp.generate_response("mock_invalid_key")
+        self.assertEqual(message, None)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "alarm_list_response")
+    def test_list_alarm_resp(self, alarm_list_resp):
+        """Test out a function call for a list alarm response."""
+        message = self.plugin_resp.generate_response("list_alarm_response")
+        self.assertEqual(alarm_list_resp.return_value, message)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "list_metric_response")
+    def test_list_metric_resp(self, metric_list_resp):
+        """Test list metric response function call."""
+        message = self.plugin_resp.generate_response("list_metric_response")
+        self.assertEqual(message, metric_list_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "delete_alarm_response")
+    def test_delete_alarm_resp(self, del_alarm_resp):
+        """Test delete alarm response function call."""
+        message = self.plugin_resp.generate_response("delete_alarm_response")
+        self.assertEqual(message, del_alarm_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "delete_metric_response")
+    def test_delete_metric_resp(self, del_metric_resp):
+        """Test the response functionality of delete metric response."""
+        message = self.plugin_resp.generate_response("delete_metric_response")
+        self.assertEqual(message, del_metric_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "create_alarm_response")
+    def test_create_alarm_resp(self, config_alarm_resp):
+        """Test create alarm response function call."""
+        message = self.plugin_resp.generate_response("create_alarm_response")
+        self.assertEqual(message, config_alarm_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "metric_create_response")
+    def test_create_metric_resp(self, config_metric_resp):
+        """Test create metric response function call."""
+        message = self.plugin_resp.generate_response("create_metric_response")
+        self.assertEqual(message, config_metric_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "update_alarm_response")
+    def test_update_alarm_resp(self, up_alarm_resp):
+        """Test update alarm response function call."""
+        message = self.plugin_resp.generate_response("update_alarm_response")
+        self.assertEqual(message, up_alarm_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "update_metric_response")
+    def test_update_metric_resp(self, up_metric_resp):
+        """Test update metric response function call."""
+        message = self.plugin_resp.generate_response("update_metric_response")
+        self.assertEqual(message, up_metric_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "notify_alarm")
+    def test_notify_alarm(self, notify_alarm):
+        """Test notify alarm response function call."""
+        message = self.plugin_resp.generate_response("notify_alarm")
+        self.assertEqual(message, notify_alarm.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "read_metric_data_response")
+    def test_read_metric_data_resp(self, read_data_resp):
+        """Test read metric data response function call."""
+        message = self.plugin_resp.generate_response(
+            "read_metric_data_response")
+        self.assertEqual(message, read_data_resp.return_value)
diff --git a/osm_mon/tests/plugins/OpenStack/unit/test_settings.py b/osm_mon/tests/plugins/OpenStack/unit/test_settings.py
new file mode 100644 (file)
index 0000000..42619f8
--- /dev/null
@@ -0,0 +1,46 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for settings for OpenStack plugins configurations."""
+
+import logging
+import os
+import unittest
+
+from osm_mon.core.settings import Config
+
+log = logging.getLogger(__name__)
+
+
+class TestSettings(unittest.TestCase):
+    """Test the settings class for OpenStack plugin configuration."""
+
+    def setUp(self):
+        """Test Setup."""
+        super(TestSettings, self).setUp()
+        self.cfg = Config.instance()
+
+    def test_read_environ_os_notifier_uri(self):
+        """Test reading the OpenStack plugin configuration from the environment."""
+        os.environ["OS_NOTIFIER_URI"] = "test"
+        self.cfg.read_environ()
+
+        self.assertEqual(self.cfg.OS_NOTIFIER_URI, "test")
diff --git a/osm_mon/tests/plugins/VMware/__init__.py b/osm_mon/tests/plugins/VMware/__init__.py
new file mode 100644 (file)
index 0000000..64d5d51
--- /dev/null
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2017-2018 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+"""VMware MON plugin tests."""
+
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
+
diff --git a/osm_mon/tests/plugins/VMware/test_mon_plugin_vrops.py b/osm_mon/tests/plugins/VMware/test_mon_plugin_vrops.py
new file mode 100644 (file)
index 0000000..2d10d1b
--- /dev/null
@@ -0,0 +1,3083 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2017-2018 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+""" Mock tests for VMware vROPs Mon plugin """
+
+import os
+import sys
+import unittest
+
+import mock
+import requests
+
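+# Extend sys.path so the vROPs plugin modules can be imported when the tests are run directly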
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", ".."))
+
+from osm_mon.plugins.vRealiseOps import mon_plugin_vrops as monPlugin
+
+from pyvcloud.vcd.client import Client
+
+
+class TestMonPlugin(unittest.TestCase):
+    """Test class for vROPs Mon Plugin class methods"""
+
+    def setUp(self):
+        """Setup the tests for Mon Plugin class methods"""
+        super(TestMonPlugin, self).setUp()
+
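+        # Dummy VIM access configuration used to instantiate the MonPlugin under test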
+        self.m_vim_access_config = {'vrops_site': 'abc',
+                                    'vrops_user': 'user',
+                                    'vrops_password': 'passwd',
+                                    'vim_url': 'vcd_url',
+                                    'admin_username': 'admin',
+                                    'admin_password': 'admin_passwd',
+                                    'vim_uuid': '1',
+                                    'tenant_id': 'org_vdc_1'}
+        self.mon_plugin = monPlugin.MonPlugin(self.m_vim_access_config)
+        # create client object
+        self.vca = Client('test', verify_ssl_certs=False)
+        # create session
+        self.session = requests.Session()
+
+    def test_get_default_Params_valid_metric_alarm_name(self):
+        """Test get default params method"""
+
+        # Mock valid metric_alarm_name and response
+        metric_alarm_name = "Average_Memory_Usage_Above_Threshold"
+        expected_return = {'impact': 'risk', 'cancel_cycles': 2, 'adapter_kind': 'VMWARE',
+                           'repeat': False, 'cancel_period': 300, 'alarm_type': 16,
+                           'vrops_alarm': 'Avg_Mem_Usage_Above_Thr', 'enabled': True, 'period': 300,
+                           'resource_kind': 'VirtualMachine', 'alarm_subType': 19,
+                           'action': 'acknowledge', 'evaluation': 2, 'unit': '%'}
+
+        # call get default param function under test
+        actual_return = self.mon_plugin.get_default_Params(metric_alarm_name)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    def test_get_default_Params_invalid_metric_alarm_name(self):
+        """Test get default params method invalid metric alarm"""
+
+        # Mock valid metric_alarm_name and response
+        metric_alarm_name = "Invalid_Alarm"
+        expected_return = {}
+
+        # call get default param function under test
+        actual_return = self.mon_plugin.get_default_Params(metric_alarm_name)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_symptom_valid_req_response(self, m_post):
+        """Test create symptom method-valid request"""
+
+        # Mock valid symptom params and mock responses
+        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
+                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
+                         'symptom_name': \
+                             'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                         'operation': 'GT', 'wait_cycles': 1, 'metric_key': 'cpu|usage_average'}
+
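+        # Mock a successful vROPs REST response (HTTP 201) containing the created symptom definition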
+        m_post.return_value.status_code = 201
+        m_post.return_value.content = \
+            '{"id":"SymptomDefinition-351c23b4-bc3c-4c7b-b4af-1ad90a673c5d",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine",\
+            "waitCycles":1,"cancelCycles":1,\
+            "state":{"severity":"CRITICAL","condition":{"type":"CONDITION_HT",\
+            "key":"cpu|usage_average","operator":"GT","value":"0.0",\
+            "valueType":"NUMERIC",\
+            "instanced":false,"thresholdType":"STATIC"}}}'
+
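+        # create_symptom is expected to return the symptom definition 'id' from the mocked response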
+        expected_return = "SymptomDefinition-351c23b4-bc3c-4c7b-b4af-1ad90a673c5d"
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_symptom(symptom_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_symptom_invalid_req_response(self, m_post):
+        """Test create symptom method-invalid response"""
+
+        # Mock valid symptom params and invalid  mock responses
+        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
+                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
+                         'symptom_name': \
+                             'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                         'operation': 'GT', 'wait_cycles': 1, 'metric_key': 'cpu|usage_average'}
+
+        m_post.return_value.status_code = 404
+        m_post.return_value.content = '404 Not Found'
+
+        expected_return = None
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_symptom(symptom_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_symptom_incorrect_data(self, m_post):
+        """Test create symptom method-incorrect data"""
+
+        # Mock valid symptom params and invalid  mock responses
+        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
+                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
+                         'symptom_name': \
+                             'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                         'operation': 'GT', 'metric_key': 'cpu|usage_average'}
+
+        expected_return = None
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_symptom(symptom_param)
+
+        # verify that mocked method is not called
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_alarm_definition_valid_req_response(self, m_post):
+        """Test create alarm definition method-valid response"""
+
+        # Mock valid alarm params and mock responses
+        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
+                       'subType': 19, 'waitCycles': 1,
+                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                       'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'resourceKindKey': 'VirtualMachine', 'type': 16,
+                       'symptomDefinitionId': \
+                           'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
+
+        m_post.return_value.status_code = 201
+        m_post.return_value.content = \
+            '{"id":"AlertDefinition-d4f21e4b-770a-45d6-b298-022eaf489115",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "description":"CPU_Utilization_Above_Threshold","adapterKindKey":"VMWARE",\
+            "resourceKindKey":"VirtualMachine","waitCycles":1,"cancelCycles":1,\
+            "type":16,"subType":19,\
+            "states":[{"severity":"CRITICAL","base-symptom-set":{"type":"SYMPTOM_SET",\
+            "relation":"SELF","aggregation":"ALL","symptomSetOperator":"AND",\
+            "symptomDefinitionIds":\
+            ["SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df"]},\
+            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
+
+        expected_return = "AlertDefinition-d4f21e4b-770a-45d6-b298-022eaf489115"
+
+        # call create alarm definition method under test
+        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_alarm_definition_invalid_req_response(self, m_post):
+        """Test create alarm definition method-invalid response"""
+
+        # Mock valid alarm params and mock responses
+        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
+                       'subType': 19, 'waitCycles': 1,
+                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                       'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'resourceKindKey': 'VirtualMachine', 'type': 16,
+                       'symptomDefinitionId': \
+                           'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
+
+        m_post.return_value.status_code = 404
+        m_post.return_value.content = '404 Not Found'
+
+        expected_return = None
+
+        # call create alarm definition method under test
+        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_alarm_definition_incorrect_data(self, m_post):
+        """Test create alarm definition method-incorrect data"""
+
+        # Mock incorrect alarm param
+        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
+                       'subType': 19, 'waitCycles': 1, 'type': 16,
+                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                       'symptomDefinitionId': \
+                           'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
+        expected_return = None
+
+        # call create alarm definition method under test
+        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
+
+        # verify that mocked method is not called
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
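+    # Note: stacked @mock.patch.object decorators are applied bottom-up, so the mock
+    # arguments of the decorated tests below are listed in reverse decorator order.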
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_valid_req(self, m_get_default_Params,
+                                       m_get_alarm_defination_by_name,
+                                       m_create_symptom,
+                                       m_create_alarm_definition,
+                                       m_get_vm_moref_id,
+                                       m_get_vm_resource_id,
+                                       m_create_alarm_notification_rule,
+                                       m_save_alarm):
+        """Test configure alarm valid request creating alarm"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'cpu_utilization',
+                       'vdu_name': 'vdu1', 'vnf_member_index': 'index1', 'ns_id': 'nsd1',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vm_moref_id = 'vm-6626'
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def = 'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        alarm_def_uuid = '0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+
+        # Mock default Parameters for alarm & metric configuration
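+        # side_effect returns these dicts on successive calls: alarm defaults first, then metric defaults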
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = vm_moref_id
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_create_alarm_notification_rule.return_value = 'f37900e7-dd01-4383-b84c-08f519530d71'
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_create_alarm_notification_rule.assert_called_with(vrops_alarm_name,
+                                                            alarm_def,
+                                                            resource_id)
+        m_save_alarm.assert_called_with(vrops_alarm_name, '1',
+                                        config_dict['threshold_value'],
+                                        config_dict['operation'],
+                                        config_dict['metric_name'],
+                                        config_dict['vdu_name'],
+                                        config_dict['vnf_member_index'],
+                                        config_dict['ns_id'])
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_invalid_alarm_name_req(self, m_get_default_Params,
+                                                    m_get_alarm_defination_by_name,
+                                                    m_create_symptom,
+                                                    m_create_alarm_definition,
+                                                    m_get_vm_moref_id,
+                                                    m_get_vm_resource_id,
+                                                    m_create_alarm_notification_rule,
+                                                    m_save_alarm):
+        """Test configure alarm invalid test: for invalid alarm name"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        alarm_def_uuid = None
+
+        # Mock default Parameters to return an empty dict (invalid alarm name)
+        m_get_default_Params.return_value = {}
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        m_get_default_Params.assert_called_with(config_dict['alarm_name'])
+        m_get_alarm_defination_by_name.assert_not_called()
+        m_create_symptom.assert_not_called()
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value i.e. None
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_invalid_metric_name_req(self, m_get_default_Params,
+                                                     m_get_alarm_defination_by_name,
+                                                     m_create_symptom,
+                                                     m_create_alarm_definition,
+                                                     m_get_vm_moref_id,
+                                                     m_get_vm_resource_id,
+                                                     m_create_alarm_notification_rule,
+                                                     m_save_alarm):
+        """Test configure alarm invalid test: for invalid metric name"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        alarm_def_uuid = None
+
+        # Mock default Parameters: valid alarm config, then an empty dict for the metric (invalid metric name)
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {}
+                                            ]
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_not_called()
+        m_create_symptom.assert_not_called()
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value i.e. None
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_invalid_already_exists(self, m_get_default_Params,
+                                                    m_get_alarm_defination_by_name,
+                                                    m_create_symptom,
+                                                    m_create_alarm_definition,
+                                                    m_get_vm_moref_id,
+                                                    m_get_vm_resource_id,
+                                                    m_create_alarm_notification_rule,
+                                                    m_save_alarm):
+        """Test configure alarm invalid test: for alarm that already exists"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return value
+        m_get_alarm_defination_by_name.return_value = ['mocked_alarm_CPU_Utilization_Above_Thr']
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_not_called()
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_symptom_creation(self, m_get_default_Params,
+                                                     m_get_alarm_defination_by_name,
+                                                     m_create_symptom,
+                                                     m_create_alarm_definition,
+                                                     m_get_vm_moref_id,
+                                                     m_get_vm_resource_id,
+                                                     m_create_alarm_notification_rule,
+                                                     m_save_alarm):
+        """Test configure alarm: failed to create symptom"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_alert_creation(self, m_get_default_Params,
+                                                   m_get_alarm_defination_by_name,
+                                                   m_create_symptom,
+                                                   m_create_alarm_definition,
+                                                   m_get_vm_moref_id,
+                                                   m_get_vm_resource_id,
+                                                   m_create_alarm_notification_rule,
+                                                   m_save_alarm):
+        """Test configure alarm: failed to create alert in vROPs"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_to_get_vm_moref_id(self, m_get_default_Params,
+                                                       m_get_alarm_defination_by_name,
+                                                       m_create_symptom,
+                                                       m_create_alarm_definition,
+                                                       m_get_vm_moref_id,
+                                                       m_get_vm_resource_id,
+                                                       m_create_alarm_notification_rule,
+                                                       m_save_alarm):
+        """Test configure alarm: failed to get vm_moref_id"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_to_get_vm_resource_id(self, m_get_default_Params,
+                                                          m_get_alarm_defination_by_name,
+                                                          m_create_symptom,
+                                                          m_create_alarm_definition,
+                                                          m_get_vm_moref_id,
+                                                          m_get_vm_resource_id,
+                                                          m_create_alarm_notification_rule,
+                                                          m_save_alarm):
+        """Test configure alarm: failed to get vm resource_id"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = 'vm-6626'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = vm_moref_id
+        m_get_vm_resource_id.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_to_create_alarm_notification_rule(self, m_get_default_Params,
+                                                                      m_get_alarm_defination_by_name,
+                                                                      m_create_symptom,
+                                                                      m_create_alarm_definition,
+                                                                      m_get_vm_moref_id,
+                                                                      m_get_vm_resource_id,
+                                                                      m_create_alarm_notification_rule,
+                                                                      m_save_alarm):
+        """Test configure alarm: failed to create alarm notification rule"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = 'vm-6626'
+        alarm_def = 'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = vm_moref_id
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
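+        # Returning None here simulates failure to create the alarm notification rule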
+        m_create_alarm_notification_rule.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_create_alarm_notification_rule.assert_called_with(vrops_alarm_name, alarm_def, resource_id)
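+        # The alarm must not be saved when notification rule creation fails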
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_details_valid_rest_req_response(self, m_get):
+        """Test get_alarm_defination_details: For a valid REST request response"""
+
+        alarm_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"id":"AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+                            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+                            "description":"CPU_Utilization_Above_Threshold",\
+                            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine",\
+                            "waitCycles":1,"cancelCycles":1,"type":16,"subType":19,\
+                            "states":[{"severity":"CRITICAL","base-symptom-set":\
+                            {"type":"SYMPTOM_SET","relation":"SELF",\
+                            "aggregation":"ALL","symptomSetOperator":"AND","symptomDefinitionIds":\
+                            ["SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]},\
+                            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
+
+        expected_alarm_details = {
+            'adapter_kind': 'VMWARE',
+            'resource_kind': 'VirtualMachine',
+            'type': 16, 'sub_type': 19,
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'}
+
+        expected_alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'description': 'CPU_Utilization_Above_Threshold',
+            'adapterKindKey': 'VMWARE', 'resourceKindKey': 'VirtualMachine',
+            'type': 16, 'subType': 19, 'waitCycles': 1, 'cancelCycles': 1,
+            'states': [{'severity': 'CRITICAL',
+                        'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                        'base-symptom-set': {
+                            'type': 'SYMPTOM_SET', 'relation': 'SELF',
+                            'aggregation': 'ALL', 'symptomSetOperator': 'AND',
+                            'symptomDefinitionIds': [
+                                'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278']}}]}
+
+        # Call get_alarm_defination_details method under test
+        alarm_details_json, alarm_details = self.mon_plugin.get_alarm_defination_details(alarm_uuid)
+
+        # Verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_details, alarm_details)
+        self.assertEqual(expected_alarm_details_json, alarm_details_json)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_details_invalid_rest_req_response(self, m_get):
+        """Test get_alarm_defination_details: For an invalid REST request response"""
+
+        alarm_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 404
+        m_get.return_value.content = '{"message": "No such AlertDefinition - \
+                                        AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444.",\
+                                        "httpStatusCode": 404,"apiErrorCode": 404}'
+
+        expected_alarm_details = None
+        expected_alarm_details_json = None
+
+        # Call get_alarm_defination_details method under test
+        alarm_details_json, alarm_details = self.mon_plugin.get_alarm_defination_details(alarm_uuid)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_details, alarm_details)
+        self.assertEqual(expected_alarm_details_json, alarm_details_json)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_by_name_valid_rest_req_response(self, m_get):
+        """Test get_alarm_defination_by_name: For a valid REST request response"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"pageInfo": {"totalCount": 1,"page": 0,"pageSize": 1000},\
+                                    "links": [\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
+                                        "rel": "SELF","name": "current"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
+                                         "rel": "RELATED","name": "first"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
+                                         "rel": "RELATED","name": "last"}],\
+                                    "alertDefinitions": [{\
+                                        "id": "AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+                                        "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+                                        "description": "CPU_Utilization_Above_Threshold",\
+                                        "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine",\
+                                        "waitCycles": 1,"cancelCycles": 1,"type": 16,"subType": 19,\
+                                        "states": [{"impact": {"impactType": "BADGE","detail": "risk"},\
+                                            "severity": "CRITICAL",\
+                                            "base-symptom-set": {"type": "SYMPTOM_SET",\
+                                            "relation": "SELF","aggregation": "ALL",\
+                                            "symptomSetOperator": "AND",\
+                                            "symptomDefinitionIds": [\
+                                            "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]}}]\
+                                        }]}'
+
+        # Expected return match list
+        Exp_alert_match_list = [{
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'description': 'CPU_Utilization_Above_Threshold',
+            'adapterKindKey': 'VMWARE', 'resourceKindKey': 'VirtualMachine',
+            'type': 16, 'subType': 19, 'waitCycles': 1, 'cancelCycles': 1,
+            'states': [{'severity': 'CRITICAL',
+                        'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                        'base-symptom-set': {
+                            'type': 'SYMPTOM_SET', 'relation': 'SELF',
+                            'aggregation': 'ALL', 'symptomSetOperator': 'AND',
+                            'symptomDefinitionIds': [
+                                'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278']}}]}]
+
+        # Call get_alarm_defination_by_name method under test
+        alert_match_list = self.mon_plugin.get_alarm_defination_by_name(alarm_name)
+
+        # Verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(Exp_alert_match_list, alert_match_list)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_by_name_no_valid_alarm_found(self, m_get):
+        """Test get_alarm_defination_by_name: With no valid alarm found in returned list"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda5'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"pageInfo": {"totalCount": 1,"page": 0,"pageSize": 1000},\
+                                    "links": [\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
+                                        "rel": "SELF","name": "current"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
+                                         "rel": "RELATED","name": "first"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
+                                         "rel": "RELATED","name": "last"}],\
+                                    "alertDefinitions": [{\
+                                        "id": "AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+                                        "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+                                        "description": "CPU_Utilization_Above_Threshold",\
+                                        "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine",\
+                                        "waitCycles": 1,"cancelCycles": 1,"type": 16,"subType": 19,\
+                                        "states": [{"impact": {"impactType": "BADGE","detail": "risk"},\
+                                            "severity": "CRITICAL",\
+                                            "base-symptom-set": {"type": "SYMPTOM_SET",\
+                                            "relation": "SELF","aggregation": "ALL",\
+                                            "symptomSetOperator": "AND",\
+                                            "symptomDefinitionIds": [\
+                                            "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]}}]\
+                                        }]}'
+
+        # Expected return match list
+        Exp_alert_match_list = []
+
+        # Call get_alarm_defination_by_name method under test
+        alert_match_list = self.mon_plugin.get_alarm_defination_by_name(alarm_name)
+
+        # Verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(Exp_alert_match_list, alert_match_list)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
+    def test_update_symptom_defination_valid_symptom_req_response(self,
+                                                                  m_get_symptom_defination_details,
+                                                                  m_put):
+        """Test update_symptom_defination: With valid REST response, update symptom"""
+
+        # Expected symptom to be updated
+        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+        new_alarm_config = {'severity': "CRITICAL",
+                            'operation': 'GT',
+                            'threshold_value': 5,
+                            'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+                            }
+
+        # Set mocked function's return values
+        m_get_symptom_defination_details.return_value = {
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
+            "adapterKindKey": "VMWARE",
+            "resourceKindKey": "VirtualMachine",
+            "waitCycles": 1,
+            "cancelCycles": 1,
+            "state": {"severity": "CRITICAL",
+                      "condition": {
+                          "type": "CONDITION_HT",
+                          "key": "cpu|usage_average", "operator": "GT", "value": "0.0",
+                          "valueType": "NUMERIC", "instanced": False,
+                          "thresholdType": "STATIC"}
+                      }
+        }
+
+        m_put.return_value.status_code = 200
+        m_put.return_value.content = '{\
+            "id":"SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine","waitCycles":1,\
+            "cancelCycles":1,\
+            "state":{\
+                "severity":"CRITICAL",\
+                "condition":{\
+                    "type":"CONDITION_HT","key":"cpu|usage_average","operator":"GT","value":"5.0",\
+                    "valueType":"NUMERIC","instanced":False,"thresholdType":"STATIC"}}}'
+
+        # Call update_symptom_defination method under test
+        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,
+                                                                 new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
+        # m_put.assert_called_with(symptom_defination_id,new_alarm_config)
+
+        # Verify return value with expected value
+        self.assertEqual(symptom_defination_id, symptom_uuid)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
+    def test_update_symptom_defination_invalid_symptom_req_response(self,
+                                                                    m_get_symptom_defination_details,
+                                                                    m_put):
+        """Test update_symptom_defination: If invalid REST response received, return None"""
+
+        # Expected symptom to be updated
+        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+        new_alarm_config = {'severity': "CRITICAL",
+                            'operation': 'GT',
+                            'threshold_value': 5,
+                            'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+                            }
+
+        # Set mocked function's return values
+        m_get_symptom_defination_details.return_value = {
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
+            "adapterKindKey": "VMWARE",
+            "resourceKindKey": "VirtualMachine",
+            "waitCycles": 1,
+            "cancelCycles": 1,
+            "state": {"severity": "CRITICAL",
+                      "condition": {
+                          "type": "CONDITION_HT",
+                          "key": "cpu|usage_average", "operator": "GT", "value": "0.0",
+                          "valueType": "NUMERIC", "instanced": False,
+                          "thresholdType": "STATIC"}
+                      }
+        }
+
+        m_put.return_value.status_code = 500
+        m_put.return_value.content = '{\
+            "message": "Internal Server error, cause unknown.",\
+            "moreInformation": [\
+                {"name": "errorMessage",\
+                 "value": "Symptom Definition CPU_Utilization_Above_Thr-e14b203c-\
+                 6bf2-4e2f-a91c-8c19d240eda4 does not exist and hence cannot be updated."},\
+                {"name": "localizedMessage",\
+                 "value": "Symptom Definition CPU_Utilization_Above_Thr-e14b203c-\
+                 6bf2-4e2f-a91c-8c19d240eda4 does not exist and hence cannot be updated.;"}],\
+            "httpStatusCode": 500,"apiErrorCode": 500}'
+
+        # Call update_symptom_defination method under test
+        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,
+                                                                 new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(symptom_uuid, None)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
+    def test_update_symptom_defination_failed_to_get_symptom_defination(self,
+                                                                        m_get_symptom_defination_details,
+                                                                        m_put):
+        """Test update_symptom_defination: if fails to get symptom_defination returns None"""
+
+        # Expected symptom to be updated
+        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+        new_alarm_config = {'severity': "CRITICAL",
+                            'operation': 'GT',
+                            'threshold_value': 5,
+                            'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+                            }
+
+        # Set mocked function's return values
+        m_get_symptom_defination_details.return_value = None
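+        # Since no symptom definition details are available, no PUT request should be issued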
+
+        # Call update_symptom_defination method under test
+        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,
+                                                                 new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
+        m_put.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(symptom_uuid, None)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_symptom_defination_details_valid_req_response(self, m_get):
+        """Test update_symptom_defination: With valid REST response symptom is created"""
+
+        # Expected symptom to be updated
+        symptom_uuid = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{\
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine","waitCycles": 1,\
+            "cancelCycles": 1,"state": {"severity": "CRITICAL","condition": {"type": "CONDITION_HT",\
+            "key": "cpu|usage_average","operator": "GT","value": "6.0","valueType": "NUMERIC",\
+            "instanced": false,"thresholdType": "STATIC"}}}'
+        expected_symptom_details = {
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
+            "adapterKindKey": "VMWARE", "resourceKindKey": "VirtualMachine",
+            "waitCycles": 1, "cancelCycles": 1,
+            "state": {"severity": "CRITICAL",
+                      "condition": {"type": "CONDITION_HT", "key": "cpu|usage_average",
+                                    "operator": "GT", "value": "6.0", "valueType": "NUMERIC",
+                                    "instanced": False, "thresholdType": "STATIC"}}}
+
+        # Call get_symptom_defination_details method under test
+        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_symptom_details, symptom_details)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_symptom_defination_details_invalid_req_response(self, m_get):
+        """Test update_symptom_defination: if invalid REST response received return None"""
+
+        # Expected symptom to be updated
+        symptom_uuid = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 404
+        m_get.return_value.content = '{"message": "No such SymptomDefinition\
+        - SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278.",\
+        "httpStatusCode": 404,"apiErrorCode": 404}'
+
+        expected_symptom_details = None
+
+        # Call get_symptom_defination_details method under test
+        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_symptom_details, symptom_details)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_symptom_defination_details_symptom_uuid_not_provided(self, m_get):
+        """Test update_symptom_defination: if required symptom uuid is not provided"""
+
+        # Expected symptom to be updated
+        symptom_uuid = None
+        expected_symptom_details = None
+
+        # Call get_symptom_defination_details method under test
+        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_symptom_details, symptom_details)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_reconfigure_alarm_valid_req_response(self, m_put):
+        """Test reconfigure_alarm: for valid REST response"""
+
+        # Set input parameters to reconfigure_alarm
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'description': 'CPU_Utilization_Above_Threshold', 'adapterKindKey': 'VMWARE',
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'}, 'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
+                            'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL',
+                            'symptomSetOperator': 'AND'}}],
+            'type': 16, 'cancelCycles': 1, 'resourceKindKey': 'VirtualMachine', 'subType': 19,
+            'waitCycles': 1}
+
+        new_alarm_config = {'severity': 'WARNING',
+                            'description': 'CPU_Utilization_Above_Threshold_Warning'}
+
+        # Set mocked function's return values
+        m_put.return_value.status_code = 200
+        m_put.return_value.content = '{"id":"AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "description":"CPU_Utilization_Above_Threshold_Warning","adapterKindKey":"VMWARE",\
+            "resourceKindKey":"VirtualMachine","waitCycles":1,"cancelCycles":1,"type":16,\
+            "subType":19,"states":[{"severity":"WARNING","base-symptom-set":{"type":"SYMPTOM_SET",\
+            "relation":"SELF","aggregation":"ALL","symptomSetOperator":"AND",\
+            "symptomDefinitionIds":["SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]},\
+            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
+
+        # Expected alarm_def_uuid to be returned
+        expected_alarm_def_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+
+        # Call reconfigure_alarm method under test
+        alarm_def_uuid = self.mon_plugin.reconfigure_alarm(alarm_details_json, new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_def_uuid, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_reconfigure_alarm_invalid_req_response(self, m_put):
+        """Test reconfigure_alarm: for invalid REST response, return None"""
+
+        # Set input parameters to reconfigure_alarm
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'description': 'CPU_Utilization_Above_Threshold', 'adapterKindKey': 'VMWARE',
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'}, 'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
+                            'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL',
+                            'symptomSetOperator': 'AND'}}],
+            'type': 16, 'cancelCycles': 1, 'resourceKindKey': 'VirtualMachine', 'subType': 19,
+            'waitCycles': 1}
+
+        new_alarm_config = {'severity': 'WARNING',
+                            'description': 'CPU_Utilization_Above_Threshold_Warning'}
+
+        # Set mocked function's return values
+        m_put.return_value.status_code = 500
+        m_put.return_value.content = '{"message": "Internal Server error, cause unknown.",\
+            "moreInformation": [{"name": "errorMessage",\
+            "value": "Cannot update Alert Definition CPU_Utilization_Above_Thr-\
+            e14b203c-6bf2-4e2f-a91c-8c19d240eda4 since it does not exist"},\
+            {"name": "localizedMessage",\
+            "value": "Cannot update Alert Definition CPU_Utilization_Above_Thr-\
+            e14b203c-6bf2-4e2f-a91c-8c19d240eda4 since it does not exist;"}],\
+            "httpStatusCode": 500,"apiErrorCode": 500}'
+
+        # Expected alarm_def_uuid to be returned
+        expected_alarm_def_uuid = None
+
+        # Call reconfigure_alarm method under test
+        alarm_def_uuid = self.mon_plugin.reconfigure_alarm(alarm_details_json, new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_def_uuid, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_successful_alarm_deletion(self,
+                                                                  m_get_alarm_defination_details,
+                                                                  m_delete_notification_rule,
+                                                                  m_delete_alarm_defination,
+                                                                  m_delete_symptom_definition):
+        """Test delete_alarm_configuration: for successful alarm deletion, return alarm uuid"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
+        m_delete_alarm_defination.return_value = alarm_details['alarm_id']
+        m_delete_symptom_definition.return_value = alarm_details['symptom_definition_id']
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
+        m_delete_symptom_definition.assert_called_with(alarm_details['symptom_definition_id'])
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, delete_alarm_req_dict['alarm_uuid'])
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_get_alarm_defination(self,
+                                                                       m_get_alarm_defination_details,
+                                                                       m_delete_notification_rule,
+                                                                       m_delete_alarm_defination,
+                                                                       m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to get alarm definition, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = None
+        alarm_details = None
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_not_called()
+        m_delete_alarm_defination.assert_not_called()
+        m_delete_symptom_definition.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_delete_notification_rule(self,
+                                                                           m_get_alarm_defination_details,
+                                                                           m_delete_notification_rule,
+                                                                           m_delete_alarm_defination,
+                                                                           m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to delete notification rule, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
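+        # Returning None simulates failure to delete the notification rule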
+        m_delete_notification_rule.return_value = None
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_not_called()
+        m_delete_symptom_definition.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_delete_alarm_defination(self,
+                                                                          m_get_alarm_defination_details,
+                                                                          m_delete_notification_rule,
+                                                                          m_delete_alarm_defination,
+                                                                          m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to delete alarm definition, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
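+        # Returning None simulates failure to delete the alarm definition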
+        m_delete_alarm_defination.return_value = None
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
+        m_delete_symptom_definition.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_delete_symptom_definition(self,
+                                                                            m_get_alarm_defination_details,
+                                                                            m_delete_notification_rule,
+                                                                            m_delete_alarm_defination,
+                                                                            m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to delete symptom definition, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
+        m_delete_alarm_defination.return_value = alarm_details['alarm_id']
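+        # Returning None simulates failure to delete the symptom definition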
+        m_delete_symptom_definition.return_value = None
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
+        m_delete_symptom_definition.assert_called_with(alarm_details['symptom_definition_id'])
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
+    def test_delete_notification_rule_successful_deletion_req_response(self,
+                                                                       m_get_notification_rule_id_by_alarm_name,
+                                                                       m_delete):
+        """Test delete_notification_rule: Valid notification rule is deleted & returns rule_id"""
+
+        # Set input parameters to delete_notification_rule
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get_notification_rule_id_by_alarm_name.return_value = '8db86441-71d8-4830-9e1a-a90be3776d12'
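+        # A 204 No Content response indicates the notification rule was deleted successfully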
+        m_delete.return_value.status_code = 204
+
+        # Call delete_notification_rule method under test
+        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(rule_id, '8db86441-71d8-4830-9e1a-a90be3776d12')
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
+    def test_delete_notification_rule_failed_to_get_notification_rule_id(self,
+                                                                         m_get_notification_rule_id_by_alarm_name,
+                                                                         m_delete):
+        """Test delete_notification_rule: if notification rule is not found, returns None"""
+
+        # Set input parameters to delete_notification_rule
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get_notification_rule_id_by_alarm_name.return_value = None
+
+        # Call delete_notification_rule method under test
+        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
+        m_delete.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(rule_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
+    def test_delete_notification_rule_invalid_deletion_req_response(self,
+                                                                    m_get_notification_rule_id_by_alarm_name,
+                                                                    m_delete):
+        """Test delete_notification_rule: If an invalid response is received, returns None"""
+
+        # Set input parameters to delete_notification_rule
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get_notification_rule_id_by_alarm_name.return_value = '8db86441-71d8-4830-9e1a-a90be3776d12'
+        m_delete.return_value.status_code = 404
+
+        # Call delete_notification_rule method under test
+        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(rule_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_notification_rule_id_by_alarm_name_valid_req_response(self, m_get):
+        """Test get_notification_rule_id_by_alarm_name: A valid request response received,
+            returns notification_id
+        """
+
+        # Set input parameters to get_notification_rule_id_by_alarm_name
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{\
+        "pageInfo": {"totalCount": 0,"page": 0,"pageSize": 1000},\
+        "links": [\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "SELF","name": "current"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "first"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "last"}],\
+        "notification-rule": [{\
+        "id": "2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "name": "notify_CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+        "pluginId": "03053f51-f829-438d-993d-cc33a435d76a",\
+        "links": [{"href": "/suite-api/api/notifications/rules/2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "rel": "SELF","name": "linkToSelf"}]}]}'
+
+        # Call get_notification_rule_id_by_alarm_name method under test
+        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(notification_id, '2b86fa23-0c15-445c-a2b1-7bd725c46f59')
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_notification_rule_id_by_alarm_name_invalid_req_response(self, m_get):
+        """Test get_notification_rule_id_by_alarm_name: If an invalid response received,\
+            returns None
+        """
+
+        # Set input parameters to get_notification_rule_id_by_alarm_name
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 404
+
+        # Call get_notification_rule_id_by_alarm_name method under test
+        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(notification_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_notification_rule_id_by_alarm_name_rule_not_found(self, m_get):
+        """Test get_notification_rule_id_by_alarm_name: If a notification rule is not found,
+            returns None
+        """
+
+        # Set input parameters to get_notification_rule_id_by_alarm_name
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{\
+        "pageInfo": {"totalCount": 0,"page": 0,"pageSize": 1000},\
+        "links": [\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "SELF","name": "current"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "first"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "last"}],\
+        "notification-rule": [{\
+        "id": "2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "name": "notify_CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+        "pluginId": "03053f51-f829-438d-993d-cc33a435d76a",\
+        "links": [{"href": "/suite-api/api/notifications/rules/2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "rel": "SELF","name": "linkToSelf"}]}]}'
+
+        # Call get_notification_rule_id_by_alarm_name method under test
+        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(notification_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_alarm_defination_valid_req_response(self, m_delete):
+        """Test delete_alarm_defination: A valid request response received,
+            returns symptom_id
+        """
+
+        # Set input parameters to delete_alarm_definition
+        alarm_definition_id = 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 204
+
+        # Call delete_alarm_defination method under test
+        actual_alarm_id = self.mon_plugin.delete_alarm_defination(alarm_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_alarm_id, alarm_definition_id)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_alarm_defination_invalid_req_response(self, m_delete):
+        """Test delete_alarm_defination: If an invalid request response received,
+            returns None
+        """
+
+        # Set input parameters to delete_alarm_definition
+        alarm_definition_id = 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 404
+
+        # Call delete_alarm_defination method under test
+        actual_alarm_id = self.mon_plugin.delete_alarm_defination(alarm_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_alarm_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_symptom_definition_valid_req_response(self, m_delete):
+        """Test delete_symptom_definition: A valid request response received,
+            returns symptom_id
+        """
+
+        # Set input parameters to delete_symptom_definition
+        symptom_definition_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 204
+
+        # Call delete_symptom_definition method under test
+        actual_symptom_id = self.mon_plugin.delete_symptom_definition(symptom_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_symptom_id, symptom_definition_id)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_symptom_definition_invalid_req_response(self, m_delete):
+        """Test delete_symptom_definition: If an invalid request response received,
+            returns None
+        """
+
+        # Set input parameters to delete_symptom_definition
+        symptom_definition_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 404
+
+        # Call delete_symptom_definition method under test
+        actual_symptom_id = self.mon_plugin.delete_symptom_definition(symptom_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_symptom_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_configure_rest_plugin_valid_plugin_id(self, m_check_if_plugin_configured, m_post):
+        """Test configure rest plugin method-valid plugin id"""
+
+        # mock return values
+        expected_return = m_check_if_plugin_configured.return_value = "mock_pluginid"
+
+        # call configure rest plugin method under test
+        actual_return = self.mon_plugin.configure_rest_plugin()
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called()
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'enable_rest_plugin')
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_configure_rest_plugin_invalid_plugin_id(self, m_check_if_plugin_configured, m_post, m_enable_rest_plugin):
+        """Test configure rest plugin method-invalid plugin id"""
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = None  # not configured
+        m_post.return_value.status_code = 201  # success
+        m_post.return_value.content = '{"pluginTypeId":"RestPlugin","pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518693747871,"enabled":false,\
+                                        "configValues":[{"name":"Url","value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},{"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0:C3:E8:26:50"},\
+                                        {"name":"ConnectionCount","value":"20"}]}'
+
+        m_enable_rest_plugin.return_value = True  # success
+        expected_return = '1ef15663-9739-49fe-8c41-022bcc9f690c'
+
+        # call configure rest plugin method under test
+        actual_return = self.mon_plugin.configure_rest_plugin()
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called()
+        m_post.assert_called()
+        m_enable_rest_plugin.assert_called_with('1ef15663-9739-49fe-8c41-022bcc9f690c', 'MON_module_REST_Plugin')
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'enable_rest_plugin')
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_configure_rest_plugin_failed_to_enable_plugin(self, m_check_if_plugin_configured, m_post,
+                                                          m_enable_rest_plugin):
+        """Test configure rest plugin method-failed to enable plugin case"""
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = None  # not configured
+        m_post.return_value.status_code = 201  # success
+        m_post.return_value.content = '{"pluginTypeId":"RestPlugin","pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518693747871,"enabled":false,\
+                                        "configValues":[{"name":"Url","value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},{"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0:C3:E8:26:50"},\
+                                        {"name":"ConnectionCount","value":"20"}]}'
+
+        m_enable_rest_plugin.return_value = False  # return failure
+        expected_return = None
+
+        # call configure rest plugin method under test
+        actual_return = self.mon_plugin.configure_rest_plugin()
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called()
+        m_post.assert_called()
+        m_enable_rest_plugin.assert_called_with('1ef15663-9739-49fe-8c41-022bcc9f690c', 'MON_module_REST_Plugin')
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_check_if_plugin_configured_valid_req_response(self, m_get):
+        """Test check if plugin configured method-valid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        # mock return values
+        m_get.return_value.status_code = 200
+        expected_return = '1ef15663-9739-49fe-8c41-022bcc9f690c'
+        m_get.return_value.content = '{"notificationPluginInstances":\
+                                       [{"pluginTypeId":"RestPlugin",\
+                                        "pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518694966987,\
+                                        "enabled":true,"configValues":[{"name":"Url",\
+                                        "value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},\
+                                        {"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0"},\
+                                        {"name":"ConnectionCount","value":"20"}]}]}'
+
+        # call check if plugin configured method under test
+        actual_return = self.mon_plugin.check_if_plugin_configured(plugin_name)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_check_if_plugin_configured_invalid_req_response(self, m_get):
+        """Test check if plugin configured method-invalid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        # mock return values
+        m_get.return_value.status_code = 201
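+        # a non-200 status is treated as "plugin not configured", so None is expected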
+        expected_return = None
+        m_get.return_value.content = '{"notificationPluginInstances":\
+                                       [{"pluginTypeId":"RestPlugin",\
+                                        "pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518694966987,\
+                                        "enabled":true,"configValues":[{"name":"Url",\
+                                        "value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},\
+                                        {"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0"},\
+                                        {"name":"ConnectionCount","value":"20"}]}]}'
+
+        # call check if plugin configured method under test
+        actual_return = self.mon_plugin.check_if_plugin_configured(plugin_name)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_enable_rest_plugin_valid_req_response(self, m_put):
+        """Test enable rest plugin method-valid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        plugin_id = '1ef15663-9739-49fe-8c41-022bcc9f690c'
+        # mock return values
+        m_put.return_value.status_code = 204
+        expected_return = True
+        m_put.return_value.content = ''
+
+        # call enable rest plugin method under test
+        actual_return = self.mon_plugin.enable_rest_plugin(plugin_id, plugin_name)
+
+        # verify that mocked method is called
+        m_put.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_enable_rest_plugin_invalid_req_response(self, m_put):
+        """Test enable rest plugin method-invalid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        plugin_id = '08018c0f-8879-4ca1-9b92-00e22d2ff81b'  # invalid plugin id
+        # mock return values
+        m_put.return_value.status_code = 404  # api Error code
+        expected_return = False
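+        # mocked vROps error body for a non-existent notification plugin id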
+        m_put.return_value.content = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?><ops:\
+                                      error xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
+                                      xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:ops=\
+                                      "http://webservice.vmware.com/vRealizeOpsMgr/1.0/" \
+                                      httpStatusCode="404" apiErrorCode="404"><ops:message>\
+                                      No such Notification Plugin - 08018c0f-8879-4ca1-9b92-\
+                                      00e22d2ff81b.</ops:message></ops:error>'
+
+        # call enable rest plugin method under test
+        actual_return = self.mon_plugin.enable_rest_plugin(plugin_id, plugin_name)
+
+        # verify that mocked method is called
+        m_put.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_create_alarm_notification_rule_valid_req(self, m_check_if_plugin_configured, m_post):
+        """Test create alarm notification rule method valid request response"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
+        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = "8db86441-71d8-4830-9e1a-a90be3776d12"
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = '03053f51-f829-438d-993d-cc33a435d76a'
+        m_post.return_value.status_code = 201
+        m_post.return_value.content = '{"id":"8db86441-71d8-4830-9e1a-a90be3776d12",\
+                                      "name":"notify_CPU_Utilization_Above_Thr-e14b203c",\
+                                      "pluginId":"03053f51-f829-438d-993d-cc33a435d76a",\
+                                      "alertControlStates":[],"alertStatuses":[],\
+                                      "resourceFilter":{"matchResourceIdOnly":true,\
+                                      "childrenResourceKindFilters":[],\
+                                      "resourceId":"ac87622f-b761-40a0-b151-00872a2a456e"},\
+                                      "alertTypeFilters":[],"alertDefinitionIdFilters":{"values":[\
+                                      "AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14"]}}'
+
+        # call create alarm notification rule method under test
+        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_create_alarm_notification_rule_invalid_req(self, m_check_if_plugin_configured, m_post):
+        """Test create alarm notification rule method invalid request response"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
+        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = None  # invalid request should return None
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = '03053f51-f829-438d-993d-cc33a435d76a'
+        m_post.return_value.status_code = 500
+        m_post.return_value.content = '{"message":"Internal Server error, cause unknown.",\
+                                        "moreInformation":[{"name":"errorMessage","value":\
+                                        "there is already a rule with the same rule name"},\
+                                       {"name":"localizedMessage","value":"there is already \
+                                        a rule with the same rule name;"}],"httpStatusCode":500,\
+                                        "apiErrorCode":500}'
+
+        # call create alarm notification rule method under test
+        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_create_alarm_notification_rule_failed_to_get_plugin_id(self,
+                                                                    m_check_if_plugin_configured, m_post):
+        """Test create alarm notification rule method invalid plugin id"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
+        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = None  # invalid request should return None
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = None
+
+        # call create alarm notification rule method under test
+        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_valid_rest_req_response(self, m_get_default_Params,
+                                                      m_get_vm_moref_id,
+                                                      m_get_vm_resource_id,
+                                                      m_get):
+        """Test get metrics data of resource method valid request response"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 200
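+        # mocked vROps stats response with 12 cpu|usage_average samples for the resource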
+        m_get.return_value.content = '{"values":[{"resourceId":"ac87622f-b761-40a0-b151-\
+                                       00872a2a456e","stat-list":{"stat":[{"timestamps":\
+                                      [1519716874297,1519717174294,1519717474295,1519717774298,\
+                                      1519718074300,1519718374299,1519718674314,1519718974325,\
+                                      1519719274304,1519719574298,1519719874298,1519720174301],\
+                                      "statKey":{"key":"cpu|usage_average"},"intervalUnit":\
+                                      {"quantifier":1},"data":[0.1120000034570694,\
+                                      0.11866666376590729,0.11599999666213989,0.11400000005960464,\
+                                      0.12066666781902313,0.11533333361148834,0.11800000071525574,\
+                                      0.11533333361148834,0.12200000137090683,0.11400000005960464,\
+                                      0.1459999978542328,0.12133333086967468]}]}}]}'
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_get.assert_called()
+
+        # verify return value with expected value
+        # self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_invalid_rest_req_response(self, m_get_default_Params,
+                                                        m_get_vm_moref_id,
+                                                        m_get_vm_resource_id,
+                                                        m_get):
+        """Test get metrics data of resource method invalid request response"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 400
+        m_get.return_value.content = '{"message":"Invalid request... #1 violations found.",\
+                                       "validationFailures":[{"failureMessage":"Invalid Parameter",\
+                                       "violationPath":"end"}],"httpStatusCode":400,\
+                                       "apiErrorCode":400}'
+        expected_return = {'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
+                           'schema_version': '1.0',
+                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response',
+                           'unit': '%', 'vim_uuid': '1'}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_metric_not_supported(self, m_get_default_Params,
+                                                   m_get_vm_moref_id,
+                                                   m_get_vm_resource_id,
+                                                   m_get):
+        """Test get metrics data of resource method invalid metric name"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'invalid_metric', 'metric_uuid': None,
+                   'schema_version': 1.0,
+                   'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {}  # returns empty dict
+
+        expected_return = {'metric_name': 'invalid_metric', 'metric_uuid': '0', 'vim_uuid': '1',
+                           'schema_version': '1.0', 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response', 'unit': None}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called/not called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_failed_to_get_vm_moref_id(self, m_get_default_Params,
+                                                        m_get_vm_moref_id,
+                                                        m_get_vm_resource_id,
+                                                        m_get):
+        """Test get metrics data method negative scenario- invalid resource id"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'cpu_utilization', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        m_get_vm_moref_id.return_value = None
+        expected_return = {'metric_name': 'cpu_utilization', 'metric_uuid': '0',
+                           'schema_version': '1.0',
+                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response',
+                           'unit': '%', 'vim_uuid': '1'}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called/not called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_not_called()
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_failed_to_get_vm_resource_id(self, m_get_default_Params,
+                                                           m_get_vm_moref_id,
+                                                           m_get_vm_resource_id,
+                                                           m_get):
+        """Test get metrics data method negative scenario- invalid moref id"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        m_get_vm_moref_id.return_value = 'Invalid-vm-6626'
+        m_get_vm_resource_id.return_value = None
+        expected_return = {'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
+                           'schema_version': '1.0',
+                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response',
+                           'unit': '%', 'vim_uuid': '1'}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called/not called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_called()
+        m_get_vm_resource_id.assert_called_with('Invalid-vm-6626')
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_update_alarm_configuration_successful_updation(self, m_get_alarm_defination_details,
+                                                            m_update_symptom_defination,
+                                                            m_reconfigure_alarm):
+        """Test update alarm configuration method"""
+
+        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
+                        'correlation_id': 14203,
+                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
+
+        # mock return value
+        alarm_details_json = {
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                        'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
+                            'relation': 'SELF',
+                            'type': 'SYMPTOM_SET',
+                            'aggregation': 'ALL'}}],
+            'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
+            'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
+        alarm_details = {
+            'symptom_definition_id': 'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2',
+            'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'resource_kind': 'VirtualMachine', 'type': 16}
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_update_symptom_defination.return_value = 'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'
+        expected_return = m_reconfigure_alarm.return_value = 'f1163767-6eac-438f-8e60-a7a867257e14'
+
+        # call update alarm configuration method under test
+        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
+
+        # verify that mocked method is called
+        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
+        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],
+                                                       alarm_config)
+        m_reconfigure_alarm.assert_called_with(alarm_details_json, alarm_config)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_update_alarm_configuration_failed_to_reconfigure_alarm(self,
+                                                                    m_get_alarm_defination_details,
+                                                                    m_update_symptom_defination,
+                                                                    m_reconfigure_alarm):
+        """Test update alarm configuration method- failed to reconfigure alarm"""
+
+        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
+                        'correlation_id': 14203,
+                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
+
+        # mock return value
+        alarm_details_json = {
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                        'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
+                            'relation': 'SELF',
+                            'type': 'SYMPTOM_SET',
+                            'aggregation': 'ALL'}}],
+            'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
+            'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
+        alarm_details = {
+            'symptom_definition_id': 'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2',
+            'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'resource_kind': 'VirtualMachine', 'type': 16}
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_update_symptom_defination.return_value = 'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'
+        expected_return = m_reconfigure_alarm.return_value = None  # failed to reconfigure
+
+        # call update alarm configuration method under test
+        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
+
+        # verify that mocked method is called
+        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
+        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],
+                                                       alarm_config)
+        m_reconfigure_alarm.assert_called_with(alarm_details_json, alarm_config)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_update_alarm_configuration_failed_to_update_symptom(self,
+                                                                 m_get_alarm_defination_details,
+                                                                 m_update_symptom_defination,
+                                                                 m_reconfigure_alarm):
+        """Test update alarm configuration method- failed to update alarm"""
+
+        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
+                        'correlation_id': 14203,
+                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
+
+        # mock return value
+        alarm_details_json = {
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                        'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
+                            'relation': 'SELF',
+                            'type': 'SYMPTOM_SET',
+                            'aggregation': 'ALL'}}],
+            'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
+            'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
+        alarm_details = {
+            'symptom_definition_id': 'Invalid-47c88675-bea8-436a-bb41-8d2231428f44',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2',
+            'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'resource_kind': 'VirtualMachine', 'type': 16}
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        expected_return = m_update_symptom_defination.return_value = None
+
+        # call update alarm configuration method under test
+        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
+
+        # verify that mocked method is called
+        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
+        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],
+                                                       alarm_config)
+        m_reconfigure_alarm.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_verify_metric_support_metric_supported_with_unit(self, m_get_default_Params):
+        """Test verify metric support method for supported metric"""
+
+        # mock return value
+        metric_info = {'metric_unit': '%', 'metric_name': 'cpu_utilization',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        expected_return = True  # supported metric returns True
+
+        # call verify metric support method under test
+        actual_return = self.mon_plugin.verify_metric_support(metric_info)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metric_info['metric_name'])
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_verify_metric_support_metric_not_supported(self, m_get_default_Params):
+        """Test verify metric support method for un-supported metric"""
+
+        # mock return value
+        metric_info = {'metric_unit': '%', 'metric_name': 'invalid_metric',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+        m_get_default_Params.return_value = {}
+        expected_return = False  # unsupported metric returns False
+
+        # call verify metric support method under test
+        actual_return = self.mon_plugin.verify_metric_support(metric_info)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metric_info['metric_name'])
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_verify_metric_support_metric_supported_with_mismatched_unit(self,
+                                                                         m_get_default_Params):
+        """Test verify metric support method for supported metric with mismatched unit"""
+
+        # mock return value
+        metric_info = {'metric_unit': '', 'metric_name': 'invalid_metric',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        expected_return = True  # supported metric returns True
+
+        # call verify metric support method under test
+        actual_return = self.mon_plugin.verify_metric_support(metric_info)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metric_info['metric_name'])
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
+    def test_get_triggered_alarms_list_returns_triggered_alarms(self,
+                                                                m_get_vrops_resourceid,
+                                                                m_triggered_alarms):
+        """Test get triggered alarm list method valid input"""
+
+        # Mock list alarm input
+        list_alarm_input = {'severity': 'CRITICAL',
+                            'correlation_id': 'e14b203c',
+                            'alarm_name': 'CPU_Utilization_Above_Threshold',
+                            'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+
+        resource_id = m_get_vrops_resourceid.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = m_triggered_alarms.return_value = [{'status': 'ACTIVE',
+                                                              'update_date': '2018-01-12T08:34:05',
+                                                              'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                                                              'cancel_date': '0000-00-00T00:00:00',
+                                                              'alarm_instance_uuid': 'd9e3bc84',
+                                                              'alarm_uuid': '5714977d', 'vim_type': 'VMware',
+                                                              'start_date': '2018-01-12T08:34:05'},
+                                                             {'status': 'CANCELED',
+                                                              'update_date': '2017-12-20T09:37:57',
+                                                              'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                                                              'cancel_date': '2018-01-12T06:49:19',
+                                                              'alarm_instance_uuid': 'd3bbeef6',
+                                                              'alarm_uuid': '7ba1bf3e', 'vim_type': 'VMware',
+                                                              'start_date': '2017-12-20T09:37:57'}]
+
+        # call get triggered alarms list method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+
+        # verify that mocked method is called
+        m_get_vrops_resourceid.assert_called_with(list_alarm_input['resource_uuid'])
+        m_triggered_alarms.assert_called_with(list_alarm_input['resource_uuid'], resource_id)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
+    def test_get_triggered_alarms_list_invalid_resource_uuid(self,
+                                                             m_get_vrops_resourceid,
+                                                             m_triggered_alarms):
+        """Test get triggered alarm list method invalid resource uuid"""
+
+        # Mock list alarm input
+        list_alarm_input = {'severity': 'CRITICAL',
+                            'correlation_id': 'e14b203c',
+                            'alarm_name': 'CPU_Utilization_Above_Threshold',
+                            'resource_uuid': '12345'}  # invalid resource uuid
+
+        m_get_vrops_resourceid.return_value = None  # resource not found in vROps
+        expected_return = []  # invalid resource uuid should yield an empty list
+
+        # call get triggered alarms list method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+
+        # verify that mocked method is called
+        m_get_vrops_resourceid.assert_called_with(list_alarm_input['resource_uuid'])
+        m_triggered_alarms.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
+    def test_get_triggered_alarms_list_resource_uuid_not_present(self,
+                                                                 m_get_vrops_resourceid,
+                                                                 m_triggered_alarms):
+        """Test get triggered alarm list method resource not present"""
+
+        # Mock list alarm input
+        list_alarm_input = {'severity': 'CRITICAL',
+                            'correlation_id': 'e14b203c',
+                            'alarm_name': 'CPU_Utilization_Above_Threshold'}
+
+        # call get triggered alarms list method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+
+        # verify that mocked method is called
+        m_get_vrops_resourceid.assert_not_called()
+        m_triggered_alarms.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual([], actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    def test_get_vrops_resourceid_from_ro_uuid(self, m_get_vm_moref_id, m_get_vm_resource_id):
+        """Test get vrops resourceid from ro uuid method"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        expected_return = m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+
+        # call get_vrops_resourceid_from_ro_uuid method under test
+        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
+
+        # verify that mocked method is called
+        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    def test_get_vrops_resourceid_from_ro_uuid_failed_to_get_vm_resource_id(self,
+                                                                            m_get_vm_moref_id,
+                                                                            m_get_vm_resource_id):
+        """Test get vrops resourceid from ro uuid method negative scenario"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        expected_return = m_get_vm_resource_id.return_value = None
+
+        # call get_vrops_resourceid_from_ro_uuid method under test
+        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
+
+        # verify that mocked method is called
+        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    def test_get_vrops_resourceid_from_ro_uuid_failed_to_get_vm_moref_id(self,
+                                                                         m_get_vm_moref_id,
+                                                                         m_get_vm_resource_id):
+        """Test get vrops resourceid from ro uuid method negative scenario"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        expected_return = vm_moref_id = m_get_vm_moref_id.return_value = None
+
+        # call get_vrops_resourceid_from_ro_uuid method under test
+        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
+
+        # verify that mocked method is called
+        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
+        m_get_vm_resource_id.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_triggered_alarms_on_resource_valid_req_response(self, m_get):
+        """Test get triggered alarms on resource method for valid request"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 200
+        expected_return = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
+                            'severity': 'CRITICAL', 'start_date': '2018-01-12T08:34:05',
+                            'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                            'cancel_date': '2018-02-12T08:24:48', 'vim_type': 'VMware',
+                            'alarm_instance_uuid': 'd9e3bc84-dcb4-4905-b592-00a55f4cdaf1',
+                            'alarm_uuid': '5714977d-56f6-4222-adc7-43fa6c6e7e39'}]
+
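+        # mocked vROps alerts response: two alerts are returned, but only the first one's
+        # alertDefinitionName matches the requested resource uuid, so only it is expected above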
+        m_get.return_value.content = '{"alerts": [\
+        {\
+            "alertId": "d9e3bc84-dcb4-4905-b592-00a55f4cdaf1",\
+            "resourceId": "ac87622f-b761-40a0-b151-00872a2a456e",\
+            "alertLevel": "CRITICAL",\
+            "status": "ACTIVE",\
+            "startTimeUTC": 1515746045278,\
+            "cancelTimeUTC": 1518423888708,\
+            "updateTimeUTC": 1515746045278,\
+            "alertDefinitionId": "AlertDefinition-5714977d-56f6-4222-adc7-43fa6c6e7e39",\
+            "alertDefinitionName": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4"\
+        },\
+        {\
+            "alertId": "5fb5e940-e161-4253-a729-7255c6d6b1f5",\
+            "resourceId": "ac87622f-b761-40a0-b151-00872a2a456e",\
+            "alertLevel": "WARNING",\
+            "status": "CANCELED",\
+            "startTimeUTC": 1506684979154,\
+            "cancelTimeUTC": 0,\
+            "updateTimeUTC": 1520471975507,\
+            "alertDefinitionId": "AlertDefinition-9ec5a921-1a54-411d-85ec-4c1c9b26dd02",\
+            "alertDefinitionName": "VM_CPU_Usage_Alarm"\
+        }]}'
+
+        # call get_triggered_alarms_on_resource method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid,
+                                                                         vrops_resource_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_triggered_alarms_on_resource_invalid_req_response(self, m_get):
+        """Test get triggered alarms on resource method for invalid request"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 204
+        expected_return = None
+
+        # call get_triggered_alarms_on_resource method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid,
+                                                                         vrops_resource_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_triggered_alarms_on_resource_no_alarms_present(self, m_get):
+        """Test get triggered alarms on resource method for no alarms present"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 200
+        expected_return = []
+        m_get.return_value.content = '{"alerts": []}'
+
+        # call get_triggered_alarms_on_resource method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid,
+                                                                         vrops_resource_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    def test_convert_date_time_valid_date_time(self):
+        """Test convert date time method valid input"""
+
+        # Mock the inputs
+        date_time = 1515746045278
+        expected_return = '2018-01-12T08:34:05'
+
+        # call convert_date_time method under test
+        actual_return = self.mon_plugin.convert_date_time(date_time)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    def test_convert_date_time_invalid_date_time(self):
+        """Test convert date time method invalid input"""
+
+        # Mock the inputs
+        date_time = 0
+        expected_return = '0000-00-00T00:00:00'
+
+        # call convert_date_time method under test
+        actual_return = self.mon_plugin.convert_date_time(date_time)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_vm_resource_id_rest_valid_req_response(self, m_get):
+        """Test get vms resource id valid request"""
+
+        # Mock the inputs
+        vm_moref_id = 'vm-6626'
+        m_get.return_value.status_code = 200
+        expected_return = "ac87622f-b761-40a0-b151-00872a2a456e"
+        m_get.return_value.content = \
+            '{ \
+                "resourceList": [\
+                   {\
+                       "creationTime": 1497770174130,\
+                       "resourceKey": {\
+                           "name": "OCInst2.ubuntu(4337d51f-1e65-4ab0-9c08-4897778d4fda)",\
+                           "adapterKindKey": "VMWARE",\
+                           "resourceKindKey": "VirtualMachine",\
+                           "resourceIdentifiers": [\
+                               {\
+                                   "identifierType": {\
+                                   "name": "VMEntityObjectID",\
+                                   "dataType": "STRING",\
+                                   "isPartOfUniqueness": true\
+                                   },\
+                                   "value": "vm-6626"\
+                               }\
+                           ]\
+                       },\
+                       "identifier": "ac87622f-b761-40a0-b151-00872a2a456e"\
+                    }\
+                ]\
+            }'
+
+        # call get_vm_resource_id method under test
+        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_vm_resource_id_rest_invalid_req_response(self, m_get):
+        """Test get vms resource id invalid request"""
+
+        # Mock the inputs
+        vm_moref_id = 'vm-6626'
+        m_get.return_value.status_code = 406
+        expected_return = None
+        m_get.return_value.content = '406 Not Acceptable'
+
+        # call get_vm_resource_id method under test
+        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_vm_resource_id_rest_invalid_response(self, m_get):
+        """Test get vms resource id invalid response"""
+
+        # Mock the inputs
+        vm_moref_id = 'vm-6626'
+        m_get.return_value.status_code = 200
+        expected_return = None
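+        # malformed mock response: "resourceList" is an object instead of a list,
+        # so no resource identifier should be extracted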
+        m_get.return_value.content = \
+            '{ \
+                "resourceList": \
+                   {\
+                       "creationTime": 1497770174130,\
+                       "resourceKey": {\
+                           "name": "OCInst2.ubuntu(4337d51f-1e65-4ab0-9c08-4897778d4fda)",\
+                           "adapterKindKey": "VMWARE",\
+                           "resourceKindKey": "VirtualMachine",\
+                           "resourceIdentifiers": [\
+                               {\
+                                   "identifierType": {\
+                                   "name": "VMEntityObjectID",\
+                                   "dataType": "STRING",\
+                                   "isPartOfUniqueness": true\
+                                   },\
+                                   "value": "vm-6626"\
+                               }\
+                           ]\
+                       },\
+                       "identifier": "ac87622f-b761-40a0-b151-00872a2a456e"\
+                    }\
+            }'
+
+        # call get_vm_resource_id method under test
+        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vapp_details_rest')
+    def test_get_vm_moref_id_valid_id_found(self, m_get_vapp_details_rest):
+        """Test get vm moref id valid scenario"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_get_vapp_details_rest.return_value = {'vm_vcenter_info': {'vm_moref_id': 'vm-6626'}}
+        expected_return = 'vm-6626'
+
+        # call get_vm_moref_id method under test
+        actual_return = self.mon_plugin.get_vm_moref_id(vapp_uuid)
+
+        # verify that mocked method is called
+        m_get_vapp_details_rest.assert_called_with(vapp_uuid)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vapp_details_rest')
+    def test_get_vm_moref_id_valid_id_not_found(self, m_get_vapp_details_rest):
+        """Test get vm moref id invalid scenario"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda'  # invalid uuid
+        m_get_vapp_details_rest.return_value = {}
+        expected_return = None
+
+        # call get_vm_moref_id method under test
+        actual_return = self.mon_plugin.get_vm_moref_id(vapp_uuid)
+
+        # verify that mocked method is called
+        m_get_vapp_details_rest.assert_called_with(vapp_uuid)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_valid_req_response(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for valid request response"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = self.vca
+        self.vca._session = self.session
+        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
+        m_get.return_value.status_code = 200
+        expected_return = {'vm_vcenter_info': {'vm_moref_id': 'vm-6626'}}
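+        # mocked vCloud Director vApp response carrying the vCenter MoRef (vm-6626) of the VM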
+        m_get.return_value.content = '<?xml version="1.0" encoding="UTF-8"?>\
+        <VApp xmlns="http://www.vmware.com/vcloud/v1.5"  xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5" >\
+            <Children>\
+                <Vm needsCustomization="false"  type="application/vnd.vmware.vcloud.vm+xml">\
+                    <VCloudExtension required="false">\
+                        <vmext:VmVimInfo>\
+                            <vmext:VmVimObjectRef>\
+                                <vmext:VimServerRef  type="application/vnd.vmware.admin.vmwvirtualcenter+xml"/>\
+                                <vmext:MoRef>vm-6626</vmext:MoRef>\
+                                <vmext:VimObjectType>VIRTUAL_MACHINE</vmext:VimObjectType>\
+                            </vmext:VmVimObjectRef>\
+                        </vmext:VmVimInfo>\
+                    </VCloudExtension>\
+                </Vm>\
+            </Children>\
+        </VApp>'
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_invalid_req_response(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for invalid request response"""
+
+        # mock the inputs
+        vapp_uuid = 'Invalid-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = self.vca
+        self.vca._session = self.session
+        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
+        m_get.return_value.status_code = 400
+        expected_return = {}
+        m_get.return_value.content = 'Bad Request'
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_failed_to_connect_vcd(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for failed to connect to vcd"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = None
+        expected_return = {}
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_invalid_response(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for invalid response"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = self.vca
+        self.vca._session = self.session
+        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
+        m_get.return_value.status_code = 200
+        expected_return = {}
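+        # XML below is deliberately malformed (the vmext:VmVimObjectRef element is
+        # never closed), so parsing should fail and an empty dict is expected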
+        m_get.return_value.content = '<?xml version="1.0" encoding="UTF-8"?>\
+        <VApp xmlns="http://www.vmware.com/vcloud/v1.5"  xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5" >\
+            <Children>\
+                <Vm needsCustomization="false"  type="application/vnd.vmware.vcloud.vm+xml">\
+                    <VCloudExtension required="false">\
+                        <vmext:VmVimInfo>\
+                            <vmext:VmVimObjectRef>\
+                                <vmext:VimServerRef  type="application/vnd.vmware.admin.vmwvirtualcenter+xml"/>\
+                                <vmext:MoRef>vm-6626</vmext:MoRef>\
+                                <vmext:VimObjectType>VIRTUAL_MACHINE</vmext:VimObjectType>\
+                        </vmext:VmVimInfo>\
+                    </VCloudExtension>\
+                </Vm>\
+            </Children>\
+        </VApp>'
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.Client, 'set_credentials')
+    @mock.patch.object(monPlugin, 'Client')
+    def test_connect_as_admin(self, m_client, m_set_credentials):
+        """Test connect as admin to vCD method"""
+
+        # mock the inputs and mocked returns
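+        # Client (the vCD API client class) is patched, so no real vCD connection is made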
+        expected_return = m_client.return_value = self.vca
+        m_set_credentials.return_value = True
+
+        # call connect_as_admin method under test
+        actual_return = self.mon_plugin.connect_as_admin()
+
+        # verify that mocked method is called
+        m_client.assert_called_with(self.m_vim_access_config['vim_url'],
+                                    verify_ssl_certs=False)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+# For testing purpose
+# if __name__ == '__main__':
+#   unittest.main()
diff --git a/osm_mon/tests/plugins/VMware/test_plugin_receiver.py b/osm_mon/tests/plugins/VMware/test_plugin_receiver.py
new file mode 100644 (file)
index 0000000..cc5dea9
--- /dev/null
@@ -0,0 +1,927 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2017-2018 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+""" Mock tests for VMware vROPs plugin recevier """
+
+import json
+import logging
+import os
+import sys
+import unittest
+from io import UnsupportedOperation
+
+import mock
+
+# sys.path.append("/root/MON/")
+
+log = logging.getLogger(__name__)
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", ".."))
+
+from osm_mon.plugins.vRealiseOps import plugin_receiver as monPluginRec
+from osm_mon.core.database import VimCredentials
+
+
+class Message(object):
+    """A class to mock a message object value for alarm and matric requests"""
+
+    def __init__(self):
+        """Initialize a mocked message instance"""
+        self.topic = "alarm_or_metric_request"
+        self.key = None
+        self.value = json.dumps({"mock_value": "mock_details"})
+        self.partition = 1
+        self.offset = 100
+
+
+class TestPluginReceiver(unittest.TestCase):
+    """Test class for Plugin Receiver class methods"""
+
+    def setUp(self):
+        """Setup the tests for plugin_receiver class methods"""
+        super(TestPluginReceiver, self).setUp()
+        self.plugin_receiver = monPluginRec.PluginReceiver()
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'create_alarm')
+    def test_consume_create_alarm_request_key(self, m_create_alarm, m_publish_create_alarm_status):
+        """Test functionality of consume for create_alarm_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "alarm_create_request": "alarm_details"}
+        m_create_alarm.return_value = "test_alarm_id"
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_alarm_requests('create_alarm_request', value, vim_uuid)
+
+        # verify if create_alarm and publish methods called with correct params
+        m_create_alarm.assert_called_with(value)
+        m_publish_create_alarm_status.assert_called_with("test_alarm_id", value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_update_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'update_alarm')
+    def test_consume_update_alarm_request_key(self, m_update_alarm,
+                                              m_publish_update_alarm_status):
+        """Test functionality of consume for update_alarm_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "alarm_update_request": "alarm_details"}
+
+        # set return value to mocked method
+        m_update_alarm.return_value = "test_alarm_id"
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_alarm_requests('update_alarm_request', value, vim_uuid)
+
+        # verify update_alarm and publish method called with correct params
+        m_update_alarm.assert_called_with(value)
+        m_publish_update_alarm_status.assert_called_with("test_alarm_id", value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_delete_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'delete_alarm')
+    def test_consume_delete_alarm_request_key(self, m_delete_alarm,
+                                              m_publish_delete_alarm_status):
+        """Test functionality of consume for delete_alarm_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "alarm_delete_request": "alarm_details"}
+        m_delete_alarm.return_value = "test_alarm_id"
+
+        # Call the consume method of plugin_receiver and check delete_alarm request
+        self.plugin_receiver.handle_alarm_requests('delete_alarm_request', value, vim_uuid)
+        m_delete_alarm.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_delete_alarm_status.assert_called_with("test_alarm_id", value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_list_alarm_response')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'list_alarms')
+    def test_consume_list_alarm_request_key(self, m_list_alarms,
+                                            m_publish_list_alarm_response):
+        """ Test functionality of list alarm request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+        value = {"vim_uuid": vim_uuid, "alarm_list_request": "alarm_details"}
+
+        test_alarm_list = [{"alarm_uuid": "alarm1_details"}, {"alarm_uuid": "alarm2_details"}]
+
+        m_list_alarms.return_value = test_alarm_list
+
+        # Call the consume method of plugin_receiver and check list_alarm request
+        self.plugin_receiver.handle_alarm_requests('list_alarm_request', value, vim_uuid)
+        m_list_alarms.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_list_alarm_response.assert_called_with(test_alarm_list, value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'create_alarm')
+    def test_consume_invalid_alarm_request_key(self, m_create_alarm,
+                                               m_publish_create_alarm_status):
+        """Test functionality of consume for vim_access_credentials invalid request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        # Call the consume method of plugin_receiver
+        with self.assertRaises(UnsupportedOperation):
+            self.plugin_receiver.handle_alarm_requests('invalid_key', {}, vim_uuid)
+
+        # verify that create_alarm and publish_create_alarm_status methods not called
+        m_create_alarm.assert_not_called()
+        m_publish_create_alarm_status.assert_not_called()
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_metrics_data_status')
+    @mock.patch.object(monPluginRec.MonPlugin, 'get_metrics_data')
+    def test_consume_invalid_metric_request_key(self, m_get_metrics_data,
+                                                m_publish_metric_data_status):
+        """Test functionality of invalid metric key request"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        # Call the consume method of plugin_receiver
+        with self.assertRaises(UnsupportedOperation):
+            self.plugin_receiver.handle_metric_requests('invalid_key', {}, vim_uuid)
+
+        # verify that get metrics data and publish methods not called
+        m_get_metrics_data.assert_not_called()
+        m_publish_metric_data_status.assert_not_called()
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_metrics_data_status')
+    @mock.patch.object(monPluginRec.MonPlugin, 'get_metrics_data')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_consume_read_metric_data_request_key(self, m_get_vim_access_config,
+                                                  m_get_metrics_data,
+                                                  m_publish_metric_data_status):
+        """Test functionality of consume for read_metric_data_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "metric_name": "metric_details"}
+        m_get_metrics_data.return_value = {"metrics_data": "metrics_details"}
+
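+        # stubbed VIM access details consumed when the handler sets up its MonPlugin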
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('read_metric_data_request', value, vim_uuid)
+        m_get_metrics_data.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_metric_data_status.assert_called_with({"metrics_data": "metrics_details"})
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_metric_response')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'verify_metric')
+    def test_consume_create_metric_request_key(self, m_verify_metric,
+                                               m_publish_create_metric_response):
+        """Test functionality of consume for create_metric_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+        value = {"vim_uuid": vim_uuid, "metric_create": "metric_details"}
+
+        # set the return value
+        m_verify_metric.return_value = True
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('create_metric_request', value, vim_uuid)
+        m_verify_metric.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_create_metric_response.assert_called_with(value, True)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_update_metric_response')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'verify_metric')
+    def test_consume_update_metric_request_key(self, m_verify_metric,
+                                               m_publish_update_metric_response):
+        """Test functionality of update metric request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "metric_create": "metric_details"}
+
+        # set the return value
+        m_verify_metric.return_value = True
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('update_metric_request', value, vim_uuid)
+
+        # verify mocked methods called with correct parameters
+        m_verify_metric.assert_called_with(value)
+        m_publish_update_metric_response.assert_called_with(value, True)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_delete_metric_response')
+    def test_consume_delete_metric_request_key(self, m_publish_delete_metric_response):
+        """Test functionality of consume for delete_metric_request key"""
+
+        # Note: vROPS doesn't support deleting metric data
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "metric_name": "metric_details"}
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('delete_metric_request', value, vim_uuid)
+
+        # Check if publish method called with correct parameters
+        m_publish_delete_metric_response.assert_called_with(value)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_alarm')
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_rest_plugin')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_create_alarm_successful(self, m_get_vim_access_config,
+                                     m_configure_rest_plugin,
+                                     m_configure_alarm):
+        """ Test functionality of create alarm method-positive case"""
+
+        # Mock config_alarm_info
+        config_alarm_info = {"schema_version": 1.0,
+                             "schema_type": "create_alarm_request",
+                             "vim_type": "VMware",
+                             "vim_uuid": "1",
+                             "alarm_create_request": {"correlation_id": 1,
+                                                      "alarm_name": "CPU_Utilize_Threshold",
+                                                      "metric_name": "CPU_UTILIZATION",
+                                                      "tenant_uuid": "tenant_uuid",
+                                                      "resource_uuid": "resource_uuid",
+                                                      "description": "test_create_alarm",
+                                                      "severity": "CRITICAL",
+                                                      "operation": "GT",
+                                                      "threshold_value": 10,
+                                                      "unit": "%",
+                                                      "statistic": "AVERAGE"}}
+
+        # set return value to plugin uuid
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        m_configure_rest_plugin.retrun_value = "plugin_uuid"
+        m_configure_alarm.return_value = "alarm_uuid"
+
+        # call create alarm method under test
+        alarm_uuid = self.plugin_receiver.create_alarm(config_alarm_info)
+
+        # verify mocked methods get called with correct params
+        m_get_vim_access_config.assert_called_with(config_alarm_info['vim_uuid'])
+        m_configure_rest_plugin.assert_called_with()
+        m_configure_alarm.assert_called_with(config_alarm_info["alarm_create_request"])
+
+        # verify create alarm method returns the new alarm uuid
+        self.assertEqual(alarm_uuid, "alarm_uuid")
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_alarm')
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_rest_plugin')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_create_alarm_failed(self, m_get_vim_access_config,
+                                 m_configure_rest_plugin,
+                                 m_configure_alarm):
+        """ Test functionality of create alarm method negative case"""
+
+        # Mock config_alarm_info
+        config_alarm_info = {"schema_version": 1.0,
+                             "schema_type": "create_alarm_request",
+                             "vim_type": "VMware",
+                             "vim_uuid": "1",
+                             "alarm_create_request": {"correlation_id": 1,
+                                                      "alarm_name": "CPU_Utilize_Threshold",
+                                                      "metric_name": "CPU_UTILIZATION",
+                                                      "tenant_uuid": "tenant_uuid",
+                                                      "resource_uuid": "resource_uuid",
+                                                      "description": "test_create_alarm",
+                                                      "severity": "CRITICAL",
+                                                      "operation": "GT",
+                                                      "threshold_value": 10,
+                                                      "unit": "%",
+                                                      "statistic": "AVERAGE"}}
+
+        # set return value to plugin uuid and alarm_uuid to None
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+        m_configure_rest_plugin.retrun_value = "plugin_uuid"
+        m_configure_alarm.return_value = None
+
+        # call create alarm method under test
+        alarm_uuid = self.plugin_receiver.create_alarm(config_alarm_info)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(config_alarm_info['vim_uuid'])
+        m_configure_rest_plugin.assert_called_with()
+        m_configure_alarm.assert_called_with(config_alarm_info["alarm_create_request"])
+
+        # verify create alarm method returns None when failed
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'update_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_update_alarm_successful(self, m_get_vim_access_config, m_update_alarm_configuration):
+        """ Test functionality of update alarm method-positive case"""
+
+        # Mock update_alarm_info
+        update_alarm_info = {"schema_version": 1.0, "schema_type": "update_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_update_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_update_alarm_configuration.return_value = "alarm_uuid"
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # check update alarm gets called and returned correct value
+        ret_value = self.plugin_receiver.update_alarm(update_alarm_info)
+
+        # check mocked method get called with correct param
+        m_get_vim_access_config.assert_called_with(update_alarm_info['vim_uuid'])
+        m_update_alarm_configuration.assert_called_with(update_alarm_info["alarm_update_request"])
+
+        # check return value and passed values are correct
+        self.assertEqual(ret_value, "alarm_uuid")
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'update_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_update_alarm_failed(self, m_get_vim_access_config, m_update_alarm_configuration):
+        """ Test functionality of update alarm method negative case"""
+
+        # Mock update_alarm_info
+        update_alarm_info = {"schema_version": 1.0, "schema_type": "update_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_update_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_update_alarm_configuration.return_value = None
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # check update alarm gets called and returned correct value
+        ret_value = self.plugin_receiver.update_alarm(update_alarm_info)
+
+        # check mocked method get called with correct param
+        m_get_vim_access_config.assert_called_with(update_alarm_info['vim_uuid'])
+        m_update_alarm_configuration.assert_called_with(update_alarm_info["alarm_update_request"])
+
+        # check return value and passed values are correct
+        self.assertEqual(ret_value, None)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'delete_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_delete_alarm_successful(self, m_get_vim_access_config, m_delete_alarm_configuration):
+        """ Test functionality of delete alarm method-positive case"""
+
+        # Mock delete_alarm_info
+        delete_alarm_info = {"schema_version": 1.0, "schema_type": "delete_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_delete_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_delete_alarm_configuration.return_value = "alarm_uuid"
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # check delete alarm gets called and returned correct value
+        ret_value = self.plugin_receiver.delete_alarm(delete_alarm_info)
+
+        # check mocked method get called with correct param
+        m_get_vim_access_config.assert_called_with(delete_alarm_info['vim_uuid'])
+        m_delete_alarm_configuration.assert_called_with(delete_alarm_info["alarm_delete_request"])
+
+        # check return value and passed values are correct
+        self.assertEqual(ret_value, "alarm_uuid")
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'delete_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_delete_alarm_failed(self, m_get_vim_access_config, m_delete_alarm_configuration):
+        """ Test functionality of delete alarm method-negative case"""
+
+        # Mock update_alarm_info
+        delete_alarm_info = {"schema_version": 1.0, "schema_type": "delete_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_delete_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_delete_alarm_configuration.return_value = None
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # check delete alarm gets called and returned correct value
+        ret_value = self.plugin_receiver.delete_alarm(delete_alarm_info)
+
+        # check mocked method get called with correct param
+        m_get_vim_access_config.assert_called_with(delete_alarm_info['vim_uuid'])
+        m_delete_alarm_configuration.assert_called_with(delete_alarm_info["alarm_delete_request"])
+
+        # check return value to check failed status
+        self.assertEqual(ret_value, None)
+
+    def test_publish_create_alarm_status(self):
+        """ Test functionality of publish create alarm status method"""
+
+        # Mock config_alarm_info
+        config_alarm_info = {'vim_type': 'VMware', "vim_uuid": "1",
+                             'alarm_create_request': {
+                                 'threshold_value': 0,
+                                 'severity': 'CRITICAL',
+                                 'alarm_name': 'CPU_Utilization_Above_Threshold',
+                                 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                                 'correlation_id': 1234,
+                                 'statistic': 'AVERAGE',
+                                 'metric_name': 'CPU_UTILIZATION'}
+                             }
+
+        alarm_uuid = "xyz"
+
+        # call publish create status method under test
+        self.plugin_receiver.publish_create_alarm_status(alarm_uuid, config_alarm_info)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_update_alarm_status(self):
+        """ Test functionality of publish update alarm status method"""
+
+        # Mock update_alarm_info
+        update_alarm_info = {'vim_type': 'VMware',
+                             'vim_uuid': '1',
+                             'schema_type': 'update_alarm_request',
+                             'alarm_update_request': {'alarm_uuid': '6486e69',
+                                                      'correlation_id': 14203,
+                                                      'operation': 'GT'
+                                                      }
+                             }
+
+        alarm_uuid = "xyz"
+
+        # call publish update alarm status method under test
+        self.plugin_receiver.publish_update_alarm_status(alarm_uuid, update_alarm_info)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_delete_alarm_status(self):
+        """ Test functionality of publish delete alarm status method"""
+
+        # Mock delete_alarm_info
+        delete_alarm_info = {'vim_type': 'VMware',
+                             "vim_uuid": "1",
+                             'schema_type': 'delete_alarm_request',
+                             'alarm_delete_request': {'alarm_uuid': '6486e69',
+                                                      'correlation_id': 14203,
+                                                      'operation': 'GT'
+                                                      }
+                             }
+
+        alarm_uuid = "xyz"
+
+        # call publish delete alarm status method under test
+        self.plugin_receiver.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_metrics_data_status(self):
+        """ Test functionality of publish metrics data status method"""
+
+        # Mock metrics data
+        metrics_data = {
+            'vim_uuid': '1',
+            'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
+            'resource_uuid': 'e14b20', 'correlation_id': 14203,
+            'metrics_data': {'time_series': [15162011, 15162044],
+                             'metrics_series': [0.1166666671, 0.1266666650]},
+            'tenant_uuid': 123, 'unit': '%'
+        }
+
+        # call publish metrics data status method under test
+        self.plugin_receiver.publish_metrics_data_status(metrics_data)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'verify_metric_support')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_verify_metric_supported_metric(self, m_get_vim_access_config,
+                                            m_verify_metric_support):
+        """ Test functionality of verify metric method"""
+
+        # mock metric_info
+        metric_info = {'vim_uuid': '1',
+                       'metric_create_request': {'metric_unit': '%',
+                                                 'metric_name': 'CPU_UTILIZATION',
+                                                 'resource_uuid': 'e14b203'}}
+
+        # set mocked function return value to true
+        m_verify_metric_support.return_value = True
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call verify_metric method under test
+        ret_value = self.plugin_receiver.verify_metric(metric_info)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(metric_info['vim_uuid'])
+        m_verify_metric_support.assert_called_with(metric_info['metric_create_request'])
+
+        # verify the return value
+        self.assertEqual(ret_value, True)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'verify_metric_support')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_verify_metric_unsupported_metric(self, m_get_vim_access_config,
+                                              m_verify_metric_support):
+        """ Test functionality of verify metric method-negative case"""
+
+        # mock metric_info with an unsupported metric name
+        metric_info = {'vim_uuid': '1',
+                       'metric_create_request': {'metric_unit': '%',
+                                                 'metric_name': 'Invalid',
+                                                 'resource_uuid': 'e14b203'}}
+
+        # set mocked function return value to true
+        m_verify_metric_support.return_value = False
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call verify_metric method under test
+        ret_value = self.plugin_receiver.verify_metric(metric_info)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(metric_info['vim_uuid'])
+        m_verify_metric_support.assert_called_with(metric_info['metric_create_request'])
+
+        # verify the return value
+        self.assertEqual(ret_value, False)
+
+    def test_publish_create_metric_response(self):
+        """ Test functionality of publish create metric response method"""
+
+        # Mock metric_info
+        metric_info = {
+            'vim_uuid': '1',
+            'vim_type': 'VMware',
+            'correlation_id': 14203,
+            'schema_type': 'create_metric_request',
+            'metric_create_request': {
+                'resource_uuid': '6486e69',
+                'metric_name': 'CPU_UTILIZATION',
+                'metric_unit': '%'
+            }
+        }
+
+        metric_status = True
+
+        # call publish create metric method under test
+        self.plugin_receiver.publish_create_metric_response(metric_info, metric_status)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_update_metric_response(self):
+        """ Test functionality of publish update metric response method"""
+
+        # Mock metric_info
+        metric_info = {
+            'vim_uuid': '1',
+            'vim_type': 'VMware',
+            'correlation_id': 14203,
+            'schema_type': 'update_metric_request',
+            'metric_create': {
+                'resource_uuid': '6486e69',
+                'metric_name': 'CPU_UTILIZATION',
+                'metric_unit': '%'
+            }
+        }
+
+        metric_status = True
+
+        # call publish update metric method under test
+        self.plugin_receiver.publish_update_metric_response(metric_info, metric_status)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_delete_metric_response(self):
+        """ Test functionality of publish delete metric response method"""
+
+        # Mock metric_info
+        metric_info = {'vim_uuid': '1', 'vim_type': 'VMware', 'correlation_id': 14203,
+                       'metric_uuid': 'e14b203c', 'resource_uuid': '6486e69',
+                       'metric_name': 'CPU_UTILIZATION',
+                       'schema_type': 'delete_metric_request'}
+
+        # call publish delete metric method under test; vROPS doesn't support
+        # deleting metrics, so a success response is simply returned
+        self.plugin_receiver.publish_delete_metric_response(metric_info)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'get_triggered_alarms_list')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_list_alarms(self, m_get_vim_access_config, m_get_triggered_alarms_list):
+        """ Test functionality of list alarms method"""
+
+        # Mock list alarm input
+        list_alarm_input = {
+            'vim_uuid': '1',
+            'vim_type': 'VMware',
+            'alarm_list_request': {
+                'severity': 'CRITICAL',
+                'correlation_id': 14203,
+                'alarm_name': 'CPU_Utilization_Above_Threshold',
+                'resource_uuid': 'd14b203c'}}
+
+        # set return value to mocked method
+        m_return = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
+                     'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                     'cancel_date': '0000-00-00T00:00:00', 'alarm_instance_uuid': 'd9e3bc84',
+                     'alarm_uuid': '5714977d', 'vim_type': 'VMware',
+                     'start_date': '2018-01-12T08:34:05'},
+                    {'status': 'CANCELED', 'update_date': '2017-12-20T09:37:57',
+                     'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                     'cancel_date': '2018-01-12T06:49:19', 'alarm_instance_uuid': 'd3bbeef6',
+                     'alarm_uuid': '7ba1bf3e', 'vim_type': 'VMware',
+                     'start_date': '2017-12-20T09:37:57'}]
+        m_get_triggered_alarms_list.return_value = m_return
+
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call list alarms method under test
+        return_value = self.plugin_receiver.list_alarms(list_alarm_input)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(list_alarm_input['vim_uuid'])
+        m_get_triggered_alarms_list.assert_called_with(list_alarm_input['alarm_list_request'])
+
+        # verify list alarm method returns correct list
+        self.assertEqual(return_value, m_return)
+
+    def test_publish_list_alarm_response(self):
+        """ Test functionality of publish list alarm response method"""
+
+        # Mock list alarm input
+        msg_key = 'list_alarm_response'
+        topic = 'alarm_response'
+        list_alarm_input = {'vim_uuid': '1',
+                            'vim_type': 'VMware',
+                            'alarm_list_request': {
+                                'severity': 'CRITICAL',
+                                'correlation_id': 14203,
+                                'alarm_name': 'CPU_Utilization_Above_Threshold',
+                                'resource_uuid': 'd14b203c'}}
+
+        triggered_alarm_list = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
+                                 'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                                 'cancel_date': '0000-00-00T00:00:00',
+                                 'start_date': '2018-01-12T08:34:05',
+                                 'alarm_instance_uuid': 'd9e3bc84',
+                                 'alarm_uuid': '5714977d',
+                                 'vim_type': 'VMware'
+                                 }]
+
+        # call publish list alarm response method under test
+        response = self.plugin_receiver.publish_list_alarm_response(triggered_alarm_list, list_alarm_input)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_access_update_response(self):
+        """ Test functionality of publish access update response method"""
+
+        # Mock required inputs
+        access_update_status = True
+        access_info_req = {'vim_type': 'VMware',
+                           'vim_uuid': '1',
+                           'access_config': {'vrops_password': 'vmware',
+                                             'vcloud-site': 'https://192.169.241.105',
+                                             'vrops_user': 'Admin', 'correlation_id': 14203,
+                                             'tenant_id': 'Org2'}
+                           }
+
+        # call publish access update response method under test
+        response = self.plugin_receiver.publish_access_update_response(access_update_status, access_info_req)
+
+        # verify mocked method called with correct params
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
+    def test_update_access_credentials_successful(self, m_write_access_config):
+        """ Test functionality of update access credentials-positive case"""
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443',
+                       'vcenter_user': 'admin', 'vcenter_password': 'vmware',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # Mock return values
+        expected_status = m_write_access_config.return_value = True
+
+        # call publish update access credentials method under test
+        actual_status = self.plugin_receiver.update_access_credentials(access_info)
+
+        # check write_access_config called with correct params
+        m_write_access_config.assert_called_with(access_info)
+
+        # verify update access credentials returns correct status
+        self.assertEqual(expected_status, actual_status)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
+    def test_update_access_credentials_less_config_params(self, m_write_access_config):
+        """ Test functionality of update access credentials-negative case"""
+
+        # Mock access_info
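+        # ('vcenter_password' is omitted so that validation fails before any write)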
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443', 'vcenter_user': 'admin',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # Mock return values
+        expected_status = m_write_access_config.return_value = False
+
+        # call publish update access credentials method under test
+        actual_status = self.plugin_receiver.update_access_credentials(access_info)
+
+        # check if mocked method not called
+        m_write_access_config.assert_not_called()
+
+        # verify update access credentials returns correct status
+        self.assertEqual(expected_status, actual_status)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
+    def test_update_access_credentials_failed(self, m_write_access_config):
+        """ Test functionality of update access credentials-failed case """
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443',
+                       'vcenter_user': 'admin', 'vcenter_password': 'vmware',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # Mock return values
+        expected_status = m_write_access_config.return_value = False
+
+        # call publish update access credentials method under test
+        actual_status = self.plugin_receiver.update_access_credentials(access_info)
+
+        # check write_access_config called with correct params
+        m_write_access_config.assert_called_with(access_info)
+
+        # verify update access credentials returns correct status
+        self.assertEqual(expected_status, actual_status)
+
+    def test_write_access_config_successful(self):
+        """ Test functionality of write access config method-positive case"""
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443',
+                       'vcenter_user': 'admin', 'vcenter_password': 'vmware',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # call write access config method under test
+        actual_status = self.plugin_receiver.write_access_config(access_info)
+
+        # verify write access config returns correct status
+        self.assertEqual(True, actual_status)
+
+    def test_write_access_config_failed(self):
+        """ Test functionality of write access config method-negative case"""
+
+        # Mock access_info
+        access_info = []  # provided incorrect info to generate error
+
+        # call write access config method under test
+        actual_status = self.plugin_receiver.write_access_config(access_info)
+
+        # verify write access config returns correct status
+        self.assertEqual(False, actual_status)
+
+    @mock.patch.object(monPluginRec.AuthManager, 'get_credentials')
+    def test_get_vim_access_config(self, m_get_credentials):
+        """ Test functionality of get_vim_access_config method-positive case"""
+
+        # Mock vim_uuid & access_info
+        vim_uuid = '1'
+        vim_details = VimCredentials()
+        vim_details.name = "vrops_vcd82"
+        vim_details.password = "passwd"
+        vim_details.tenant_name = "MANO-VDC"
+        vim_details.type = "VMware"
+        vim_details.url = "https://10.10.1.1"
+        vim_details.user = "admin"
+        vim_details.uuid = "1"
+        vim_details.config = '{"orgname": "MANO-Org", "tenant_id": "MANO-VDC",\
+                        "admin_username": "administrator","admin_password": "vcd_pwd",\
+                        "vrops_user": "admin", "vrops_password": "vrops_pwd",\
+                        "vrops_site": "https://10.10.1.2","nsx_user": "admin",\
+                        "nsx_manager": "https://10.10.1.3", "nsx_password":"nsx_pwd",\
+                        "sdn_controller": "None", "sdn_port_mapping": "None",\
+                        "vcenter_ip": "10.10.1.4", "vcenter_user": "admin@vsphere.local",\
+                        "vcenter_password": "vcenter_pwd", "vcenter_port": "443"}'
+        m_get_credentials.return_value = vim_details
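+        # expected flat config: VimCredentials attributes (renamed where needed) merged
+        # with the keys parsed from the JSON 'config' blob above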
+        expected_config = {'vrops_password': 'vrops_pwd', 'vcenter_password': 'vcenter_pwd',
+                           'name': 'vrops_vcd82', 'org_user': 'admin',
+                           'org_password': 'passwd', 'nsx_user': 'admin', 'vim_tenant_name': 'MANO-VDC',
+                           'admin_username': 'administrator', 'vcenter_port': '443',
+                           'vim_url': 'https://10.10.1.1', 'orgname': 'MANO-Org',
+                           'admin_password': 'vcd_pwd', 'vrops_user': 'admin', 'vcenter_ip': '10.10.1.4',
+                           'vrops_site': 'https://10.10.1.2', 'nsx_manager': 'https://10.10.1.3',
+                           'nsx_password': 'nsx_pwd', 'vim_type': 'VMware', 'vim_uuid': '1',
+                           'vcenter_user': 'admin@vsphere.local'}
+
+        # call get_vim_access_config method under test
+        actual_config = self.plugin_receiver.get_vim_access_config('1')
+
+        # verify that mocked method is called
+        m_get_credentials.assert_called_with(vim_uuid)
+
+        # Verify return value with expected value
+        self.assertEqual(expected_config, actual_config)
+
+# For testing purpose
+# if __name__ == '__main__':
+#    unittest.main()
diff --git a/osm_mon/tests/plugins/__init__.py b/osm_mon/tests/plugins/__init__.py
new file mode 100644 (file)
index 0000000..2d39b96
--- /dev/null
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index b10e7f9..5940b95 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -25,7 +25,7 @@
 # test suite on all supported python versions. To use it, "pip install tox"
 # and then run "tox" from this directory.
 [tox]
-envlist = py3, flake8
+envlist = py3
 toxworkdir={homedir}/.tox
 
 [testenv]