Merge "Set node to run on label docker"
author: Mike Marchetti <mmarchetti@sandvine.com>
Thu, 25 Oct 2018 13:06:21 +0000 (15:06 +0200)
committer: Gerrit Code Review <root@osm.etsi.org>
Thu, 25 Oct 2018 13:06:21 +0000 (15:06 +0200)
180 files changed:
Dockerfile
LICENSE [new file with mode: 0644]
Makefile [new file with mode: 0644]
debian/python3-osm-mon.postinst [new file with mode: 0644]
devops-stages/stage-build.sh
docker/Dockerfile
docker/scripts/runInstall.sh
osm_mon/cmd/__init__.py [new file with mode: 0644]
osm_mon/cmd/mon_prometheus_exporter.py [new file with mode: 0644]
osm_mon/collector/__init__.py [new file with mode: 0644]
osm_mon/collector/collector.py [new file with mode: 0644]
osm_mon/collector/prometheus_exporter.py [new file with mode: 0644]
osm_mon/common/__init__.py [new file with mode: 0644]
osm_mon/common/common_db_client.py [new file with mode: 0644]
osm_mon/core/message_bus/common_consumer.py
osm_mon/core/message_bus/consumer.py [changed mode: 0755->0644]
osm_mon/core/message_bus/producer.py [changed mode: 0755->0644]
osm_mon/core/models/acknowledge_alarm.json [deleted file]
osm_mon/core/models/acknowledge_alarm_req.json [new file with mode: 0644]
osm_mon/core/models/create_alarm_resp.json
osm_mon/core/models/create_metric_resp.json
osm_mon/core/models/delete_alarm_req.json
osm_mon/core/models/delete_alarm_resp.json
osm_mon/core/models/delete_metric_req.json
osm_mon/core/models/delete_metric_resp.json
osm_mon/core/models/list_alarm_req.json
osm_mon/core/models/list_alarm_resp.json
osm_mon/core/models/list_metric_req.json
osm_mon/core/models/list_metric_resp.json
osm_mon/core/models/notify_alarm.json
osm_mon/core/models/read_metric_data_req.json
osm_mon/core/models/read_metric_data_resp.json
osm_mon/core/models/update_alarm_req.json
osm_mon/core/models/update_alarm_resp.json
osm_mon/core/models/update_metric_req.json
osm_mon/core/models/update_metric_resp.json
osm_mon/core/settings.py
osm_mon/plugins/CloudWatch/access_credentials.py
osm_mon/plugins/CloudWatch/connection.py
osm_mon/plugins/CloudWatch/metric_alarms.py
osm_mon/plugins/CloudWatch/metrics.py
osm_mon/plugins/CloudWatch/plugin_alarm.py
osm_mon/plugins/CloudWatch/plugin_metric.py
osm_mon/plugins/OpenStack/Aodh/alarm_handler.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Aodh/alarming.py [deleted file]
osm_mon/plugins/OpenStack/Aodh/notifier.py
osm_mon/plugins/OpenStack/Gnocchi/metric_handler.py [new file with mode: 0644]
osm_mon/plugins/OpenStack/Gnocchi/metrics.py [deleted file]
osm_mon/plugins/OpenStack/response.py
osm_mon/plugins/vRealiseOps/kafka_consumer_vrops.py [deleted file]
osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py
osm_mon/plugins/vRealiseOps/plugin_receiver.py
osm_mon/plugins/vRealiseOps/vROPs_Webservice/install.sh
osm_mon/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice
osm_mon/plugins/vRealiseOps/vrops_config.xml
osm_mon/test/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/operation_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/operation_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/statistic_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_alarm/statistic_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/name_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/name_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/operation_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/operation_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/statistic_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/statistic_valid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json [deleted file]
osm_mon/test/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json [deleted file]
osm_mon/test/CloudWatch/unit_tests_alarms.py [deleted file]
osm_mon/test/CloudWatch/unit_tests_metrics.py [deleted file]
osm_mon/test/OpenStack/__init__.py [deleted file]
osm_mon/test/OpenStack/integration/__init__.py [deleted file]
osm_mon/test/OpenStack/integration/test_alarm_integration.py [deleted file]
osm_mon/test/OpenStack/integration/test_metric_integration.py [deleted file]
osm_mon/test/OpenStack/integration/test_notify_alarm.py [deleted file]
osm_mon/test/OpenStack/integration/test_vim_account.py [deleted file]
osm_mon/test/OpenStack/unit/__init__.py [deleted file]
osm_mon/test/OpenStack/unit/test_alarm_req.py [deleted file]
osm_mon/test/OpenStack/unit/test_alarming.py [deleted file]
osm_mon/test/OpenStack/unit/test_common.py [deleted file]
osm_mon/test/OpenStack/unit/test_metric_calls.py [deleted file]
osm_mon/test/OpenStack/unit/test_metric_req.py [deleted file]
osm_mon/test/OpenStack/unit/test_notifier.py [deleted file]
osm_mon/test/OpenStack/unit/test_responses.py [deleted file]
osm_mon/test/OpenStack/unit/test_settings.py [deleted file]
osm_mon/test/VMware/__init__.py [deleted file]
osm_mon/test/VMware/test_mon_plugin_vrops.py [deleted file]
osm_mon/test/VMware/test_plugin_receiver.py [deleted file]
osm_mon/test/collector/__init__.py [new file with mode: 0644]
osm_mon/test/collector/test_collector.py [new file with mode: 0644]
osm_mon/test/core/kafka_test.py [deleted file]
osm_mon/test/core/test_common_consumer.py
osm_mon/test/core/test_producer.py [deleted file]
osm_mon/test/functional/__init__.py [deleted file]
osm_mon/test/plugins/CloudWatch/__init__.py [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/operation_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/operation_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/statistic_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/statistic_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/name_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/name_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/operation_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/operation_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/statistic_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/statistic_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/unit_tests_alarms.py [new file with mode: 0644]
osm_mon/test/plugins/CloudWatch/unit_tests_metrics.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/__init__.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/integration/__init__.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/integration/test_alarm_integration.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/integration/test_metric_integration.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/integration/test_notify_alarm.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/integration/test_vim_account.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/__init__.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_alarm_req.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_alarming.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_common.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_metric_calls.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_metric_req.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_notifier.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_responses.py [new file with mode: 0644]
osm_mon/test/plugins/OpenStack/unit/test_settings.py [new file with mode: 0644]
osm_mon/test/plugins/VMware/__init__.py [new file with mode: 0644]
osm_mon/test/plugins/VMware/test_mon_plugin_vrops.py [new file with mode: 0644]
osm_mon/test/plugins/VMware/test_plugin_receiver.py [new file with mode: 0644]
osm_mon/test/plugins/__init__.py [new file with mode: 0644]
requirements.txt
setup.py
stdeb.cfg [new file with mode: 0644]

diff --git a/Dockerfile b/Dockerfile
index 3fa3cfd..153a6d2 100644 (file)
@@ -24,7 +24,6 @@
 
 FROM ubuntu:16.04
 RUN  apt-get update && \
-  DEBIAN_FRONTEND=noninteractive apt-get --yes install git tox make python python-pip python3 python3-pip debhelper && \
-  DEBIAN_FRONTEND=noninteractive apt-get --yes install wget python-dev python-software-properties python-stdeb && \
-  DEBIAN_FRONTEND=noninteractive apt-get --yes install default-jre libmysqlclient-dev && \
-  DEBIAN_FRONTEND=noninteractive apt-get --yes install libmysqlclient-dev libxml2 python3-all
+  DEBIAN_FRONTEND=noninteractive apt-get --yes install git tox make python-all python3 python3-pip debhelper wget && \
+  DEBIAN_FRONTEND=noninteractive apt-get --yes install libmysqlclient-dev libxml2 python3-all && \
+  DEBIAN_FRONTEND=noninteractive pip3 install -U setuptools setuptools-version-command stdeb
diff --git a/LICENSE b/LICENSE
new file mode 100644 (file)
index 0000000..8dada3e
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {yyyy} {name of copyright owner}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/Makefile b/Makefile
new file mode 100644 (file)
index 0000000..99de6f4
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,9 @@
+all: clean package
+
+clean:
+       rm -rf dist deb_dist osm_mon-*.tar.gz osm_mon.egg-info .eggs
+
+package:
+       python3 setup.py --command-packages=stdeb.command sdist_dsc
+       cp debian/python3-osm-mon.postinst deb_dist/osm-mon*/debian
+       cd deb_dist/osm-mon*/  && dpkg-buildpackage -rfakeroot -uc -us
\ No newline at end of file
diff --git a/debian/python3-osm-mon.postinst b/debian/python3-osm-mon.postinst
new file mode 100644 (file)
index 0000000..6f9b9c1
--- /dev/null
+++ b/debian/python3-osm-mon.postinst
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+echo "Installing python dependencies via pip..."
+pip3 install kafka-python==1.4.*
+pip3 install requests==2.18.*
+pip3 install cherrypy==14.0.*
+pip3 install jsmin==2.2.*
+pip3 install jsonschema==2.6.*
+pip3 install python-keystoneclient==3.15.*
+pip3 install boto==2.48
+pip3 install python-cloudwatchlogs-logging==0.0.3
+pip3 install py-cloudwatch==0.0.1
+pip3 install pyvcloud==19.1.1
+pip3 install pyopenssl==17.5.*
+pip3 install six==1.11.*
+pip3 install bottle==0.12.*
+pip3 install peewee==3.1.*
+pip3 install pyyaml==3.*
+pip3 install prometheus_client==0.4.*
+echo "Installation of python dependencies finished"
\ No newline at end of file
diff --git a/devops-stages/stage-build.sh b/devops-stages/stage-build.sh
index 4251b1c..8a8d332 100755 (executable)
@@ -23,7 +23,4 @@
 #__date__   = "14/Sep/2017"
 
 #!/bin/bash
-rm -rf deb_dist
-rm -rf dist
-rm -rf osm_mon.egg-info
-tox -e build
+make
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 26845d8..be93541 100644 (file)
@@ -42,7 +42,11 @@ ENV DATABASE sqlite:///mon_sqlite.db
 ENV OS_NOTIFIER_URI localhost:8662
 ENV OS_DEFAULT_GRANULARITY 300
 ENV REQUEST_TIMEOUT 10
+ENV OSMMON_LOG_LEVEL INFO
+ENV OSMMON_KAFKA_LOG_LEVEL INFO
+ENV OSMMON_VCA_HOST localhost
+ENV OSMMON_VCA_SECRET secret
 
-EXPOSE 8662
+EXPOSE 8662 8000
 
 CMD /bin/bash mon/docker/scripts/runInstall.sh
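The four OSMMON_* variables added above are consumed at runtime through osm_mon.core.settings.Config (listed among the changed files but not shown in this diff). A rough, hypothetical sketch of how such environment-driven defaults are typically resolved follows; the real Config class may differ:

import os

# Hypothetical illustration only: resolve MON settings from the environment,
# falling back to the defaults baked into the Dockerfile above.
DEFAULTS = {
    'OSMMON_LOG_LEVEL': 'INFO',
    'OSMMON_KAFKA_LOG_LEVEL': 'INFO',
    'OSMMON_VCA_HOST': 'localhost',
    'OSMMON_VCA_SECRET': 'secret',
}

def get_setting(name: str) -> str:
    # Values passed with `docker run -e NAME=value` override the defaults.
    return os.environ.get(name, DEFAULTS[name])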
diff --git a/docker/scripts/runInstall.sh b/docker/scripts/runInstall.sh
index a5231ba..29ab3c6 100755 (executable)
@@ -22,5 +22,6 @@
 ##
 /bin/bash /mon/osm_mon/plugins/vRealiseOps/vROPs_Webservice/install.sh
 python3 /mon/osm_mon/plugins/OpenStack/Aodh/notifier.py &
-python3 /mon/osm_mon/core/message_bus/common_consumer.py
+python3 /mon/osm_mon/core/message_bus/common_consumer.py &
+osm-mon-prometheus-exporter
 
diff --git a/osm_mon/cmd/__init__.py b/osm_mon/cmd/__init__.py
new file mode 100644 (file)
index 0000000..d81308a
--- /dev/null
+++ b/osm_mon/cmd/__init__.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
diff --git a/osm_mon/cmd/mon_prometheus_exporter.py b/osm_mon/cmd/mon_prometheus_exporter.py
new file mode 100644 (file)
index 0000000..f89a28d
--- /dev/null
+++ b/osm_mon/cmd/mon_prometheus_exporter.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+from osm_mon.collector.prometheus_exporter import MonPrometheusExporter
+
+
+def main():
+    cfg = Config.instance()
+
+    root = logging.getLogger()
+    root.setLevel(logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+    ch = logging.StreamHandler(sys.stdout)
+    ch.setLevel(logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', '%m/%d/%Y %I:%M:%S %p')
+    ch.setFormatter(formatter)
+    root.addHandler(ch)
+
+    kafka_logger = logging.getLogger('kafka')
+    kafka_logger.setLevel(logging.getLevelName(cfg.OSMMON_KAFKA_LOG_LEVEL))
+    kafka_handler = logging.StreamHandler(sys.stdout)
+    kafka_handler.setFormatter(formatter)
+    kafka_logger.addHandler(kafka_handler)
+
+    log = logging.getLogger(__name__)
+    log.info("Starting MON Prometheus Exporter...")
+    log.info("Config: %s", vars(cfg))
+    exporter = MonPrometheusExporter()
+    exporter.run()
+
+
+if __name__ == '__main__':
+    main()
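The runInstall.sh change above launches an osm-mon-prometheus-exporter command. The mapping from that command to the main() defined here is expected to live in setup.py (changed in this commit but not shown in this diff); the following entry-point excerpt is therefore an assumption, not the actual file contents:

# Hypothetical setup.py excerpt wiring the console script to main() above.
from setuptools import setup, find_packages

setup(
    name='osm_mon',
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'osm-mon-prometheus-exporter = osm_mon.cmd.mon_prometheus_exporter:main',
        ],
    },
)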
diff --git a/osm_mon/collector/__init__.py b/osm_mon/collector/__init__.py
new file mode 100644 (file)
index 0000000..8fc00af
--- /dev/null
+++ b/osm_mon/collector/__init__.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
\ No newline at end of file
diff --git a/osm_mon/collector/collector.py b/osm_mon/collector/collector.py
new file mode 100644 (file)
index 0000000..bf485ff
--- /dev/null
+++ b/osm_mon/collector/collector.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import json
+import logging
+import random
+import re
+import uuid
+from string import ascii_lowercase
+
+from kafka import KafkaProducer, KafkaConsumer
+from n2vc.vnf import N2VC
+from prometheus_client.core import GaugeMetricFamily
+
+from osm_mon.common.common_db_client import CommonDbClient
+from osm_mon.core.settings import Config
+
+log = logging.getLogger(__name__)
+
+
+class MonCollector:
+    def __init__(self):
+        cfg = Config.instance()
+        self.kafka_server = cfg.BROKER_URI
+        self.common_db_client = CommonDbClient()
+        self.n2vc = N2VC(server=cfg.OSMMON_VCA_HOST, secret=cfg.OSMMON_VCA_SECRET)
+        self.producer = KafkaProducer(bootstrap_servers=self.kafka_server,
+                                      key_serializer=str.encode,
+                                      value_serializer=str.encode)
+        self.consumer = KafkaConsumer(bootstrap_servers=self.kafka_server,
+                                      key_deserializer=bytes.decode,
+                                      value_deserializer=bytes.decode,
+                                      consumer_timeout_ms=10000,
+                                      group_id='mon-collector-' + str(uuid.uuid4()))
+        self.consumer.subscribe(['metric_response'])
+
+    async def collect_metrics(self):
+        """
+        Collects vdu metrics. These can be vim and/or n2vc metrics.
+        It checks for monitoring-params or metrics inside vdu section of vnfd, then collects the metric accordingly.
+        If vim related, it sends a metric read request through Kafka, to be handled by mon-proxy.
+        If n2vc related, it uses the n2vc client to obtain the readings.
+        :return: lists of metrics
+        """
+        # TODO(diazb): Remove dependencies on prometheus_client
+        log.debug("collect_metrics")
+        metrics = {}
+        try:
+            vnfrs = self.common_db_client.get_vnfrs()
+            vca_model_name = 'default'
+            for vnfr in vnfrs:
+                nsr_id = vnfr['nsr-id-ref']
+                vnfd = self.common_db_client.get_vnfd(vnfr['vnfd-id'])
+                for vdur in vnfr['vdur']:
+                    # This avoids errors when vdur records have not been completely filled
+                    if 'name' not in vdur:
+                        continue
+                    vdu = next(
+                        filter(lambda vdu: vdu['id'] == vdur['vdu-id-ref'], vnfd['vdu'])
+                    )
+                    vnf_member_index = vnfr['member-vnf-index-ref']
+                    vdu_name = vdur['name']
+                    if 'monitoring-param' in vdu:
+                        for param in vdu['monitoring-param']:
+                            metric_name = param['nfvi-metric']
+                            payload = await self._generate_read_metric_payload(metric_name, nsr_id, vdu_name,
+                                                                               vnf_member_index)
+                            self.producer.send(topic='metric_request', key='read_metric_data_request',
+                                               value=json.dumps(payload))
+                            self.producer.flush()
+                            for message in self.consumer:
+                                if message.key == 'read_metric_data_response':
+                                    content = json.loads(message.value)
+                                    if content['correlation_id'] == payload['correlation_id']:
+                                        if len(content['metrics_data']['metrics_series']):
+                                            metric_reading = content['metrics_data']['metrics_series'][-1]
+                                            if metric_name not in metrics.keys():
+                                                metrics[metric_name] = GaugeMetricFamily(
+                                                    metric_name,
+                                                    'OSM metric',
+                                                    labels=['ns_id', 'vnf_member_index', 'vdu_name']
+                                                )
+                                            metrics[metric_name].add_metric([nsr_id, vnf_member_index, vdu_name],
+                                                                            metric_reading)
+                                        break
+                    if 'vdu-configuration' in vdu and 'metrics' in vdu['vdu-configuration']:
+                        vnf_name_vca = await self._generate_vca_vdu_name(vdu_name)
+                        vnf_metrics = await self.n2vc.GetMetrics(vca_model_name, vnf_name_vca)
+                        log.debug('VNF Metrics: %s', vnf_metrics)
+                        for vnf_metric_list in vnf_metrics.values():
+                            for vnf_metric in vnf_metric_list:
+                                log.debug("VNF Metric: %s", vnf_metric)
+                                if vnf_metric['key'] not in metrics.keys():
+                                    metrics[vnf_metric['key']] = GaugeMetricFamily(
+                                        vnf_metric['key'],
+                                        'OSM metric',
+                                        labels=['ns_id', 'vnf_member_index', 'vdu_name']
+                                    )
+                                metrics[vnf_metric['key']].add_metric([nsr_id, vnf_member_index, vdu_name],
+                                                                      float(vnf_metric['value']))
+            log.debug("metric.values = %s", metrics.values())
+            return metrics.values()
+        except Exception as e:
+            log.exception("Error collecting metrics")
+            raise e
+
+    @staticmethod
+    async def _generate_vca_vdu_name(vdu_name) -> str:
+        """
+        Replaces all digits in vdu name for corresponding ascii characters. This is the format required by N2VC.
+        :param vdu_name: Vdu name according to the vdur
+        :return: Name with digits replaced with characters
+        """
+        vnf_name_vca = ''.join(
+            ascii_lowercase[int(char)] if char.isdigit() else char for char in vdu_name)
+        vnf_name_vca = re.sub(r'-[a-z]+$', '', vnf_name_vca)
+        return vnf_name_vca
+
+    @staticmethod
+    async def _generate_read_metric_payload(metric_name, nsr_id, vdu_name, vnf_member_index) -> dict:
+        """
+        Builds JSON payload for asking for a metric measurement in MON. It follows the model defined in core.models.
+        :param metric_name: OSM metric name (e.g.: cpu_utilization)
+        :param nsr_id: NSR ID
+        :param vdu_name: Vdu name according to the vdur
+        :param vnf_member_index: Index of the VNF in the NS according to the vnfr
+        :return: JSON payload as dict
+        """
+        cor_id = random.randint(1, 10e7)
+        payload = {
+            'correlation_id': cor_id,
+            'metric_name': metric_name,
+            'ns_id': nsr_id,
+            'vnf_member_index': vnf_member_index,
+            'vdu_name': vdu_name,
+            'collection_period': 1,
+            'collection_unit': 'DAY',
+        }
+        return payload
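The naming helper above is fully determined by the code, so a small usage sketch can make the transformation concrete (the sample vdur name is made up, and the module's imports — kafka, n2vc, prometheus_client — are assumed to be installed):

import asyncio

from osm_mon.collector.collector import MonCollector

# Digits are mapped to letters (0->a, 1->b, ...) and a trailing '-<letters>'
# group is stripped, which is the naming scheme N2VC expects.
loop = asyncio.get_event_loop()
name = loop.run_until_complete(
    MonCollector._generate_vca_vdu_name('web_vnfd-1-webvdu-2'))  # hypothetical vdur name
print(name)  # -> 'web_vnfd-b-webvdu' (digits replaced, trailing '-c' removed)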
diff --git a/osm_mon/collector/prometheus_exporter.py b/osm_mon/collector/prometheus_exporter.py
new file mode 100644 (file)
index 0000000..d890337
--- /dev/null
+++ b/osm_mon/collector/prometheus_exporter.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import asyncio
+import logging
+import threading
+import time
+
+from prometheus_client import start_http_server
+from prometheus_client.core import REGISTRY
+
+from osm_mon.collector.collector import MonCollector
+from osm_mon.core.settings import Config
+
+log = logging.getLogger(__name__)
+
+
+class MonPrometheusExporter:
+
+    def __init__(self):
+        self.custom_collector = CustomCollector()
+
+    def _run_exporter(self):
+        log.debug('_run_exporter')
+        REGISTRY.register(self.custom_collector)
+        log.info("Starting MON Prometheus exporter at port %s", 8000)
+        start_http_server(8000)
+
+    def run(self):
+        log.debug('_run')
+        collector_thread = threading.Thread(target=self._run_collector)
+        collector_thread.setDaemon(True)
+        collector_thread.start()
+        exporter_thread = threading.Thread(target=self._run_exporter)
+        exporter_thread.setDaemon(True)
+        exporter_thread.start()
+        collector_thread.join()
+        exporter_thread.join()
+
+    def _run_collector(self):
+        log.debug('_run_collector')
+        asyncio.set_event_loop(asyncio.new_event_loop())
+        mon_collector = MonCollector()
+        cfg = Config.instance()
+        while True:
+            log.debug('_run_collector_loop')
+            metrics = asyncio.get_event_loop().run_until_complete(mon_collector.collect_metrics())
+            self.custom_collector.metrics = metrics
+            time.sleep(cfg.OSMMON_COLLECTOR_INTERVAL)
+
+
+class CustomCollector(object):
+
+    def __init__(self):
+        self.mon_collector = MonCollector()
+        self.metrics = []
+
+    def describe(self):
+        log.debug('describe')
+        return []
+
+    def collect(self):
+        log.debug("collect")
+        return self.metrics
+
+
+if __name__ == '__main__':
+    MonPrometheusExporter().run()
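Once running, the exporter serves the standard Prometheus text format on port 8000 (also newly exposed in docker/Dockerfile above). A quick, hypothetical smoke test, assuming the exporter is reachable on localhost:

import urllib.request

# Fetch the scrape endpoint and print the OSM metric samples.
with urllib.request.urlopen('http://localhost:8000/metrics') as resp:
    for line in resp.read().decode().splitlines():
        if not line.startswith('#'):
            # e.g. cpu_utilization{ns_id="...",vnf_member_index="...",vdu_name="..."} 12.3
            print(line)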
diff --git a/osm_mon/common/__init__.py b/osm_mon/common/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/osm_mon/common/common_db_client.py b/osm_mon/common/common_db_client.py
new file mode 100644 (file)
index 0000000..c6237ee
--- /dev/null
+++ b/osm_mon/common/common_db_client.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+from osm_common import dbmongo
+
+from osm_mon.core.settings import Config
+
+
+class CommonDbClient:
+    def __init__(self):
+        cfg = Config.instance()
+        self.common_db = dbmongo.DbMongo()
+        self.common_db.db_connect({'host': cfg.MONGO_URI.split(':')[0],
+                                   'port': int(cfg.MONGO_URI.split(':')[1]),
+                                   'name': 'osm'})
+
+    def get_vnfr(self, nsr_id: str, member_index: int):
+        vnfr = self.common_db.get_one("vnfrs",
+                                      {"nsr-id-ref": nsr_id, "member-vnf-index-ref": str(member_index)})
+        return vnfr
+
+    def get_vnfrs(self, nsr_id: str):
+        return [self.get_vnfr(nsr_id, member['member-vnf-index']) for member in
+                self.get_nsr(nsr_id)['nsd']['constituent-vnfd']]
+
+    def get_vnfrs(self):
+        return self.common_db.get_list('vnfrs')
+
+    def get_vnfd(self, vnfd_id: str):
+        vnfr = self.common_db.get_one("vnfds",
+                                      {"_id": vnfd_id})
+        return vnfr
+
+    def get_nsr(self, nsr_id: str):
+        nsr = self.common_db.get_one("nsrs",
+                                     {"id": nsr_id})
+        return nsr
+
+    def get_nslcmop(self, nslcmop_id):
+        nslcmop = self.common_db.get_one("nslcmops",
+                                         {"_id": nslcmop_id})
+        return nslcmop
+
+    def get_vdur(self, nsr_id, member_index, vdu_name):
+        vnfr = self.get_vnfr(nsr_id, member_index)
+        for vdur in vnfr['vdur']:
+            if vdur['name'] == vdu_name:
+                return vdur
+        raise ValueError('vdur not found for nsr-id %s, member_index %s and vdu_name %s', nsr_id, member_index,
+                         vdu_name)
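A short usage sketch of the new CommonDbClient, assuming a reachable MongoDB configured through MONGO_URI; the record identifiers below are placeholders:

from osm_mon.common.common_db_client import CommonDbClient

client = CommonDbClient()        # connects to the 'osm' database taken from cfg.MONGO_URI
for vnfr in client.get_vnfrs():  # the second, zero-argument get_vnfrs definition is the one in effect
    print(vnfr['nsr-id-ref'], vnfr['member-vnf-index-ref'])

# Look up a single VDU record by name within a VNF (placeholder identifiers):
vdur = client.get_vdur('my-nsr-id', 1, 'my-vdu-name')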
diff --git a/osm_mon/core/message_bus/common_consumer.py b/osm_mon/core/message_bus/common_consumer.py
index 35c2e39..3e1f745 100755 (executable)
 import json
 import logging
 import sys
-import threading
+import time
+from json import JSONDecodeError
 
 import six
 import yaml
-from kafka import KafkaConsumer
-from osm_common import dbmongo
 
+from osm_mon.common.common_db_client import CommonDbClient
 from osm_mon.core.auth import AuthManager
 from osm_mon.core.database import DatabaseManager
+from osm_mon.core.message_bus.consumer import Consumer
+from osm_mon.core.message_bus.producer import Producer
 from osm_mon.core.settings import Config
 from osm_mon.plugins.CloudWatch.access_credentials import AccessCredentials
 from osm_mon.plugins.CloudWatch.connection import Connection
 from osm_mon.plugins.CloudWatch.plugin_alarm import plugin_alarms
 from osm_mon.plugins.CloudWatch.plugin_metric import plugin_metrics
-from osm_mon.plugins.OpenStack.Aodh import alarming
-from osm_mon.plugins.OpenStack.Gnocchi import metrics
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler
 from osm_mon.plugins.vRealiseOps import plugin_receiver
 
+cfg = Config.instance()
+
 logging.basicConfig(stream=sys.stdout,
-                    format='%(asctime)s %(message)s',
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                     datefmt='%m/%d/%Y %I:%M:%S %p',
-                    level=logging.INFO)
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
 log = logging.getLogger(__name__)
 
+kafka_logger = logging.getLogger('kafka')
+kafka_logger.setLevel(logging.getLevelName(cfg.OSMMON_KAFKA_LOG_LEVEL))
+kafka_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+kafka_handler = logging.StreamHandler(sys.stdout)
+kafka_handler.setFormatter(kafka_formatter)
+kafka_logger.addHandler(kafka_handler)
+
 
 class CommonConsumer:
 
     def __init__(self):
-        cfg = Config.instance()
-
         self.auth_manager = AuthManager()
         self.database_manager = DatabaseManager()
         self.database_manager.create_tables()
 
         # Create OpenStack alarming and metric instances
-        self.openstack_metrics = metrics.Metrics()
-        self.openstack_alarms = alarming.Alarming()
+        self.openstack_metrics = metric_handler.OpenstackMetricHandler()
+        self.openstack_alarms = alarm_handler.OpenstackAlarmHandler()
 
         # Create CloudWatch alarm and metric instances
         self.cloudwatch_alarms = plugin_alarms()
@@ -70,54 +79,50 @@ class CommonConsumer:
         self.vrops_rcvr = plugin_receiver.PluginReceiver()
 
         log.info("Connecting to MongoDB...")
-        self.common_db = dbmongo.DbMongo()
-        common_db_uri = cfg.MONGO_URI.split(':')
-        self.common_db.db_connect({'host': common_db_uri[0], 'port': int(common_db_uri[1]), 'name': 'osm'})
+        self.common_db = CommonDbClient()
         log.info("Connection successful.")
 
-        # Initialize consumers for alarms and metrics
-        self.common_consumer = KafkaConsumer(bootstrap_servers=cfg.BROKER_URI,
-                                             key_deserializer=bytes.decode,
-                                             value_deserializer=bytes.decode,
-                                             group_id="mon-consumer")
-
-        # Define subscribe the consumer for the plugins
-        topics = ['metric_request', 'alarm_request', 'access_credentials', 'vim_account']
-        # TODO: Remove access_credentials
-        self.common_consumer.subscribe(topics)
-
     def get_vim_type(self, vim_uuid):
         """Get the vim type that is required by the message."""
         credentials = self.database_manager.get_credentials(vim_uuid)
         return credentials.type
 
-    def get_vdur(self, nsr_id, member_index, vdu_name):
-        vnfr = self.get_vnfr(nsr_id, member_index)
-        for vdur in vnfr['vdur']:
-            if vdur['vdu-id-ref'] == vdu_name:
-                return vdur
-        raise ValueError('vdur not found for nsr-id %s, member_index %s and vdu_name %s', nsr_id, member_index,
-                         vdu_name)
+    def run(self):
+        common_consumer = Consumer("mon-consumer")
 
-    def get_vnfr(self, nsr_id, member_index):
-        vnfr = self.common_db.get_one(table="vnfrs",
-                                      filter={"nsr-id-ref": nsr_id, "member-vnf-index-ref": str(member_index)})
-        return vnfr
+        topics = ['metric_request', 'alarm_request', 'vim_account']
+        common_consumer.subscribe(topics)
+        retries = 1
+        max_retries = 5
+        while True:
+            try:
+                common_consumer.poll()
+                common_consumer.seek_to_end()
+                break
+            except Exception:
+                log.error("Error getting Kafka partitions. Maybe Kafka is not ready yet.")
+                log.error("Retry number %d of %d", retries, max_retries)
+                if retries >= max_retries:
+                    log.error("Achieved max number of retries. Logging exception and exiting...")
+                    log.exception("Exception: ")
+                    return
+                retries = retries + 1
+                time.sleep(2)
 
-    def run(self):
         log.info("Listening for messages...")
-        for message in self.common_consumer:
-            t = threading.Thread(target=self.consume_message, args=(message,))
-            t.start()
+        for message in common_consumer:
+            self.consume_message(message)
 
     def consume_message(self, message):
         log.info("Message arrived: %s", message)
         try:
             try:
                 values = json.loads(message.value)
-            except ValueError:
+            except JSONDecodeError:
                 values = yaml.safe_load(message.value)
 
+            response = None
+
             if message.topic == "vim_account":
                 if message.key == "create" or message.key == "edit":
                     self.auth_manager.store_auth_credentials(values)
@@ -143,12 +148,12 @@ class CommonConsumer:
                 vnf_index = values[list_index]['vnf_member_index'] if contains_list else values['vnf_member_index']
 
                 # Check the vim desired by the message
-                vnfr = self.get_vnfr(ns_id, vnf_index)
+                vnfr = self.common_db.get_vnfr(ns_id, vnf_index)
                 vim_uuid = vnfr['vim-account-id']
 
                 if (contains_list and 'vdu_name' in values[list_index]) or 'vdu_name' in values:
                     vdu_name = values[list_index]['vdu_name'] if contains_list else values['vdu_name']
-                    vdur = self.get_vdur(ns_id, vnf_index, vdu_name)
+                    vdur = self.common_db.get_vdur(ns_id, vnf_index, vdu_name)
                     if contains_list:
                         values[list_index]['resource_uuid'] = vdur['vim-id']
                     else:
@@ -160,31 +165,42 @@ class CommonConsumer:
                 if vim_type == "openstack":
                     log.info("This message is for the OpenStack plugin.")
                     if message.topic == "metric_request":
-                        self.openstack_metrics.metric_calls(message, vim_uuid)
+                        response = self.openstack_metrics.handle_request(message.key, values, vim_uuid)
                     if message.topic == "alarm_request":
-                        self.openstack_alarms.alarming(message, vim_uuid)
+                        response = self.openstack_alarms.handle_message(message.key, values, vim_uuid)
 
                 elif vim_type == "aws":
                     log.info("This message is for the CloudWatch plugin.")
                     aws_conn = self.aws_connection.setEnvironment()
                     if message.topic == "metric_request":
-                        self.cloudwatch_metrics.metric_calls(message, aws_conn)
+                        response = self.cloudwatch_metrics.metric_calls(message.key, values, aws_conn)
                     if message.topic == "alarm_request":
-                        self.cloudwatch_alarms.alarm_calls(message, aws_conn)
-                    if message.topic == "access_credentials":
-                        self.aws_access_credentials.access_credential_calls(message)
+                        response = self.cloudwatch_alarms.alarm_calls(message.key, values, aws_conn)
 
                 elif vim_type == "vmware":
                     log.info("This metric_request message is for the vROPs plugin.")
-                    self.vrops_rcvr.consume(message,vim_uuid)
+                    if message.topic == "metric_request":
+                        response = self.vrops_rcvr.handle_metric_requests(message.key, values, vim_uuid)
+                    if message.topic == "alarm_request":
+                        response = self.vrops_rcvr.handle_alarm_requests(message.key, values, vim_uuid)
 
                 else:
                     log.debug("vim_type is misconfigured or unsupported; %s",
                               vim_type)
+            if response:
+                self._publish_response(message.topic, message.key, response)
 
         except Exception:
             log.exception("Exception processing message: ")
 
+    def _publish_response(self, topic: str, key: str, msg: dict):
+        topic = topic.replace('request', 'response')
+        key = key.replace('request', 'response')
+        producer = Producer()
+        producer.send(topic=topic, key=key, value=json.dumps(msg))
+        producer.flush()
+        producer.close()
+
 
 if __name__ == '__main__':
     CommonConsumer().run()
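
The hunk above makes CommonConsumer publish each plugin's return value back onto the bus: the request topic and key are rewritten into their response counterparts before sending. A minimal sketch of that mapping, applied to hypothetical topic/key pairs, for illustration only:

    # Illustrative only: the 'request' -> 'response' substitution used by
    # _publish_response above, applied to hypothetical topic/key pairs.
    examples = [
        ('alarm_request', 'create_alarm_request'),
        ('metric_request', 'read_metric_data_request'),
    ]
    for topic, key in examples:
        print(topic.replace('request', 'response'),
              key.replace('request', 'response'))
    # -> alarm_response create_alarm_response
    # -> metric_response read_metric_data_response
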
old mode 100755 (executable)
new mode 100644 (file)
index c9021d2..7936513
@@ -1,95 +1,14 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
-##
-
-'''
-This is a kafka consumer app that reads the messages from the message bus for
-alarms and metrics responses.
-
-'''
-
-__author__ = "Prithiv Mohan"
-__date__ = "06/Sep/2017"
-
-
 from kafka import KafkaConsumer
-from kafka.errors import KafkaError
-import json
-import logging
-import logging.config
-import os
-
-
-def logging_handler(filename, mode='a+', encoding=None):
-    if not os.path.exists(filename):
-        open(filename, 'a').close()
-    return logging.FileHandler(filename, mode)
-
-log_config = {
-    'version': 1,
-    'formatters': {
-        'default': {
-            'format': '%(asctime)s %(levelname)s %(name)s %(message)s'
-        },
-    },
-    'handlers': {
-        'file': {
-            '()': logging_handler,
-            'level': 'DEBUG',
-            'formatter': 'default',
-            'filename': '/var/log/osm_mon.log',
-            'mode': 'a+',
-            'encoding': 'utf-8',
-        },
-    },
-    'kafka': {
-        'handlers': ['file'],
-        'level': 'DEBUG',
-    },
-    'root': {
-        'handlers': ['file'],
-        'level': 'DEBUG',
-    },
-}
-
-
-logging.config.dictConfig(log_config)
-logger = logging.getLogger('kafka')
 
-if "BROKER_URI" in os.environ:
-    broker = os.getenv("BROKER_URI")
-else:
-    broker = "localhost:9092"
+from osm_mon.core.settings import Config
 
-alarm_consumer = KafkaConsumer(
-    'alarm_response', 'osm_mon', bootstrap_servers=broker)
-metric_consumer = KafkaConsumer(
-    'metric_response', 'osm_mon', bootstrap_servers=broker)
-try:
-    for message in alarm_consumer:
-        logger.debug(message)
-    for message in metric_consumer:
-        logger.debug(message)
-except KafkaError:
-    log.exception()
 
-alarm_consumer.subscribe('alarm_response')
-metric_consumer.subscribe('metric_response')
+# noinspection PyAbstractClass
+class Consumer(KafkaConsumer):
+    def __init__(self, group_id):
+        cfg = Config.instance()
+        super().__init__(bootstrap_servers=cfg.BROKER_URI,
+                         key_deserializer=bytes.decode,
+                         value_deserializer=bytes.decode,
+                         max_poll_interval_ms=900000,
+                         group_id=group_id)
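
The rewritten consumer module replaces the old standalone script with a thin KafkaConsumer subclass that takes its broker from Config and decodes keys and values to str. A minimal consumption loop, assuming a reachable Kafka broker at BROKER_URI and that the subscribed topics exist; the group id is a placeholder:

    from osm_mon.core.message_bus.consumer import Consumer

    consumer = Consumer(group_id='mon-consumer')   # hypothetical group id
    consumer.subscribe(['alarm_request', 'metric_request'])
    for message in consumer:
        # keys and values arrive already decoded to str by the deserializers above
        print(message.topic, message.key, message.value)
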
old mode 100755 (executable)
new mode 100644 (file)
index f04ecf8..573e332
@@ -1,8 +1,9 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
+# Copyright 2018 Whitestack, LLC
 # *************************************************************
 
 # This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
+# All Rights Reserved to Whitestack, LLC
+
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
 # a copy of the License at
 # under the License.
 
 # For those usages not covered by the Apache License, Version 2.0 please
-# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
 ##
-"""This is a common kafka producer app.
-
-It interacts with the SO and the plugins of the datacenters: OpenStack, VMWare
-and AWS.
-"""
-
-import logging
-import os
-
-from kafka import KafkaProducer as kaf
-from kafka.errors import KafkaError
-
-__author__ = "Prithiv Mohan"
-__date__ = "06/Sep/2017"
-
-current_path = os.path.realpath(__file__)
-json_path = os.path.abspath(os.path.join(current_path, '..', '..', 'models'))
-
-# TODO(): validate all of the request and response messages against the
-# json_schemas
-
-
-class KafkaProducer(object):
-    """A common KafkaProducer for requests and responses."""
-
-    def __init__(self, topic):
-        """Initialize the common kafka producer."""
-        self._topic = topic
-
-        if "BROKER_URI" in os.environ:
-            broker = os.getenv("BROKER_URI")
-        else:
-            broker = "localhost:9092"
-
-        '''
-        If the broker URI is not set in the env by default,
-        localhost container is taken as the host because an instance of
-        is already running.
-        '''
-
-        self.producer = kaf(
-            key_serializer=str.encode,
-            value_serializer=str.encode,
-            bootstrap_servers=broker, api_version=(0, 10))
-
-    def publish(self, key, value, topic=None):
-        """Send the required message on the Kafka message bus."""
-        try:
-            future = self.producer.send(topic=topic, key=key, value=value)
-            record_metadata = future.get(timeout=10)
-        except Exception:
-            logging.exception("Error publishing to {} topic." .format(topic))
-            raise
-        try:
-            logging.debug("TOPIC:", record_metadata.topic)
-            logging.debug("PARTITION:", record_metadata.partition)
-            logging.debug("OFFSET:", record_metadata.offset)
-        except KafkaError:
-            pass
-
-    def publish_alarm_request(self, key, message):
-        """Publish an alarm request."""
-        # External to MON
-
-        self.publish(key,
-                     value=message,
-                     topic='alarm_request')
-
-    def publish_alarm_response(self, key, message):
-        """Publish an alarm response."""
-        # Internal to MON
-
-        self.publish(key,
-                     value=message,
-                     topic='alarm_response')
-
-    def publish_metrics_request(self, key, message):
-        """Create metrics request from SO to MON."""
-        # External to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_request')
-
-    def publish_metrics_response(self, key, message):
-        """Response for a create metric request from MON to SO."""
-        # Internal to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_response')
-
-    def read_metric_data_request(self, key, message):
-        """Read metric data request from SO to MON."""
-        # External to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_request')
-
-    def read_metric_data_response(self, key, message):
-        """Response from MON to SO for read metric data request."""
-        # Internal to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_response')
-
-    def list_metric_request(self, key, message):
-        """List metric request from SO to MON."""
-        # External to MON
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_request')
-
-    def list_metric_response(self, key, message):
-        """Response from SO to MON for list metrics request."""
-        # Internal to MON
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_response')
-
-    def delete_metric_request(self, key, message):
-        """Delete metric request from SO to MON."""
-        # External to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_request')
-
-    def delete_metric_response(self, key, message):
-        """Response from MON to SO for delete metric request."""
-        # Internal to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_response')
-
-    def update_metric_request(self, key, message):
-        """Metric update request from SO to MON."""
-        # External to Mon
-
-        self.publish(key,
-                     value=message,
-                     topic='metric_request')
+from kafka import KafkaProducer
 
-    def update_metric_response(self, key, message):
-        """Reponse from MON to SO for metric update."""
-        # Internal to Mon
+from osm_mon.core.settings import Config
 
-        self.publish(key,
-                     value=message,
-                     topic='metric_response')
 
-    def access_credentials(self, key, message):
-        """Send access credentials to MON from SO."""
+class Producer(KafkaProducer):
+    def __init__(self):
+        cfg = Config.instance()
+        super().__init__(bootstrap_servers=cfg.BROKER_URI,
+                         key_serializer=str.encode,
+                         value_serializer=str.encode)
 
-        self.publish(key,
-                     value=message,
-                     topic='access_credentials')
+    def send(self, topic, value=None, key=None, partition=None, timestamp_ms=None):
+        return super().send(topic, value, key, partition, timestamp_ms)
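
Similarly, the producer module is reduced to a KafkaProducer subclass that pins the broker and the str key/value serializers from Config. A minimal publishing sketch, assuming a reachable broker and using a placeholder payload:

    import json

    from osm_mon.core.message_bus.producer import Producer

    producer = Producer()
    request = {'schema_version': '1.0', 'schema_type': 'create_alarm_request'}  # placeholder payload
    producer.send(topic='alarm_request', key='create_alarm_request', value=json.dumps(request))
    producer.flush()
    producer.close()
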
diff --git a/osm_mon/core/models/acknowledge_alarm.json b/osm_mon/core/models/acknowledge_alarm.json
deleted file mode 100644 (file)
index 6a05f07..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-
-/* Copyright© 2017 Intel Research and Development Ireland Limited
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
-
-# This is the message bus schema for acknowledge_alarm */
-
-{  
-  "schema_version": { "type": "string" },
-  "schema_type": { "type": "string" },
-  "vim_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
-  "ack_details":
-  {
-    "alarm_uuid": { "type": "string" },
-    "ns_id": { "type": "string"},
-    "vnf_member_index": { "type": "integer"},
-    "vdu_name": { "type": "string"}
-  },
-  "required": [ "schema_version",
-                "schema_type",
-                "alarm_uuid",
-                "ns_id",
-                "vnf_member_index" ]
-}
diff --git a/osm_mon/core/models/acknowledge_alarm_req.json b/osm_mon/core/models/acknowledge_alarm_req.json
new file mode 100644 (file)
index 0000000..8a9e50a
--- /dev/null
@@ -0,0 +1,41 @@
+
+/* Copyright© 2017 Intel Research and Development Ireland Limited
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+
+# This is the message bus schema for acknowledge_alarm */
+
+{  
+  "schema_version": { "type": "string" },
+  "schema_type": { "type": "string" },
+  "vim_type": { "type": "string" },
+  "vim_uuid": { "type": "string" },
+  "ack_details":
+  {
+    "alarm_uuid": { "type": "string" },
+    "ns_id": { "type": "string"},
+    "vnf_member_index": { "type": "integer"},
+    "vdu_name": { "type": "string"}
+  },
+  "required": [ "schema_version",
+                "schema_type",
+                "alarm_uuid",
+                "ns_id",
+                "vnf_member_index",
+                "vdu_name"]
+}
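
The renamed request schema now also lists vdu_name among the required fields. A payload shaped to satisfy it might look like the following; all values are hypothetical:

    acknowledge_alarm_req = {
        "schema_version": "1.0",
        "schema_type": "acknowledge_alarm",
        "vim_type": "openstack",
        "vim_uuid": "example-vim-uuid",
        "ack_details": {
            "alarm_uuid": "example-alarm-uuid",
            "ns_id": "example-ns-id",
            "vnf_member_index": 1,
            "vdu_name": "example-vdu"   # newly required by this schema
        }
    }
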
index f6610db..95a125d 100644 (file)
@@ -22,7 +22,6 @@
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
   "alarm_create_response":
   {
     "correlation_id": { "type": "integer" },
@@ -31,7 +30,6 @@
   },
   "required": [ "schema_version",
                    "schema_type",
-                    "vim_uuid",
                    "correlation_id",
                    "alarm_uuid",
                    "status" ]
index bbe36ea..5b67955 100644 (file)
   "metric_create_response":
   {
     "correlation_id": { "type": "integer" },
+    "metric_uuid": { "type": "string" },
     "status": { "type": "boolean" }
   },
   "required": [ "schema_type",
                 "schema_version",
                 "correlation_id",
-                "vim_uuid",
                 "metric_uuid",
-                "resource_uuid",
                 "status" ]
 }
index 8c3d88b..44dab0a 100644 (file)
     "correlation_id": { "type": "integer" },
     "alarm_uuid": { "type": "string" },
     "ns_id": { "type": "string"},
+    "vdu_name": { "type": "string"},
     "vnf_member_index": { "type": "integer"}
   },
   "required": [ "schema_version",
                 "schema_type",
                 "alarm_uuid",
                 "ns_id",
+                "vdu_name",
                 "vnf_member_index",
                 "correlation_id"
               ]
index 66d243f..5b931f2 100644 (file)
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
   "alarm_deletion_response":
   {
     "correlation_id": { "type": "integer" },
     "alarm_uuid": { "type": "string" },
-    "status": { "type": "string" }
+    "status": { "type": "boolean" }
   },
   "required": [ "schema_version",
                 "schema_type",
                 "correlation_id",
-                "vim_uuid",
                 "alarm_uuid",
                 "status" ]
 }
index 077646e..2c51042 100644 (file)
@@ -30,9 +30,8 @@
   "required": [ "schema_version",
                 "schema_type",
                 "metric_name",
-                "metric_uuid",
-                "resource_uuid",
-                "correlation_id",
-                "vim_type",
-                "vim_uuid" ]
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index",
+                "correlation_id" ]
 }
index 4d8a8a5..dc99059 100644 (file)
@@ -25,9 +25,7 @@
   "metric_name": { "type": "string" },
   "metric_uuid": { "type": "string" },
   "resource_uuid": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
   "correlation_id": { "type": "integer" },
-  "vim_uuid": { "type": "string" },
   "status": { "type": "boolean" },
   "required": [ "schema_version",
                 "schema_type",
@@ -35,6 +33,5 @@
                 "metric_uuid",
                 "resource_uuid",
                 "status",
-                "correlation_id",
-                "vim_uuid" ]
+                "correlation_id"]
 }
index 7ecc127..a856374 100644 (file)
@@ -33,9 +33,9 @@
   },
   "required": [ "schema_version",
                 "schema_type",
-                "vim_type",
-                "vim_uuid",
-                "correlation_id",
-                "resource_uuid"
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index",
+                "correlation_id"
               ]
 }
index 077e24c..66eaeb3 100644 (file)
@@ -22,7 +22,5 @@
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "vim_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
   "list_alarm_response": { "$ref": "definitions.json#/notify_details" }
 }
index 9612e44..7474956 100644 (file)
@@ -32,8 +32,9 @@
   },
   "required": [ "schema_version",
                 "schema_type",
-                "vim_type",
-                "vim_uuid",
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index",
                 "correlation_id"
               ]
 }
index 4b56f2c..e8c933d 100644 (file)
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
   "correlation_id": { "type": "integer" },
-  "vim_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
+  "status": { "type": "boolean" },
   "metrics_list":
     [{
       "type": "array",
@@ -39,8 +37,6 @@
     "required": [ "schema_version",
                   "schema_type",
                   "correlation_id",
-                  "vim_type",
-                  "vim_uuid",
                   "metric_name",
                   "metric_uuid",
                   "metric_unit",
index ae39a78..62efa50 100644 (file)
@@ -42,8 +42,6 @@
                 "schema_type",
                 "alarm_uuid",
                 "resource_uuid",
-                "vim_type",
-                "vim_uuid",
                 "severity",
                 "status",
                 "start_date" ]
index 7964163..6214569 100644 (file)
@@ -34,9 +34,9 @@
                "metric_name",
                "metric_uuid",
                "correlation_id",
-               "vim_type",
-               "vim_uuid",
                "collection_period",
                "collection_unit",
-               "resource_uuid"]
+               "ns_id",
+               "vdu_name",
+               "vnf_member_index"]
 }
index c851646..2a96b76 100644 (file)
@@ -26,8 +26,7 @@
   "metric_uuid": { "type": "string" },
   "correlation_id": { "type": "integer" },
   "resource_uuid": { "type": "string" },
-  "tenant_uuid": { "type": "string" },
-  "vim_uuid": { "type": "string" },
+  "status": { "type": "boolean" },
   "metrics_data":
   {
     "time_series": [{
@@ -35,8 +34,7 @@
                       "properties":
                       { "time_stamp":
                         { "type": "integer" }}}
-    ]
-  },
+    ],
     "metrics_series": [{
                       "type": "array",
                       "properties":
                       }
                     }
                   ],
+      },
   "unit": { "type": "string" },
   "required": [ "schema_version",
                 "schema_type",
                 "metric_name",
                 "metric_uuid",
                 "resource_uuid",
-                "vim_uuid",
                 "correlation_id",
                 "time_series",
-                "metrics_series" ]
+                "metrics_series",
+                "status" ]
 }
index f71766f..9760bae 100644 (file)
@@ -25,7 +25,6 @@
   "alarm_update_request":
 {
     "correlation_id": { "type": "integer" },
-    "vim_uuid": { "type": "string" },
     "alarm_uuid": { "type": "string" },
     "metric_name": { "type": "string" },
     "ns_id": { "type": "string"},
   },
   "required": [ "schema_version",
                 "schema_type",
-                "vim_type",
-                "vim_uuid",
                 "correlation_id",
-               "alarm_uuid",
-               "metric_uuid" ]
+                   "alarm_uuid",
+                   "metric_uuid",
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index"]
 }
index 8880274..b0e3e60 100644 (file)
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
   "alarm_update_response":
   {
     "correlation_id": { "type": "integer" },
     "alarm_uuid": { "type": "string" },
-    "status": { "type": "string" }
+    "status": { "type": "boolean" }
   },
   "required": [ "schema_version",
                 "schema_type",
-                "vim_uuid",
                 "correlation_id",
                 "alarm_uuid",
                 "status" ]
index 2a76722..7c0eba3 100644 (file)
@@ -22,8 +22,6 @@
 {
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
-  "vim_type": { "type": "string" },
-  "vim_uuid": { "type": "string" },
   "metric_update_request":
   {
   "correlation_id": { "type": "integer" },
   "required": [ "schema_version",
                 "schema_type",
                 "correlation_id",
-                "vim_type",
-                "vim_type",
-                "resource_uuid"
+                "metric_name",
+                "metric_unit",
+                "ns_id",
+                "vdu_name",
+                "vnf_member_index"
                 ]
 }
index 3cfd356..a5bdb67 100644 (file)
@@ -23,7 +23,6 @@
   "schema_version": { "type": "string" },
   "schema_type": { "type": "string" },
   "correlation_id": { "type": "integer" },
-  "vim_uuid": { "type": "string" },
   "metric_update_response":
   {
     "metric_uuid": { "type": "string" },
@@ -33,7 +32,6 @@
   "required": [ "schema_version",
                 "schema_type",
                 "correlation_id",
-                "vim_uuid",
                 "metric_uuid",
                 "resource_uuid",
                 "status"]
index d27f0ca..8f5e8f5 100644 (file)
@@ -65,6 +65,11 @@ class Config(object):
         CfgParam('OS_NOTIFIER_URI', "http://localhost:8662", six.text_type),
         CfgParam('OS_DEFAULT_GRANULARITY', "300", six.text_type),
         CfgParam('REQUEST_TIMEOUT', 10, int),
+        CfgParam('OSMMON_LOG_LEVEL', "INFO", six.text_type),
+        CfgParam('OSMMON_KAFKA_LOG_LEVEL', "WARN", six.text_type),
+        CfgParam('OSMMON_COLLECTOR_INTERVAL', 10, int),
+        CfgParam('OSMMON_VCA_HOST', "localhost", six.text_type),
+        CfgParam('OSMMON_VCA_SECRET', "secret", six.text_type),
     ]
 
     _config_dict = {cfg.key: cfg for cfg in _configuration}
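
The new CfgParam entries expose log levels, the collector interval and the VCA host/secret through the same Config singleton used by the message-bus classes. A small sketch of reading them, assuming the declared defaults apply unless overridden:

    from osm_mon.core.settings import Config

    cfg = Config.instance()
    print(cfg.OSMMON_COLLECTOR_INTERVAL)   # 10 (seconds) by default
    print(cfg.OSMMON_VCA_HOST)             # "localhost" by default
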
index 3774858..8c110ab 100644 (file)
 # contact with: usman.javaid@xflowresearch.com
 ##
 
-'''
+"""
 Access credentials class implements all the methods to store the access credentials for AWS
-'''
+"""
 
 __author__ = "Usman Javaid"
-__date__   = "20-December-2017"
+__date__ = "20-December-2017"
 
-import os
-import sys
 import json
 import logging
+import os
 
 log = logging.getLogger(__name__)
 
+
 class AccessCredentials():
 
     def logtest(self):
         log.info("Access credentials sourced for CloudWatch MON plugin")
 
-
-    def access_credential_calls(self,message):  
-        try:   
+    def access_credential_calls(self, message):
+        try:
             message = json.loads(message.value)['access_config']
-            
+
             AWS_KEY = message['user']
             AWS_SECRET = message['password']
             AWS_REGION = message['vim_tenant_name']
@@ -51,17 +50,15 @@ class AccessCredentials():
             os.environ['AWS_SECRET_ACCESS_KEY'] = AWS_SECRET
             os.environ['AWS_EC2_REGION'] = AWS_REGION
 
+            # aws_credentials.txt file to save the access credentials
+            cloudwatch_credentials = open("../../plugins/CloudWatch/cloudwatch_credentials.txt", "w+")
+            cloudwatch_credentials.write("AWS_ACCESS_KEY_ID=" + AWS_KEY +
+                                         "\nAWS_SECRET_ACCESS_KEY=" + AWS_SECRET +
+                                         "\nAWS_EC2_REGION=" + AWS_REGION)
 
-            #aws_credentials.txt file to save the access credentials 
-            cloudwatch_credentials = open("../../plugins/CloudWatch/cloudwatch_credentials.txt","w+")
-            cloudwatch_credentials.write("AWS_ACCESS_KEY_ID="+AWS_KEY+
-                                         "\nAWS_SECRET_ACCESS_KEY="+AWS_SECRET+
-                                         "\nAWS_EC2_REGION="+AWS_REGION)
-            
-            #Closing the file
+            # Closing the file
             cloudwatch_credentials.close()
             log.info("Access credentials sourced for CloudWatch MON plugin")
 
         except Exception as e:
-                log.error("Access credentials not provided correctly: %s", str(e))
-
+            log.error("Access credentials not provided correctly: %s", str(e))
index 5853ae9..98b05aa 100644 (file)
 # contact with: wajeeha.hamid@xflowresearch.com
 ##
 
-'''
+"""
 Connecting with AWS services --CloudWatch/EC2 using Required keys  
-'''
+"""
 
 __author__ = "Wajeeha Hamid"
-__date__   = "18-September-2017"
+__date__ = "18-September-2017"
 
 import os
 
@@ -34,62 +34,61 @@ try:
     import boto.vpc
     import boto.ec2.cloudwatch
     import boto.ec2.connection
-    import logging 
+    import logging
     from boto.ec2.cloudwatch.alarm import MetricAlarm
     from boto.ec2.cloudwatch.dimension import Dimension
     from boto.sns import connect_to_region
     from boto.utils import get_instance_metadata
 
 except:
-    exit("Boto not avialable. Try activating your virtualenv OR `pip install boto`")
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
 
 log = logging.getLogger(__name__)
 
-class Connection():
-    """Connection Establishement with AWS -- VPC/EC2/CloudWatch"""
-#-----------------------------------------------------------------------------------------------------------------------------
-    def setEnvironment(self):  
+
+class Connection:
+    """Connection Establishment with AWS -- VPC/EC2/CloudWatch"""
+
+    def setEnvironment(self):
         try:
-            """Credentials for connecting to AWS-CloudWatch""" 
-            #Reads from the environment variables
+            """Credentials for connecting to AWS-CloudWatch"""
+            # Reads from the environment variables
             self.AWS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
             self.AWS_SECRET = os.environ.get("AWS_SECRET_ACCESS_KEY")
-            self.AWS_REGION = os.environ.get("AWS_EC2_REGION","us-west-2")
+            self.AWS_REGION = os.environ.get("AWS_EC2_REGION", "us-west-2")
 
-            #TODO Read from the cloudwatch_credentials.txt file
+            # TODO Read from the cloudwatch_credentials.txt file
 
             return self.connection_instance()
         except Exception as e:
-            log.error("AWS Credentials not configured, Try setting the access credentials first %s: ",str(e)) 
-#-----------------------------------------------------------------------------------------------------------------------------
+            log.error("AWS Credentials not configured, Try setting the access credentials first %s: ", str(e))
+
     def connection_instance(self):
-            try:
-                #VPC Connection
-                self.vpc_conn = boto.vpc.connect_to_region(self.AWS_REGION,
-                    aws_access_key_id=self.AWS_KEY,
-                    aws_secret_access_key=self.AWS_SECRET)
-               
-                
-                #EC2 Connection
-                self.ec2_conn = boto.ec2.connect_to_region(self.AWS_REGION,
-                    aws_access_key_id=self.AWS_KEY,
-                    aws_secret_access_key=self.AWS_SECRET)               
-                
-                """ TODO : Required to add actions against alarms when needed """
-                #self.sns = connect_to_region(self.AWS_REGION)
-                #self.topics = self.sns.get_all_topics()
-                #self.topic = self.topics[u'ListTopicsResponse']['ListTopicsResult']['Topics'][0]['TopicArn']
-
-                #Cloudwatch Connection
-                self.cloudwatch_conn = boto.ec2.cloudwatch.connect_to_region(
-                    self.AWS_REGION,
-                    aws_access_key_id=self.AWS_KEY,
-                    aws_secret_access_key=self.AWS_SECRET) 
-                connection_dict = dict()
-                connection_dict['ec2_connection'] = self.ec2_conn
-                connection_dict['cloudwatch_connection'] = self.cloudwatch_conn
-                return connection_dict
-                
-            except Exception as e:
-                log.error("Failed to Connect with AWS %s: ",str(e)) 
+        try:
+            # VPC Connection
+            self.vpc_conn = boto.vpc.connect_to_region(self.AWS_REGION,
+                                                       aws_access_key_id=self.AWS_KEY,
+                                                       aws_secret_access_key=self.AWS_SECRET)
 
+            # EC2 Connection
+            self.ec2_conn = boto.ec2.connect_to_region(self.AWS_REGION,
+                                                       aws_access_key_id=self.AWS_KEY,
+                                                       aws_secret_access_key=self.AWS_SECRET)
+
+            """ TODO : Required to add actions against alarms when needed """
+            # self.sns = connect_to_region(self.AWS_REGION)
+            # self.topics = self.sns.get_all_topics()
+            # self.topic = self.topics[u'ListTopicsResponse']['ListTopicsResult']['Topics'][0]['TopicArn']
+
+            # Cloudwatch Connection
+            self.cloudwatch_conn = boto.ec2.cloudwatch.connect_to_region(
+                self.AWS_REGION,
+                aws_access_key_id=self.AWS_KEY,
+                aws_secret_access_key=self.AWS_SECRET)
+            connection_dict = dict()
+            connection_dict['ec2_connection'] = self.ec2_conn
+            connection_dict['cloudwatch_connection'] = self.cloudwatch_conn
+            return connection_dict
+
+        except Exception as e:
+            log.error("Failed to Connect with AWS %s: ", str(e))
index 8e5b6fe..5d330b0 100644 (file)
@@ -22,7 +22,7 @@
 ''' Handling of alarms requests via BOTO 2.48 '''
 
 __author__ = "Wajeeha Hamid"
-__date__   = "18-September-2017"
+__date__ = "18-September-2017"
 
 import logging
 
@@ -41,64 +41,66 @@ STATISTICS = {
     "AVERAGE": "Average",
     "MINIMUM": "Minimum",
     "MAXIMUM": "Maximum",
-    "COUNT"  : "SampleCount",
-    "SUM"    : "Sum"}
+    "COUNT": "SampleCount",
+    "SUM": "Sum"}
 
 OPERATIONS = {
-    "GE"     : ">=",
-    "LE"     : "<=",
-    "GT"     : ">",
-    "LT"     : "<",
-    "EQ"     : "="}   
+    "GE": ">=",
+    "LE": "<=",
+    "GT": ">",
+    "LT": "<",
+    "EQ": "="}
+
 
 class MetricAlarm():
     """Alarms Functionality Handler -- Carries out alarming requests and responses via BOTO.Cloudwatch """
+
     def __init__(self):
         self.alarm_resp = dict()
         self.del_resp = dict()
 
-    def config_alarm(self,cloudwatch_conn,create_info):
+    def config_alarm(self, cloudwatch_conn, create_info):
         """Configure or Create a new alarm"""
         inner_dict = dict()
         """ Alarm Name to ID Mapping """
         alarm_info = create_info['alarm_create_request']
         alarm_id = alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid']
-        if self.is_present(cloudwatch_conn,alarm_id)['status'] == True: 
+        if self.is_present(cloudwatch_conn, alarm_id)['status'] == True:
             alarm_id = None
-            log.debug ("Alarm already exists, Try updating the alarm using 'update_alarm_configuration()'")
-            return alarm_id   
-        else:              
+            log.debug("Alarm already exists, Try updating the alarm using 'update_alarm_configuration()'")
+            return alarm_id
+        else:
             try:
                 if alarm_info['statistic'] in STATISTICS:
                     if alarm_info['operation'] in OPERATIONS:
                         alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
-                            connection = cloudwatch_conn,
-                            name = alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid'],
-                            metric = alarm_info['metric_name'],
-                            namespace = "AWS/EC2",
-                            statistic = STATISTICS[alarm_info['statistic']],
-                            comparison = OPERATIONS[alarm_info['operation']],
-                            threshold = alarm_info['threshold_value'],
-                            period = 60,
-                            evaluation_periods = 1,
+                            connection=cloudwatch_conn,
+                            name=alarm_info['alarm_name'] + "_" + alarm_info['resource_uuid'],
+                            metric=alarm_info['metric_name'],
+                            namespace="AWS/EC2",
+                            statistic=STATISTICS[alarm_info['statistic']],
+                            comparison=OPERATIONS[alarm_info['operation']],
+                            threshold=alarm_info['threshold_value'],
+                            period=60,
+                            evaluation_periods=1,
                             unit=alarm_info['unit'],
-                            description = alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
-                            dimensions = {'InstanceId':alarm_info['resource_uuid']},
-                            alarm_actions = None,
-                            ok_actions = None,
-                            insufficient_data_actions = None)
+                            description=alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
+                            dimensions={'InstanceId': alarm_info['resource_uuid']},
+                            alarm_actions=None,
+                            ok_actions=None,
+                            insufficient_data_actions=None)
 
                         """Setting Alarm Actions : 
                         alarm_actions = ['arn:aws:swf:us-west-2:465479087178:action/actions/AWS_EC2.InstanceId.Stop/1.0']"""
 
-                        status=cloudwatch_conn.put_metric_alarm(alarm)
+                        status = cloudwatch_conn.put_metric_alarm(alarm)
 
-                        log.debug ("Alarm Configured Succesfully")
+                        log.debug("Alarm Configured Succesfully")
                         self.alarm_resp['schema_version'] = str(create_info['schema_version'])
                         self.alarm_resp['schema_type'] = 'create_alarm_response'
 
                         inner_dict['correlation_id'] = str(alarm_info['correlation_id'])
-                        inner_dict['alarm_uuid'] = str(alarm_id) 
+                        inner_dict['alarm_uuid'] = str(alarm_id)
                         inner_dict['status'] = status
 
                         self.alarm_resp['alarm_create_response'] = inner_dict
@@ -106,18 +108,17 @@ class MetricAlarm():
                         if status == True:
                             return self.alarm_resp
                         else:
-                            return None        
-                    else: 
+                            return None
+                    else:
                         log.error("Operation not supported")
-                        return None        
+                        return None
                 else:
                     log.error("Statistic not supported")
                     return None
             except Exception as e:
                 log.error("Alarm Configuration Failed: " + str(e))
-            
-#-----------------------------------------------------------------------------------------------------------------------------
-    def update_alarm(self,cloudwatch_conn,update_info):
+
+    def update_alarm(self, cloudwatch_conn, update_info):
 
         """Update or reconfigure an alarm"""
         inner_dict = dict()
@@ -125,67 +126,66 @@ class MetricAlarm():
 
         """Alarm Name to ID Mapping"""
         alarm_id = alarm_info['alarm_uuid']
-        status = self.is_present(cloudwatch_conn,alarm_id)
+        status = self.is_present(cloudwatch_conn, alarm_id)
 
         """Verifying : Alarm exists already"""
-        if status['status'] == False: 
+        if status['status'] == False:
             alarm_id = None
             log.debug("Alarm not found, Try creating the alarm using 'configure_alarm()'")
-            return alarm_id   
-        else:            
+            return alarm_id
+        else:
             try:
                 if alarm_info['statistic'] in STATISTICS:
                     if alarm_info['operation'] in OPERATIONS:
                         alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
-                            connection = cloudwatch_conn,
-                            name = status['info'].name ,
-                            metric = alarm_info['metric_name'],
-                            namespace = "AWS/EC2",
-                            statistic = STATISTICS[alarm_info['statistic']],
-                            comparison = OPERATIONS[alarm_info['operation']],
-                            threshold = alarm_info['threshold_value'],
-                            period = 60,
-                            evaluation_periods = 1,
+                            connection=cloudwatch_conn,
+                            name=status['info'].name,
+                            metric=alarm_info['metric_name'],
+                            namespace="AWS/EC2",
+                            statistic=STATISTICS[alarm_info['statistic']],
+                            comparison=OPERATIONS[alarm_info['operation']],
+                            threshold=alarm_info['threshold_value'],
+                            period=60,
+                            evaluation_periods=1,
                             unit=alarm_info['unit'],
-                            description = alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
-                            dimensions = {'InstanceId':str(status['info'].dimensions['InstanceId']).split("'")[1]},
-                            alarm_actions = None,
-                            ok_actions = None,
-                            insufficient_data_actions = None)
+                            description=alarm_info['severity'] + ";" + alarm_id + ";" + alarm_info['description'],
+                            dimensions={'InstanceId': str(status['info'].dimensions['InstanceId']).split("'")[1]},
+                            alarm_actions=None,
+                            ok_actions=None,
+                            insufficient_data_actions=None)
 
                         """Setting Alarm Actions : 
                         alarm_actions = ['arn:aws:swf:us-west-2:465479087178:action/actions/AWS_EC2.InstanceId.Stop/1.0']"""
 
-                        status=cloudwatch_conn.put_metric_alarm(alarm)
-                        log.debug("Alarm %s Updated ",alarm.name)
+                        status = cloudwatch_conn.put_metric_alarm(alarm)
+                        log.debug("Alarm %s Updated ", alarm.name)
                         self.alarm_resp['schema_version'] = str(update_info['schema_version'])
                         self.alarm_resp['schema_type'] = 'update_alarm_response'
 
                         inner_dict['correlation_id'] = str(alarm_info['correlation_id'])
-                        inner_dict['alarm_uuid'] = str(alarm_id) 
+                        inner_dict['alarm_uuid'] = str(alarm_id)
                         inner_dict['status'] = status
 
                         self.alarm_resp['alarm_update_response'] = inner_dict
                         return self.alarm_resp
-                    else: 
+                    else:
                         log.error("Operation not supported")
-                        return None        
+                        return None
                 else:
                     log.error("Statistic not supported")
-                    return None        
+                    return None
             except Exception as e:
-                log.error ("Error in Updating Alarm " + str(e))
-        
-#-----------------------------------------------------------------------------------------------------------------------------
-    def delete_Alarm(self,cloudwatch_conn,del_info_all):
+                log.error("Error in Updating Alarm " + str(e))
+
+    def delete_Alarm(self, cloudwatch_conn, del_info_all):
 
         """Deletes an Alarm with specified alarm_id"""
         inner_dict = dict()
         del_info = del_info_all['alarm_delete_request']
-        status = self.is_present(cloudwatch_conn,del_info['alarm_uuid'])
+        status = self.is_present(cloudwatch_conn, del_info['alarm_uuid'])
         try:
-            if status['status'] == True:                
-                del_status=cloudwatch_conn.delete_alarms(status['info'].name)
+            if status['status'] == True:
+                del_status = cloudwatch_conn.delete_alarms(status['info'].name)
                 self.del_resp['schema_version'] = str(del_info_all['schema_version'])
                 self.del_resp['schema_type'] = 'delete_alarm_response'
                 inner_dict['correlation_id'] = str(del_info['correlation_id'])
@@ -193,90 +193,98 @@ class MetricAlarm():
                 inner_dict['status'] = del_status
                 self.del_resp['alarm_deletion_response'] = inner_dict
                 return self.del_resp
-            return None 
+            return None
         except Exception as e:
-                log.error("Alarm Not Deleted: " + str(e))      
-#-----------------------------------------------------------------------------------------------------------------------------
-    def alarms_list(self,cloudwatch_conn,list_info):
+            log.error("Alarm Not Deleted: " + str(e))
+
+    def alarms_list(self, cloudwatch_conn, list_info):
 
         """Get a list of alarms that are present on a particular VIM type"""
         alarm_list = []
         alarm_info = dict()
         inner_dict = list_info['alarm_list_request']
-        try: #id vim 
+        try:  # id vim 
             alarms = cloudwatch_conn.describe_alarms()
             itr = 0
             for alarm in alarms:
                 list_info['alarm_list_request']['alarm_uuid'] = str(alarm.description).split(';')[1]
 
-                #Severity = alarm_name = resource_uuid = ""
-                if inner_dict['severity'] == "" and inner_dict['alarm_name'] == "" and inner_dict['resource_uuid'] == "":
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                # Severity = alarm_name = resource_uuid = ""
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] == "" and inner_dict[
+                    'resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
+                    itr += 1
+                # alarm_name = resource_uuid = ""
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] == "" and \
+                        inner_dict['resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
-                #alarm_name = resource_uuid = ""
-                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] == "" and inner_dict['resource_uuid'] == "":
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                # severity = resource_uuid = ""
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] in alarm.name and inner_dict[
+                    'resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
-                #severity = resource_uuid = ""
-                if inner_dict['severity'] == "" and inner_dict['alarm_name'] in alarm.name and inner_dict['resource_uuid'] == "":
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                # severity = alarm_name = ""    
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] == "" and inner_dict['resource_uuid'] == \
+                        str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
-                #severity = alarm_name = ""    
-                if inner_dict['severity'] == "" and inner_dict['alarm_name'] == "" and inner_dict['resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                # resource_uuid = ""    
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict[
+                    'alarm_name'] in alarm.name and inner_dict['resource_uuid'] == "":
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
-                #resource_uuid = ""    
-                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] in alarm.name and inner_dict['resource_uuid'] == "":
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                # alarm_name = ""    
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] == "" and \
+                        inner_dict['resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
-                #alarm_name = ""    
-                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] == "" and inner_dict['resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                # severity = ""    
+                if inner_dict['severity'] == "" and inner_dict['alarm_name'] in alarm.name and inner_dict[
+                    'resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
-                #severity = ""    
-                if inner_dict['severity'] == "" and inner_dict['alarm_name'] in alarm.name and inner_dict['resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
-                    itr += 1                    
-                #Everything provided    
-                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict['alarm_name'] in alarm.name and inner_dict['resource_uuid'] == str(alarm.dimensions['InstanceId']).split("'")[1]:
-                    alarm_list.insert(itr,self.alarm_details(cloudwatch_conn,list_info))
+                    # Everything provided    
+                if inner_dict['severity'] == str(alarm.description).split(';')[0] and inner_dict[
+                    'alarm_name'] in alarm.name and inner_dict['resource_uuid'] == \
+                        str(alarm.dimensions['InstanceId']).split("'")[1]:
+                    alarm_list.insert(itr, self.alarm_details(cloudwatch_conn, list_info))
                     itr += 1
 
             alarm_info['schema_version'] = str(list_info['schema_version'])
-            alarm_info['schema_type'] = 'list_alarm_response'    
+            alarm_info['schema_type'] = 'list_alarm_response'
             alarm_info['list_alarm_response'] = alarm_list
 
-            return alarm_info                  
+            return alarm_info
         except Exception as e:
-                log.error("Error in Getting List : %s",str(e))    
-#-----------------------------------------------------------------------------------------------------------------------------
-    def alarm_details(self,cloudwatch_conn,ack_info):
+            log.error("Error in Getting List : %s", str(e))
+
+    def alarm_details(self, cloudwatch_conn, ack_info):
 
         """Get an individual alarm details specified by alarm_name"""
         try:
-            alarms_details=cloudwatch_conn.describe_alarm_history()  
-            alarm_details_all = dict()     
+            alarms_details = cloudwatch_conn.describe_alarm_history()
+            alarm_details_all = dict()
             alarm_details_dict = dict()
             ack_info_all = ack_info
 
-
             if 'ack_details' in ack_info:
                 ack_info = ack_info['ack_details']
             elif 'alarm_list_request' in ack_info:
-                ack_info = ack_info['alarm_list_request']    
-            
-            is_present = self.is_present(cloudwatch_conn,ack_info['alarm_uuid'])
+                ack_info = ack_info['alarm_list_request']
 
-            for itr in range (len(alarms_details)):
-                if alarms_details[itr].name == is_present['info'].name :#name, timestamp, summary
+            is_present = self.is_present(cloudwatch_conn, ack_info['alarm_uuid'])
+
+            for itr in range(len(alarms_details)):
+                if alarms_details[itr].name == is_present['info'].name:  # name, timestamp, summary
                     if 'created' in alarms_details[itr].summary:
                         alarm_details_dict['status'] = "New"
                     elif 'updated' in alarms_details[itr].summary:
                         alarm_details_dict['status'] = "Update"
-                    elif 'deleted' in alarms_details[itr].summary:   
+                    elif 'deleted' in alarms_details[itr].summary:
                         alarm_details_dict['status'] = "Canceled"
 
-                    status = alarms_details[itr].summary.split()                  
+                    status = alarms_details[itr].summary.split()
                     alarms = cloudwatch_conn.describe_alarms()
                     for alarm in alarms:
                         if str(alarm.description).split(';')[1] == ack_info['alarm_uuid']:
@@ -284,9 +292,9 @@ class MetricAlarm():
                             alarm_details_dict['resource_uuid'] = str(alarm.dimensions['InstanceId']).split("'")[1]
                             alarm_details_dict['description'] = str(alarm.description).split(';')[1]
                             alarm_details_dict['severity'] = str(alarm.description).split(';')[0]
-                            alarm_details_dict['start_date_time'] = str(alarms_details[itr].timestamp) 
+                            alarm_details_dict['start_date_time'] = str(alarms_details[itr].timestamp)
                             alarm_details_dict['vim_type'] = str(ack_info_all['vim_type'])
-                            #TODO : tenant id
+                            # TODO : tenant id
                             if 'ack_details' in ack_info_all:
                                 alarm_details_all['schema_version'] = str(ack_info_all['schema_version'])
                                 alarm_details_all['schema_type'] = 'notify_alarm'
@@ -294,12 +302,12 @@ class MetricAlarm():
                                 return alarm_details_all
 
                             elif 'alarm_list_request' in ack_info_all:
-                                return alarm_details_dict                     
-                  
+                                return alarm_details_dict
+
         except Exception as e:
-            log.error("Error getting alarm details: %s",str(e))
-#-----------------------------------------------------------------------------------------------------------------------------
-    def is_present(self,cloudwatch_conn,alarm_id):
+            log.error("Error getting alarm details: %s", str(e))
+
+    def is_present(self, cloudwatch_conn, alarm_id):
         """Finding alarm from already configured alarms"""
         alarm_info = dict()
         try:
@@ -309,9 +317,7 @@ class MetricAlarm():
                     alarm_info['status'] = True
                     alarm_info['info'] = alarm
                     return alarm_info
-            alarm_info['status'] = False        
+            alarm_info['status'] = False
             return alarm_info
         except Exception as e:
-                log.error("Error Finding Alarm",str(e))             
-#-----------------------------------------------------------------------------------------------------------------------------
-    
\ No newline at end of file
+            log.error("Error Finding Alarm", str(e))
index 1586359..1812002 100644 (file)
 # contact with: wajeeha.hamid@xflowresearch.com
 ##
 
-'''
+"""
 AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
-'''
+"""
 
 __author__ = "Wajeeha Hamid"
-__date__   = "18-Sept-2017"
+__date__ = "18-Sept-2017"
 
 import datetime
 import logging
@@ -40,34 +40,34 @@ except:
 
 log = logging.getLogger(__name__)
 
-class Metrics():
 
-    def createMetrics(self,cloudwatch_conn,metric_info):
+class Metrics:
+
+    def createMetrics(self, cloudwatch_conn, metric_info):
         try:
-            
-            '''createMetrics will be returning the metric_uuid=0 and
-             status=True when the metric is supported by AWS'''
 
-            supported=self.check_metric(metric_info['metric_name'])
+            """createMetrics will be returning the metric_uuid=0 and
+             status=True when the metric is supported by AWS"""
+
+            supported = self.check_metric(metric_info['metric_name'])
             metric_resp = dict()
             metric_resp['resource_uuid'] = metric_info['resource_uuid']
-            
-            if supported['status'] == True:
+
+            if supported['status']:
                 metric_resp['status'] = True
                 metric_resp['metric_uuid'] = 0
-                log.debug("Metrics Configured Succesfully : %s" , metric_resp)
+                log.debug("Metrics Configured Successfully : %s", metric_resp)
             else:
                 metric_resp['status'] = False
                 metric_resp['metric_uuid'] = None
-                log.error("Metric name is not supported") 
-            
-            return metric_resp         
+                log.error("Metric name is not supported")
+
+            return metric_resp
 
         except Exception as e:
             log.error("Metric Configuration Failed: " + str(e))
-#-----------------------------------------------------------------------------------------------------------------------------
-    
-    def metricsData(self,cloudwatch_conn,data_info):
+
+    def metricsData(self, cloudwatch_conn, data_info):
 
         """Getting Metrics Stats for an Hour.The datapoints are
         received after every one minute.
@@ -79,43 +79,48 @@ class Metrics():
             timestamp_arr = {}
             value_arr = {}
 
-            supported=self.check_metric(data_info['metric_name'])
+            supported = self.check_metric(data_info['metric_name'])
 
-            if supported['status'] == True:
+            if supported['status']:
                 if int(data_info['collection_period']) % 60 == 0:
-                    metric_stats=cloudwatch_conn.get_metric_statistics(60, datetime.datetime.utcnow() - datetime.timedelta(seconds=int(data_info['collection_period'])),
-                                        datetime.datetime.utcnow(),supported['metric_name'],'AWS/EC2', 'Maximum',
-                                        dimensions={'InstanceId':data_info['resource_uuid']}, unit='Percent')  
+                    metric_stats = cloudwatch_conn.get_metric_statistics(60,
+                                                                         datetime.datetime.utcnow() - datetime.timedelta(
+                                                                             seconds=int(
+                                                                                 data_info['collection_period'])),
+                                                                         datetime.datetime.utcnow(),
+                                                                         supported['metric_name'], 'AWS/EC2', 'Maximum',
+                                                                         dimensions={
+                                                                             'InstanceId': data_info['resource_uuid']},
+                                                                         unit='Percent')
                     index = 0
-                    for itr in range (len(metric_stats)):
+                    for itr in range(len(metric_stats)):
                         timestamp_arr[index] = str(metric_stats[itr]['Timestamp'])
                         value_arr[index] = metric_stats[itr]['Maximum']
-                        index +=1
+                        index += 1
                     metric_info_dict['time_series'] = timestamp_arr
                     metric_info_dict['metrics_series'] = value_arr
                     log.debug("Metrics Data : %s", metric_info_dict)
                     return metric_info_dict
-                else: 
+                else:
                     log.error("Collection Period should be a multiple of 60")
                     return False
 
             else:
                 log.error("Metric name is not supported")
                 return False
-        
+
         except Exception as e:
             log.error("Error returning Metrics Data" + str(e))
 
-#-----------------------------------------------------------------------------------------------------------------------------
-    def updateMetrics(self,cloudwatch_conn,metric_info):
-        
-        '''updateMetrics will be returning the metric_uuid=0 and
-         status=True when the metric is supported by AWS'''
+    def updateMetrics(self, cloudwatch_conn, metric_info):
+
+        """updateMetrics will be returning the metric_uuid=0 and
+         status=True when the metric is supported by AWS"""
         try:
-            supported=self.check_metric(metric_info['metric_name'])
+            supported = self.check_metric(metric_info['metric_name'])
             update_resp = dict()
             update_resp['resource_uuid'] = metric_info['resource_uuid']
-            if supported['status'] == True:
+            if supported['status']:
                 update_resp['status'] = True
                 update_resp['metric_uuid'] = 0
                 log.debug("Metric Updated : %s", update_resp)
@@ -123,21 +128,21 @@ class Metrics():
                 update_resp['status'] = False
                 update_resp['metric_uuid'] = None
                 log.error("Metric name is not supported")
-             
-            return update_resp  
-        
+
+            return update_resp
+
         except Exception as e:
             log.error("Error in Update Metrics" + str(e))
-#-----------------------------------------------------------------------------------------------------------------------------
-    def deleteMetrics(self,cloudwatch_conn,del_info):
-        
-        ''' " Not supported in AWS"
-        Returning the required parameters with status = False'''
+
+    def deleteMetrics(self, cloudwatch_conn, del_info):
+
+        """ " Not supported in AWS"
+        Returning the required parameters with status = False"""
         try:
-            supported=self.check_metric(del_info['metric_name'])
+            supported = self.check_metric(del_info['metric_name'])
             metric_resp = dict()
             del_resp = dict()
-            if supported['status'] == True:      
+            if supported['status']:
                 del_resp['schema_version'] = del_info['schema_version']
                 del_resp['schema_type'] = "delete_metric_response"
                 del_resp['metric_name'] = del_info['metric_name']
@@ -147,54 +152,52 @@ class Metrics():
                 del_resp['tenant_uuid'] = del_info['tenant_uuid']
                 del_resp['correlation_id'] = del_info['correlation_uuid']
                 del_resp['status'] = False
-                log.info("Metric Deletion Not supported in AWS : %s",del_resp)
+                log.info("Metric Deletion Not supported in AWS : %s", del_resp)
                 return del_resp
             else:
                 log.error("Metric name is not supported")
                 return False
 
         except Exception as e:
-                log.error(" Metric Deletion Not supported in AWS : " + str(e))
-#------------------------------------------------------------------------------------------------------------------------------------
-    
-    def listMetrics(self,cloudwatch_conn ,list_info):
+            log.error(" Metric Deletion Not supported in AWS : " + str(e))
+
+    def listMetrics(self, cloudwatch_conn, list_info):
 
-        '''Returns the list of available AWS/EC2 metrics on which
-        alarms have been configured and the metrics are being monitored'''
+        """Returns the list of available AWS/EC2 metrics on which
+        alarms have been configured and the metrics are being monitored"""
         try:
             supported = self.check_metric(list_info['metric_name'])
-            if supported['status'] == True: 
+            if supported['status']:
                 metrics_list = []
-                metrics_data = dict()    
+                metrics_data = dict()
 
-                #To get the list of associated metrics with the alarms
+                # To get the list of associated metrics with the alarms
                 alarms = cloudwatch_conn.describe_alarms()
                 itr = 0
                 if list_info['metric_name'] == "":
                     for alarm in alarms:
                         metrics_info = dict()
-                        instance_id = str(alarm.dimensions['InstanceId']).split("'")[1] 
-                        metrics_info['metric_name'] = str(alarm.metric) 
-                        metrics_info['metric_uuid'] = 0     
-                        metrics_info['metric_unit'] = str(alarm.unit)    
-                        metrics_info['resource_uuid'] = instance_id 
-                        metrics_list.insert(itr,metrics_info)
+                        instance_id = str(alarm.dimensions['InstanceId']).split("'")[1]
+                        metrics_info['metric_name'] = str(alarm.metric)
+                        metrics_info['metric_uuid'] = 0
+                        metrics_info['metric_unit'] = str(alarm.unit)
+                        metrics_info['resource_uuid'] = instance_id
+                        metrics_list.insert(itr, metrics_info)
                         itr += 1
                     log.info(metrics_list)
                     return metrics_list
-                else: 
+                else:
                     for alarm in alarms:
                         metrics_info = dict()
                         if alarm.metric == supported['metric_name']:
-                            instance_id = str(alarm.dimensions['InstanceId']).split("'")[1] 
+                            instance_id = str(alarm.dimensions['InstanceId']).split("'")[1]
                             metrics_info['metric_name'] = str(alarm.metric)
-                            metrics_info['metric_uuid'] = 0     
-                            metrics_info['metric_unit'] = str(alarm.unit)    
+                            metrics_info['metric_uuid'] = 0
+                            metrics_info['metric_unit'] = str(alarm.unit)
                             metrics_info['resource_uuid'] = instance_id
-                            metrics_list.insert(itr,metrics_info)
+                            metrics_list.insert(itr, metrics_info)
                             itr += 1
-                    return metrics_list               
-                log.debug("Metrics List : %s",metrics_list)
+                    return metrics_list
             else:
                 log.error("Metric name is not supported")
                 return False
@@ -202,11 +205,9 @@ class Metrics():
         except Exception as e:
             log.error("Error in Getting Metric List " + str(e))
 
-#------------------------------------------------------------------------------------------------------------------------------------
+    def check_metric(self, metric_name):
 
-    def check_metric(self,metric_name):
-    
-        ''' Checking whether the metric is supported by AWS '''
+        """ Checking whether the metric is supported by AWS """
         try:
             check_resp = dict()
             # metric_name
@@ -240,22 +241,13 @@ class Metrics():
                 metric_status = False
                 log.info("Metric Not Supported by AWS plugin ")
             check_resp['metric_name'] = metric_name
-            #status
-            if metric_status == True:
+            # status
+            if metric_status:
                 check_resp['status'] = True
             else:
                 check_resp['status'] = False
 
             return check_resp
 
-        except Exception as e: 
-            log.error("Error in Plugin Inputs %s",str(e))     
-#--------------------------------------------------------------------------------------------------------------------------------------
-
-      
-
-
-
-
-
-
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
index 40e7fe5..c125bab 100644 (file)
 # contact with: wajeeha.hamid@xflowresearch.com
 ##
 
-'''
+"""
 AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
-'''
-from io import open
-from osm_mon.core.message_bus.producer import KafkaProducer
+"""
+from io import UnsupportedOperation
+
+from osm_mon.core.settings import Config
 from osm_mon.plugins.CloudWatch.metric_alarms import MetricAlarm
 from osm_mon.plugins.CloudWatch.metrics import Metrics
 
 __author__ = "Wajeeha Hamid"
-__date__   = "18-September-2017"
+__date__ = "18-September-2017"
 
-import json
 import logging
 
 log = logging.getLogger(__name__)
 
-class plugin_alarms():
+
+class plugin_alarms:
     """Receives Alarm info from MetricAlarm and connects with the consumer/producer"""
-    def __init__ (self): 
+
+    def __init__(self):
+        self._cfg = Config.instance()
         self.metricAlarm = MetricAlarm()
         self.metric = Metrics()
-        self.producer = KafkaProducer('')     
-#---------------------------------------------------------------------------------------------------------------------------   
-    def configure_alarm(self,alarm_info):
-        alarm_id = self.metricAlarm.config_alarm(self.cloudwatch_conn,alarm_info)
+
+    def configure_alarm(self, alarm_info):
+        alarm_id = self.metricAlarm.config_alarm(self.cloudwatch_conn, alarm_info)
         return alarm_id
-#---------------------------------------------------------------------------------------------------------------------------          
-    def update_alarm_configuration(self,test):
-        alarm_id = self.metricAlarm.update_alarm(self.cloudwatch_conn,test)
+
+    def update_alarm_configuration(self, test):
+        alarm_id = self.metricAlarm.update_alarm(self.cloudwatch_conn, test)
         return alarm_id
-#---------------------------------------------------------------------------------------------------------------------------            
-    def delete_alarm(self,alarm_id):
-        return self.metricAlarm.delete_Alarm(self.cloudwatch_conn,alarm_id)
-#---------------------------------------------------------------------------------------------------------------------------  
-    def get_alarms_list(self,instance_id):
-        return self.metricAlarm.alarms_list(self.cloudwatch_conn,instance_id) 
-#---------------------------------------------------------------------------------------------------------------------------            
-    def get_ack_details(self,ack_info):
-        return self.metricAlarm.alarm_details(self.cloudwatch_conn,ack_info)
-#---------------------------------------------------------------------------------------------------------------------------            
-    def get_metrics_data(self,metric_name,period,instance_id):
-        return self.metric.metricsData(self.cloudwatch_conn,metric_name,period,instance_id)
-#--------------------------------------------------------------------------------------------------------------------------- 
-
-    def alarm_calls(self,message,aws_conn):
+
+    def delete_alarm(self, alarm_id):
+        return self.metricAlarm.delete_Alarm(self.cloudwatch_conn, alarm_id)
+
+    def get_alarms_list(self, instance_id):
+        return self.metricAlarm.alarms_list(self.cloudwatch_conn, instance_id)
+
+    def get_ack_details(self, ack_info):
+        return self.metricAlarm.alarm_details(self.cloudwatch_conn, ack_info)
+
+    def get_metrics_data(self, metric_name, period, instance_id):
+        # TODO: Investigate and fix this call
+        return self.metric.metricsData(self.cloudwatch_conn, metric_name, period, instance_id)
+
+    def alarm_calls(self, key: str, alarm_info: dict, aws_conn: dict):
         """Gets the message from the common consumer"""
         try:
             self.cloudwatch_conn = aws_conn['cloudwatch_connection']
-            self.ec2_conn = aws_conn['ec2_connection'] 
-
-            log.info("Action required against: %s" % (message.topic))
-            alarm_info = json.loads(message.value)
+            self.ec2_conn = aws_conn['ec2_connection']
 
-            if message.key == "create_alarm_request":  
+            if key == "create_alarm_request":
                 alarm_inner_dict = alarm_info['alarm_create_request']
-                metric_status = self.check_metric(alarm_inner_dict['metric_name'])                            
-             
-                if self.check_resource(alarm_inner_dict['resource_uuid']) == True and metric_status['status'] == True:
-                    log.debug ("Resource and Metrics exists")
-                
+                metric_status = self.check_metric(alarm_inner_dict['metric_name'])
+
+                if self.check_resource(alarm_inner_dict['resource_uuid']) and metric_status['status']:
+                    log.debug("Resource and Metrics exists")
+
                     alarm_info['alarm_create_request']['metric_name'] = metric_status['metric_name']
-                    #Generate a valid response message, send via producer
-                    config_resp = self.configure_alarm(alarm_info) #alarm_info = message.value
-             
-                    if config_resp == None:
+                    # Generate a valid response message, send via producer
+                    config_resp = self.configure_alarm(alarm_info)  # alarm_info = message.value
+
+                    if config_resp is None:
                         log.debug("Alarm Already exists")
-                        payload = json.dumps(config_resp)                                   
-                        file = open('../../core/models/create_alarm_resp.json','wb').write((payload))
-                        self.producer.publish_alarm_response(key='create_alarm_response',message=payload)
-             
-                    else: 
-                        payload = json.dumps(config_resp)                                
-                        file = open('../../core/models/create_alarm_resp.json','wb').write((payload))                           
-                        self.producer.publish_alarm_response(key='create_alarm_response',message=payload)
-                        log.info("New alarm created with alarm info: %s", config_resp)                           
-             
+                        # TODO: This should return a response with status False
+                        return config_resp
+
+                    else:
+                        log.info("New alarm created with alarm info: %s", config_resp)
+                        return config_resp
+
                 else:
-                    log.error("Resource ID doesn't exists")                
-                
-            elif message.key == "acknowledge_alarm":
+                    log.error("Resource ID doesn't exists")
+
+            elif key == "acknowledge_alarm":
                 alarm_inner_dict = alarm_info['ack_details']
-                
-                if self.check_resource(alarm_inner_dict['resource_uuid']) == True: 
-                    alarm_info = json.loads(message.value)
-                    #Generate a valid response message, send via producer
+
+                if self.check_resource(alarm_inner_dict['resource_uuid']):
                     ack_details = self.get_ack_details(alarm_info)
-                    payload = json.dumps(ack_details)                                  
-                    file = open('../../core/models/notify_alarm.json','wb').write((payload))
-                    self.producer.notify_alarm(key='notify_alarm',message=payload)
                     log.info("Acknowledge sent: %s", ack_details)
+                    return ack_details
 
                 else:
-                    log.error("Resource ID is Incorrect")                        
-
+                    log.error("Resource ID is Incorrect")
 
-            elif message.key == "update_alarm_request":                         
+            elif key == "update_alarm_request":
                 alarm_inner_dict = alarm_info['alarm_update_request']
                 metric_status = self.check_metric(alarm_inner_dict['metric_name'])
-                
-                if metric_status['status'] == True:
-                    log.debug ("Resource and Metrics exists")
+
+                if metric_status['status']:
+                    log.debug("Resource and Metrics exists")
                     alarm_info['alarm_update_request']['metric_name'] = metric_status['metric_name']
-                    #Generate a valid response message, send via producer
+                    # Generate a valid response message, send via producer
                     update_resp = self.update_alarm_configuration(alarm_info)
 
-                    if update_resp == None:                                    
-                        payload = json.dumps(update_resp)                                   
-                        file = open('../../core/models/update_alarm_resp.json','wb').write((payload))
-                        self.producer.update_alarm_response(key='update_alarm_response',message=payload)
+                    if update_resp is None:
+                        # TODO: This should return a response with status False
                         log.debug("Alarm Already exists")
+                        return update_resp
 
-                    else: 
-                        payload = json.dumps(update_resp)                                   
-                        file = open('../../core/models/update_alarm_resp.json','wb').write((payload))
-                        self.producer.update_alarm_response(key='update_alarm_response',message=payload)
-                        log.info("Alarm Updated with alarm info: %s", update_resp)                           
+                    else:
+                        log.info("Alarm Updated with alarm info: %s", update_resp)
+                        return update_resp
 
                 else:
-                    log.info ("Metric Not Supported")
-         
-            
-            elif message.key == "delete_alarm_request":  
-                del_info = json.loads(message.value)
-                #Generate a valid response message, send via producer
-                del_resp = self.delete_alarm(del_info)
-                payload = json.dumps(del_resp)                                   
-                file = open('../../core/models/delete_alarm_resp.json','wb').write((payload))
-                self.producer.delete_alarm_response(key='delete_alarm_response',message=payload)
+                    log.info("Metric Not Supported")
+
+            elif key == "delete_alarm_request":
+                # Generate a valid response message, send via producer
+                del_resp = self.delete_alarm(alarm_info)
                 log.info("Alarm Deleted with alarm info: %s", del_resp)
+                return del_resp
 
-       
-            elif message.key == "alarm_list_request":
+            elif key == "alarm_list_request":
                 alarm_inner_dict = alarm_info['alarm_list_request']
-                
-                if self.check_resource(alarm_inner_dict['resource_uuid']) == True or alarm_inner_dict['resource_uuid'] == "": 
-                    #Generate a valid response message, send via producer
-                    list_resp = self.get_alarms_list(alarm_info)#['alarm_names']
-                    payload = json.dumps(list_resp)                                                                 
-                    file = open('../../core/models/list_alarm_resp.json','wb').write((payload))
-                    self.producer.list_alarm_response(key='list_alarm_response',message=payload)
 
+                if self.check_resource(alarm_inner_dict['resource_uuid']) or alarm_inner_dict['resource_uuid'] == "":
+                    # Generate a valid response message, send via producer
+                    list_resp = self.get_alarms_list(alarm_info)  # ['alarm_names']
+                    return list_resp
                 else:
-                    log.error("Resource ID is Incorrect")             
+                    log.error("Resource ID is Incorrect")
 
             else:
-                log.debug("Unknown key, no action will be performed")    
+                raise UnsupportedOperation("Unknown key, no action will be performed")
 
         except Exception as e:
-                log.error("Message retrieval exception: %s", str(e))             
-#--------------------------------------------------------------------------------------------------------------------------- 
-    def check_resource(self,resource_uuid):
-        '''Finding Resource with the resource_uuid'''
+            log.error("Message retrieval exception: %s", str(e))
+
+    def check_resource(self, resource_uuid):
+        """Finding Resource with the resource_uuid"""
         try:
             check_resp = dict()
             instances = self.ec2_conn.get_all_instance_status()
 
-            #resource_id
+            # resource_id
             for instance_id in instances:
                 instance_id = str(instance_id).split(':')[1]
 
                 if instance_id == resource_uuid:
                     check_resp['resource_uuid'] = resource_uuid
-                    return True 
+                    return True
             return False
 
-        except Exception as e: 
-            log.error("Error in Plugin Inputs %s",str(e)) 
-#--------------------------------------------------------------------------------------------------------------------------- 
-    def check_metric(self,metric_name):
-        ''' Checking whether the metric is supported by AWS '''
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
+
+    def check_metric(self, metric_name):
+        """ Checking whether the metric is supported by AWS """
         try:
             check_resp = dict()
-            
-            #metric_name
+
+            # metric_name
             if metric_name == 'CPU_UTILIZATION':
                 metric_name = 'CPUUtilization'
                 metric_status = True
@@ -225,12 +205,11 @@ class plugin_alarms():
                 metric_name = None
                 metric_status = False
             check_resp['metric_name'] = metric_name
-            #status
+            # status
 
-            if metric_status == True:
+            if metric_status:
                 check_resp['status'] = True
-                return check_resp   
+                return check_resp
 
-        except Exception as e: 
-            log.error("Error in Plugin Inputs %s",str(e)) 
-#--------------------------------------------------------------------------------------------------------------------------- 
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
index 36b89e3..d31b608 100644 (file)
 # contact with: wajeeha.hamid@xflowresearch.com
 ##
 
-'''
+"""
 AWS-Plugin implements all the methods of MON to interact with AWS using the BOTO client
-'''
-from osm_mon.core.message_bus.producer import KafkaProducer
+"""
+from io import UnsupportedOperation
+
+from osm_mon.core.settings import Config
 from osm_mon.plugins.CloudWatch.metrics import Metrics
 
 __author__ = "Wajeeha Hamid"
-__date__   = "18-September-2017"
+__date__ = "18-September-2017"
 
-import json
 import logging
 
 log = logging.getLogger(__name__)
 
-class plugin_metrics():
+
+class plugin_metrics:
     """Receives Alarm info from MetricAlarm and connects with the consumer/producer """
-    def __init__ (self): 
+
+    def __init__(self):
+        self._cfg = Config.instance()
         self.metric = Metrics()
-        self.producer = KafkaProducer('')
-#---------------------------------------------------------------------------------------------------------------------------   
-    def create_metric_request(self,metric_info):
-        '''Comaptible API using normalized parameters'''
-        metric_resp = self.metric.createMetrics(self.cloudwatch_conn,metric_info)
+
+    def create_metric_request(self, metric_info):
+        """Compatible API using normalized parameters"""
+        metric_resp = self.metric.createMetrics(self.cloudwatch_conn, metric_info)
         return metric_resp
-#---------------------------------------------------------------------------------------------------------------------------          
-    def update_metric_request(self,updated_info):
-        '''Comaptible API using normalized parameters'''
-        update_resp = self.metric.updateMetrics(self.cloudwatch_conn,updated_info)
+
+    def update_metric_request(self, updated_info):
+        """Compatible API using normalized parameters"""
+        update_resp = self.metric.updateMetrics(self.cloudwatch_conn, updated_info)
         return update_resp
-#---------------------------------------------------------------------------------------------------------------------------            
-    def delete_metric_request(self,delete_info):
-        '''Comaptible API using normalized parameters'''
-        del_resp = self.metric.deleteMetrics(self.cloudwatch_conn,delete_info)
+
+    def delete_metric_request(self, delete_info):
+        """Compatible API using normalized parameters"""
+        del_resp = self.metric.deleteMetrics(self.cloudwatch_conn, delete_info)
         return del_resp
-#---------------------------------------------------------------------------------------------------------------------------  
-    def list_metrics_request(self,list_info):
-        '''Comaptible API using normalized parameters'''
-        list_resp = self.metric.listMetrics(self.cloudwatch_conn,list_info)
+
+    def list_metrics_request(self, list_info):
+        """Compatible API using normalized parameters"""
+        list_resp = self.metric.listMetrics(self.cloudwatch_conn, list_info)
         return list_resp
-#---------------------------------------------------------------------------------------------------------------------------                        
-    def read_metrics_data(self,list_info):
-        '''Comaptible API using normalized parameters
-        Read all metric data related to a specified metric'''
-        data_resp=self.metric.metricsData(self.cloudwatch_conn,list_info)
+
+    def read_metrics_data(self, list_info):
+        """Compatible API using normalized parameters
+        Read all metric data related to a specified metric"""
+        data_resp = self.metric.metricsData(self.cloudwatch_conn, list_info)
         return data_resp
-#--------------------------------------------------------------------------------------------------------------------------- 
 
-    def metric_calls(self,message,aws_conn):
+    def metric_calls(self, key: str, metric_info: dict, aws_conn: dict):
         """Gets the message from the common consumer"""
-        
+
         try:
             self.cloudwatch_conn = aws_conn['cloudwatch_connection']
             self.ec2_conn = aws_conn['ec2_connection']
 
-            metric_info = json.loads(message.value)
             metric_response = dict()
 
-            if metric_info['vim_type'] == 'AWS':
-                log.debug ("VIM support : AWS")
-
-            # Check the Functionlity that needs to be performed: topic = 'alarms'/'metrics'/'Access_Credentials'
-                if message.topic == "metric_request":
-                    log.info("Action required against: %s" % (message.topic))
-
-                    if message.key == "create_metric_request":                            
-                        if self.check_resource(metric_info['metric_create_request']['resource_uuid']) == True:
-                            metric_resp = self.create_metric_request(metric_info['metric_create_request']) #alarm_info = message.value
-                            metric_response['schema_version'] = metric_info['schema_version']
-                            metric_response['schema_type']    = "create_metric_response"
-                            metric_response['metric_create_response'] = metric_resp
-                            payload = json.dumps(metric_response)                                                                  
-                            file = open('../../core/models/create_metric_resp.json','wb').write((payload))
-                            self.producer.publish_metrics_response(key='create_metric_response', message=payload, topic ='metric_response')
-                            
-                            log.info("Metric configured: %s", metric_resp)
-                            return metric_response
-                            
-                    elif message.key == "update_metric_request":
-                        if self.check_resource(metric_info['metric_create_request']['resource_uuid']) == True:
-                            update_resp = self.update_metric_request(metric_info['metric_create_request'])
-                            metric_response['schema_version'] = metric_info['schema_version']
-                            metric_response['schema_type'] = "update_metric_response"
-                            metric_response['metric_update_response'] = update_resp
-                            payload = json.dumps(metric_response)                                                                                               
-                            file = open('../../core/models/update_metric_resp.json','wb').write((payload))
-                            self.producer.update_metric_response(key='update_metric_response',message=payload,topic = 'metric_response')
-
-                            log.info("Metric Updates: %s",metric_response)
-                            return metric_response
-                            
-                    elif message.key == "delete_metric_request":
-                        if self.check_resource(metric_info['resource_uuid']) == True:
-                            del_resp=self.delete_metric_request(metric_info)
-                            payload = json.dumps(del_resp)                                                                                               
-                            file = open('../../core/models/delete_metric_resp.json','wb').write((payload))
-                            self.producer.delete_metric_response(key='delete_metric_response',message=payload,topic = 'metric_response')
-
-                            log.info("Metric Deletion Not supported in AWS : %s",del_resp)
-                            return del_resp
-
-                    elif message.key == "list_metric_request": 
-                        if self.check_resource(metric_info['metrics_list_request']['resource_uuid']) == True:
-                            list_resp = self.list_metrics_request(metric_info['metrics_list_request'])
-                            metric_response['schema_version'] = metric_info['schema_version']
-                            metric_response['schema_type'] = "list_metric_response"
-                            metric_response['correlation_id'] = metric_info['metrics_list_request']['correlation_id']
-                            metric_response['vim_type'] = metric_info['vim_type']
-                            metric_response['metrics_list'] = list_resp
-                            payload = json.dumps(metric_response)                                                                                                
-                            file = open('../../core/models/list_metric_resp.json','wb').write((payload))
-                            self.producer.list_metric_response(key='list_metrics_response',message=payload,topic = 'metric_response')
-
-                            log.info("Metric List: %s",metric_response)
-                            return metric_response
-
-                    elif message.key == "read_metric_data_request":
-                        if self.check_resource(metric_info['resource_uuid']) == True:
-                            data_resp = self.read_metrics_data(metric_info)
-                            metric_response['schema_version'] = metric_info['schema_version']
-                            metric_response['schema_type'] = "read_metric_data_response"
-                            metric_response['metric_name'] = metric_info['metric_name']
-                            metric_response['metric_uuid'] = metric_info['metric_uuid']
-                            metric_response['correlation_id'] = metric_info['correlation_uuid']
-                            metric_response['resource_uuid'] = metric_info['resource_uuid']
-                            metric_response['tenant_uuid'] = metric_info['tenant_uuid']
-                            metric_response['metrics_data'] = data_resp
-                            payload = json.dumps(metric_response)                                                                
-                            file = open('../../core/models/read_metric_data_resp.json','wb').write((payload))
-                            self.producer.read_metric_data_response(key='read_metric_data_response',message=payload,topic = 'metric_response')
-                            
-                            log.info("Metric Data Response: %s",metric_response)
-                            return metric_response 
-
-                    else:
-                        log.debug("Unknown key, no action will be performed")
-                else:
-                    log.info("Message topic not relevant to this plugin: %s",
-                         message.topic)
-            
+            log.debug("VIM support : AWS")
+
+            if key == "create_metric_request":
+                if self.check_resource(metric_info['metric_create_request']['resource_uuid']):
+                    metric_resp = self.create_metric_request(
+                        metric_info['metric_create_request'])  # alarm_info = message.value
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "create_metric_response"
+                    metric_response['metric_create_response'] = metric_resp
+                    log.info("Metric configured: %s", metric_resp)
+                    return metric_response
+
+            elif key == "update_metric_request":
+                if self.check_resource(metric_info['metric_create_request']['resource_uuid']):
+                    update_resp = self.update_metric_request(metric_info['metric_create_request'])
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "update_metric_response"
+                    metric_response['metric_update_response'] = update_resp
+                    log.info("Metric Updates: %s", metric_response)
+                    return metric_response
+
+            elif key == "delete_metric_request":
+                if self.check_resource(metric_info['resource_uuid']):
+                    del_resp = self.delete_metric_request(metric_info)
+                    log.info("Metric Deletion Not supported in AWS : %s", del_resp)
+                    return del_resp
+
+            elif key == "list_metric_request":
+                if self.check_resource(metric_info['metrics_list_request']['resource_uuid']):
+                    list_resp = self.list_metrics_request(metric_info['metrics_list_request'])
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "list_metric_response"
+                    metric_response['correlation_id'] = metric_info['metrics_list_request']['correlation_id']
+                    metric_response['vim_type'] = metric_info['vim_type']
+                    metric_response['metrics_list'] = list_resp
+                    log.info("Metric List: %s", metric_response)
+                    return metric_response
+
+            elif key == "read_metric_data_request":
+                if self.check_resource(metric_info['resource_uuid']):
+                    data_resp = self.read_metrics_data(metric_info)
+                    metric_response['schema_version'] = metric_info['schema_version']
+                    metric_response['schema_type'] = "read_metric_data_response"
+                    metric_response['metric_name'] = metric_info['metric_name']
+                    metric_response['metric_uuid'] = metric_info['metric_uuid']
+                    metric_response['correlation_id'] = metric_info['correlation_uuid']
+                    metric_response['resource_uuid'] = metric_info['resource_uuid']
+                    metric_response['tenant_uuid'] = metric_info['tenant_uuid']
+                    metric_response['metrics_data'] = data_resp
+                    log.info("Metric Data Response: %s", metric_response)
+                    return metric_response
+
+            else:
+                raise UnsupportedOperation("Unknown key, no action will be performed")
+
         except Exception as e:
             log.error("Consumer exception: %s", str(e))
 
-#---------------------------------------------------------------------------------------------------------------------------
-    def check_resource(self,resource_uuid):
+    def check_resource(self, resource_uuid):
 
-        '''Checking the resource_uuid is present in EC2 instances'''
+        """Checking the resource_uuid is present in EC2 instances"""
         try:
             check_resp = dict()
             instances = self.ec2_conn.get_all_instance_status()
             status_resource = False
 
-            #resource_id
+            # resource_id
             for instance_id in instances:
                 instance_id = str(instance_id).split(':')[1]
                 if instance_id == resource_uuid:
@@ -179,10 +152,8 @@ class plugin_metrics():
                 else:
                     status_resource = False
 
-            #status
+            # status
             return status_resource
 
-        except Exception as e: 
-            log.error("Error in Plugin Inputs %s",str(e))          
-#---------------------------------------------------------------------------------------------------------------------------   
-
+        except Exception as e:
+            log.error("Error in Plugin Inputs %s", str(e))
diff --git a/osm_mon/plugins/OpenStack/Aodh/alarm_handler.py b/osm_mon/plugins/OpenStack/Aodh/alarm_handler.py
new file mode 100644 (file)
index 0000000..7e6347f
--- /dev/null
@@ -0,0 +1,390 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Carry out alarming requests via Aodh API."""
+
+import json
+import logging
+from io import UnsupportedOperation
+
+import six
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.Gnocchi.metric_handler import METRIC_MAPPINGS
+from osm_mon.plugins.OpenStack.common import Common
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+log = logging.getLogger(__name__)
+
+SEVERITIES = {
+    "warning": "low",
+    "minor": "low",
+    "major": "moderate",
+    "critical": "critical",
+    "indeterminate": "critical"}
+
+STATISTICS = {
+    "average": "mean",
+    "minimum": "min",
+    "maximum": "max",
+    "count": "count",
+    "sum": "sum"}
+
+
+class OpenstackAlarmHandler(object):
+    """Carries out alarming requests and responses via Aodh API."""
+
+    def __init__(self):
+        """Create the OpenStack alarming instance."""
+        self._database_manager = DatabaseManager()
+        self._auth_manager = AuthManager()
+        self._cfg = Config.instance()
+
+        # Use the Response class to generate valid json response messages
+        self._response = OpenStackResponseBuilder()
+
+    def handle_message(self, key: str, values: dict, vim_uuid: str):
+        """
+        Processes an alarm request message depending on its key
+        :param key: Kafka message key
+        :param values: Dict containing alarm request data. Follows models defined in core.models.
+        :param vim_uuid: UUID of the VIM to handle the alarm request.
+        :return: Dict containing alarm response data. Follows models defined in core.models.
+        """
+
+        log.info("OpenStack alarm action required.")
+
+        verify_ssl = self._auth_manager.is_verify_ssl(vim_uuid)
+
+        auth_token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
+
+        alarm_endpoint = Common.get_endpoint("alarming", vim_uuid, verify_ssl=verify_ssl)
+        metric_endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
+
+        vim_account = self._auth_manager.get_credentials(vim_uuid)
+        vim_config = json.loads(vim_account.config)
+
+        if key == "create_alarm_request":
+            alarm_details = values['alarm_create_request']
+            alarm_id = None
+            status = False
+            try:
+                metric_name = alarm_details['metric_name'].lower()
+                resource_id = alarm_details['resource_uuid']
+
+                self.check_for_metric(auth_token, metric_endpoint, metric_name, resource_id, verify_ssl)
+
+                alarm_id = self.configure_alarm(
+                    alarm_endpoint, auth_token, alarm_details, vim_config, verify_ssl)
+
+                log.info("Alarm successfully created")
+                self._database_manager.save_alarm(alarm_id,
+                                                  vim_uuid,
+                                                  alarm_details['threshold_value'],
+                                                  alarm_details['operation'].lower(),
+                                                  alarm_details['metric_name'].lower(),
+                                                  alarm_details['vdu_name'].lower(),
+                                                  alarm_details['vnf_member_index'],
+                                                  alarm_details['ns_id'].lower()
+                                                  )
+                status = True
+            except Exception as e:
+                log.exception("Error creating alarm")
+                raise e
+            finally:
+                return self._response.generate_response('create_alarm_response',
+                                                        cor_id=alarm_details['correlation_id'],
+                                                        status=status,
+                                                        alarm_id=alarm_id)
+
+        elif key == "list_alarm_request":
+            list_details = values['alarm_list_request']
+            alarm_list = None
+            try:
+                alarm_list = self.list_alarms(
+                    alarm_endpoint, auth_token, list_details, verify_ssl)
+            except Exception as e:
+                log.exception("Error listing alarms")
+                raise e
+            finally:
+                return self._response.generate_response('list_alarm_response',
+                                                        cor_id=list_details['correlation_id'],
+                                                        alarm_list=alarm_list)
+
+        elif key == "delete_alarm_request":
+            request_details = values['alarm_delete_request']
+            alarm_id = request_details['alarm_uuid']
+            status = False
+            try:
+                self.delete_alarm(
+                    alarm_endpoint, auth_token, alarm_id, verify_ssl)
+                status = True
+            except Exception as e:
+                log.exception("Error deleting alarm")
+                raise e
+            finally:
+                return self._response.generate_response('delete_alarm_response',
+                                                        cor_id=request_details['correlation_id'],
+                                                        status=status,
+                                                        alarm_id=alarm_id)
+
+        elif key == "acknowledge_alarm_request":
+            try:
+                alarm_id = values['ack_details']['alarm_uuid']
+
+                self.update_alarm_state(
+                    alarm_endpoint, auth_token, alarm_id, verify_ssl)
+
+                log.info("Acknowledged the alarm and cleared it.")
+            except Exception as e:
+                log.exception("Error acknowledging alarm")
+                raise
+            finally:
+                return None
+
+        elif key == "update_alarm_request":
+            # Update alarm configurations
+            alarm_details = values['alarm_update_request']
+            alarm_id = None
+            status = False
+            try:
+                alarm_id = self.update_alarm(
+                    alarm_endpoint, auth_token, alarm_details, vim_config, verify_ssl)
+                status = True
+            except Exception as e:
+                log.exception("Error updating alarm")
+                raise e
+            finally:
+                return self._response.generate_response('update_alarm_response',
+                                                        cor_id=alarm_details['correlation_id'],
+                                                        status=status,
+                                                        alarm_id=alarm_id)
+
+        else:
+            raise UnsupportedOperation("Unknown key {}, no action will be performed.".format(key))
+
+    def configure_alarm(self, alarm_endpoint, auth_token, values, vim_config, verify_ssl):
+        """Create requested alarm in Aodh."""
+        url = "{}/v2/alarms/".format(alarm_endpoint)
+
+        # Check if the desired alarm is supported
+        alarm_name = values['alarm_name'].lower()
+        metric_name = values['metric_name'].lower()
+        resource_id = values['resource_uuid']
+
+        if metric_name not in METRIC_MAPPINGS.keys():
+            raise KeyError("Metric {} is not supported.".format(metric_name))
+
+        if 'granularity' in vim_config and 'granularity' not in values:
+            values['granularity'] = vim_config['granularity']
+        payload = self.check_payload(values, metric_name, resource_id,
+                                     alarm_name)
+        new_alarm = Common.perform_request(
+            url, auth_token, req_type="post", payload=payload, verify_ssl=verify_ssl)
+        return json.loads(new_alarm.text)['alarm_id']
+
+    def delete_alarm(self, endpoint, auth_token, alarm_id, verify_ssl):
+        """Delete alarm function."""
+        url = "{}/v2/alarms/%s".format(endpoint) % alarm_id
+
+        result = Common.perform_request(
+            url, auth_token, req_type="delete", verify_ssl=verify_ssl)
+        if str(result.status_code) == "404":
+            raise ValueError("Alarm {} doesn't exist".format(alarm_id))
+
+    def list_alarms(self, endpoint, auth_token, list_details, verify_ssl):
+        """Generate the requested list of alarms."""
+        url = "{}/v2/alarms/".format(endpoint)
+        a_list, name_list, sev_list, res_list = [], [], [], []
+
+        # TODO(mcgoughh): for now resource_id is a mandatory field
+        # Check for a resource id
+        try:
+            resource = list_details['resource_uuid']
+            name = list_details['alarm_name'].lower()
+            severity = list_details['severity'].lower()
+            sev = SEVERITIES[severity]
+        except KeyError as e:
+            log.warning("Missing parameter for alarm list request: %s", e)
+            raise e
+
+        # Perform the request to get the desired list
+        try:
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+
+            if result is not None:
+                # Get list based on resource id
+                for alarm in json.loads(result.text):
+                    rule = alarm['gnocchi_resources_threshold_rule']
+                    if resource == rule['resource_id']:
+                        res_list.append(alarm['alarm_id'])
+
+                # Generate the specified list if requested
+                if name is not None and sev is not None:
+                    log.info("Return a list of %s alarms with %s severity.",
+                             name, sev)
+                    for alarm in json.loads(result.text):
+                        if name == alarm['name']:
+                            name_list.append(alarm['alarm_id'])
+                    for alarm in json.loads(result.text):
+                        if sev == alarm['severity']:
+                            sev_list.append(alarm['alarm_id'])
+                    name_sev_list = list(set(name_list).intersection(sev_list))
+                    a_list = list(set(name_sev_list).intersection(res_list))
+                elif name is not None:
+                    log.info("Returning a %s list of alarms.", name)
+                    for alarm in json.loads(result.text):
+                        if name == alarm['name']:
+                            name_list.append(alarm['alarm_id'])
+                    a_list = list(set(name_list).intersection(res_list))
+                elif sev is not None:
+                    log.info("Returning %s severity alarm list.", sev)
+                    for alarm in json.loads(result.text):
+                        if sev == alarm['severity']:
+                            sev_list.append(alarm['alarm_id'])
+                    a_list = list(set(sev_list).intersection(res_list))
+                else:
+                    log.info("Returning an entire list of alarms.")
+                    a_list = res_list
+            else:
+                log.info("There are no alarms!")
+            response_list = []
+            for alarm in json.loads(result.text):
+                if alarm['alarm_id'] in a_list:
+                    response_list.append(alarm)
+            return response_list
+
+        except Exception as e:
+            log.exception("Failed to generate alarm list: ")
+            raise e
+
+    def update_alarm_state(self, endpoint, auth_token, alarm_id, verify_ssl):
+        """Set the state of an alarm to ok when ack message is received."""
+        url = "{}/v2/alarms/%s/state".format(endpoint) % alarm_id
+        payload = json.dumps("ok")
+
+        result = Common.perform_request(
+            url, auth_token, req_type="put", payload=payload, verify_ssl=verify_ssl)
+
+        return json.loads(result.text)
+
+    def update_alarm(self, endpoint, auth_token, values, vim_config, verify_ssl):
+        """Get alarm name for an alarm configuration update."""
+        # Get already existing alarm details
+        url = "{}/v2/alarms/%s".format(endpoint) % values['alarm_uuid']
+
+        # Gets current configurations about the alarm
+        result = Common.perform_request(
+            url, auth_token, req_type="get")
+        alarm_name = json.loads(result.text)['name']
+        rule = json.loads(result.text)['gnocchi_resources_threshold_rule']
+        alarm_state = json.loads(result.text)['state']
+        resource_id = rule['resource_id']
+        metric_name = [key for key, value in six.iteritems(METRIC_MAPPINGS) if value == rule['metric']][0]
+
+        # Generates and check payload configuration for alarm update
+        if 'granularity' in vim_config and 'granularity' not in values:
+            values['granularity'] = vim_config['granularity']
+        payload = self.check_payload(values, metric_name, resource_id,
+                                     alarm_name, alarm_state=alarm_state)
+
+        # Updates the alarm configurations with the valid payload
+        update_alarm = Common.perform_request(
+            url, auth_token, req_type="put", payload=payload, verify_ssl=verify_ssl)
+
+        return json.loads(update_alarm.text)['alarm_id']
+
+    def check_payload(self, values, metric_name, resource_id,
+                      alarm_name, alarm_state=None):
+        """Check that the payload is configuration for update/create alarm."""
+        cfg = Config.instance()
+        # Check state and severity
+
+        severity = 'critical'
+        if 'severity' in values:
+            severity = values['severity'].lower()
+
+        if severity == "indeterminate":
+            alarm_state = "insufficient data"
+        if alarm_state is None:
+            alarm_state = "ok"
+
+        statistic = values['statistic'].lower()
+
+        granularity = cfg.OS_DEFAULT_GRANULARITY
+        if 'granularity' in values:
+            granularity = values['granularity']
+
+        resource_type = 'generic'
+        if 'resource_type' in values:
+            resource_type = values['resource_type'].lower()
+
+        # Try to configure the payload for the update/create request
+        # Can only update: threshold, operation, statistic and
+        # the severity of the alarm
+        rule = {'threshold': values['threshold_value'],
+                'comparison_operator': values['operation'].lower(),
+                'metric': METRIC_MAPPINGS[metric_name],
+                'resource_id': resource_id,
+                'resource_type': resource_type,
+                'aggregation_method': STATISTICS[statistic],
+                'granularity': granularity, }
+        payload = json.dumps({'state': alarm_state,
+                              'name': alarm_name,
+                              'severity': SEVERITIES[severity],
+                              'type': 'gnocchi_resources_threshold',
+                              'gnocchi_resources_threshold_rule': rule,
+                              'alarm_actions': [cfg.OS_NOTIFIER_URI],
+                              'repeat_actions': True}, sort_keys=True)
+        return payload
+
+    def get_alarm_state(self, endpoint, auth_token, alarm_id):
+        """Get the state of the alarm."""
+        url = "{}/v2/alarms/%s/state".format(endpoint) % alarm_id
+
+        alarm_state = Common.perform_request(
+            url, auth_token, req_type="get")
+        return json.loads(alarm_state.text)
+
+    def check_for_metric(self, auth_token, metric_endpoint, metric_name, resource_id, verify_ssl):
+        """
+        Checks if resource has a specific metric. If not, throws exception.
+        :param verify_ssl: Boolean flag to set SSL cert validation
+        :param auth_token: OpenStack auth token
+        :param metric_endpoint: OpenStack metric endpoint
+        :param metric_name: Metric name
+        :param resource_id: Resource UUID
+        :return: Metric details from resource
+        :raise Exception: Could not retrieve metric from resource
+        """
+        try:
+            url = "{}/v1/resource/generic/{}".format(metric_endpoint, resource_id)
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+            resource = json.loads(result.text)
+            metrics_dict = resource['metrics']
+            return metrics_dict[METRIC_MAPPINGS[metric_name]]
+        except Exception as e:
+            log.exception("Desired Gnocchi metric not found: %s", e)
+            raise e
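
For orientation outside the diff itself: a minimal, self-contained sketch of the Aodh payload that check_payload() above assembles from an OSM alarm request. All concrete values (resource id, alarm name, notifier URI, granularity) are hypothetical and not taken from this change.

    import json

    METRIC_MAPPINGS = {"cpu_utilization": "cpu_util"}   # subset of the module-level mapping
    SEVERITIES = {"critical": "critical"}
    STATISTICS = {"average": "mean"}

    values = {"threshold_value": 80, "operation": "GT",
              "statistic": "AVERAGE", "severity": "CRITICAL"}

    rule = {"threshold": values["threshold_value"],
            "comparison_operator": values["operation"].lower(),
            "metric": METRIC_MAPPINGS["cpu_utilization"],
            "resource_id": "11111111-2222-3333-4444-555555555555",
            "resource_type": "generic",
            "aggregation_method": STATISTICS[values["statistic"].lower()],
            "granularity": 300}

    payload = json.dumps({"state": "ok",
                          "name": "cpu_util_above_80",
                          "severity": SEVERITIES[values["severity"].lower()],
                          "type": "gnocchi_resources_threshold",
                          "gnocchi_resources_threshold_rule": rule,
                          "alarm_actions": ["http://localhost:8662"],
                          "repeat_actions": True}, sort_keys=True)

    print(payload)  # body of the request sent via Common.perform_request()
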
diff --git a/osm_mon/plugins/OpenStack/Aodh/alarming.py b/osm_mon/plugins/OpenStack/Aodh/alarming.py
deleted file mode 100644 (file)
index cea12ba..0000000
+++ /dev/null
@@ -1,399 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Carry out alarming requests via Aodh API."""
-
-import json
-import logging
-
-import six
-import yaml
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import DatabaseManager
-from osm_mon.core.message_bus.producer import KafkaProducer
-from osm_mon.core.settings import Config
-from osm_mon.plugins.OpenStack.Gnocchi.metrics import METRIC_MAPPINGS
-from osm_mon.plugins.OpenStack.common import Common
-from osm_mon.plugins.OpenStack.response import OpenStack_Response
-
-log = logging.getLogger(__name__)
-
-SEVERITIES = {
-    "warning": "low",
-    "minor": "low",
-    "major": "moderate",
-    "critical": "critical",
-    "indeterminate": "critical"}
-
-STATISTICS = {
-    "average": "mean",
-    "minimum": "min",
-    "maximum": "max",
-    "count": "count",
-    "sum": "sum"}
-
-
-class Alarming(object):
-    """Carries out alarming requests and responses via Aodh API."""
-
-    def __init__(self):
-        """Create the OpenStack alarming instance."""
-        self._database_manager = DatabaseManager()
-        self._auth_manager = AuthManager()
-
-        # Use the Response class to generate valid json response messages
-        self._response = OpenStack_Response()
-
-        # Initializer a producer to send responses back to SO
-        self._producer = KafkaProducer("alarm_response")
-
-    def alarming(self, message, vim_uuid):
-        """
-        Processes alarm request message depending on it's key
-        :param message: Message containing key and value attributes. This last one can be in JSON or YAML format.
-        :param vim_uuid: UUID of the VIM to handle the alarm request.
-        :return:
-        """
-        try:
-            values = json.loads(message.value)
-        except ValueError:
-            values = yaml.safe_load(message.value)
-
-        log.info("OpenStack alarm action required.")
-
-        auth_token = Common.get_auth_token(vim_uuid)
-
-        alarm_endpoint = Common.get_endpoint("alarming", vim_uuid)
-        metric_endpoint = Common.get_endpoint("metric", vim_uuid)
-
-        vim_account = self._auth_manager.get_credentials(vim_uuid)
-        vim_config = json.loads(vim_account.config)
-
-        if message.key == "create_alarm_request":
-            alarm_details = values['alarm_create_request']
-            alarm_id = None
-            status = False
-            try:
-                metric_name = alarm_details['metric_name'].lower()
-                resource_id = alarm_details['resource_uuid']
-
-                self.check_for_metric(auth_token, metric_endpoint, metric_name, resource_id)
-
-                alarm_id = self.configure_alarm(
-                    alarm_endpoint, auth_token, alarm_details, vim_config)
-
-                log.info("Alarm successfully created")
-                self._database_manager.save_alarm(alarm_id,
-                                                  vim_uuid,
-                                                  alarm_details['threshold_value'],
-                                                  alarm_details['operation'].lower(),
-                                                  alarm_details['metric_name'].lower(),
-                                                  alarm_details['vdu_name'].lower(),
-                                                  alarm_details['vnf_member_index'],
-                                                  alarm_details['ns_id'].lower()
-                                                  )
-                status = True
-            except Exception as e:
-                log.exception("Error creating alarm")
-                raise e
-            finally:
-                self._generate_and_send_response('create_alarm_response',
-                                                 alarm_details['correlation_id'],
-                                                 status=status,
-                                                 alarm_id=alarm_id)
-
-        elif message.key == "list_alarm_request":
-            list_details = values['alarm_list_request']
-            alarm_list = None
-            try:
-                alarm_list = self.list_alarms(
-                    alarm_endpoint, auth_token, list_details)
-            except Exception as e:
-                log.exception("Error listing alarms")
-                raise e
-            finally:
-                self._generate_and_send_response('list_alarm_response',
-                                                 list_details['correlation_id'],
-                                                 alarm_list=alarm_list)
-
-        elif message.key == "delete_alarm_request":
-            request_details = values['alarm_delete_request']
-            alarm_id = request_details['alarm_uuid']
-            status = False
-            try:
-                self.delete_alarm(
-                    alarm_endpoint, auth_token, alarm_id)
-                status = True
-            except Exception as e:
-                log.exception("Error deleting alarm")
-                raise e
-            finally:
-                self._generate_and_send_response('delete_alarm_response',
-                                                 request_details['correlation_id'],
-                                                 status=status,
-                                                 alarm_id=alarm_id)
-
-        elif message.key == "acknowledge_alarm":
-            try:
-                alarm_id = values['ack_details']['alarm_uuid']
-
-                self.update_alarm_state(
-                    alarm_endpoint, auth_token, alarm_id)
-
-                log.info("Acknowledged the alarm and cleared it.")
-            except Exception as e:
-                log.exception("Error acknowledging alarm")
-                raise e
-
-        elif message.key == "update_alarm_request":
-            # Update alarm configurations
-            alarm_details = values['alarm_update_request']
-            alarm_id = None
-            status = False
-            try:
-                alarm_id = self.update_alarm(
-                    alarm_endpoint, auth_token, alarm_details, vim_config)
-                status = True
-            except Exception as e:
-                log.exception("Error updating alarm")
-                raise e
-            finally:
-                self._generate_and_send_response('update_alarm_response',
-                                                 alarm_details['correlation_id'],
-                                                 status=status,
-                                                 alarm_id=alarm_id)
-
-        else:
-            log.debug("Unknown key, no action will be performed")
-
-    def configure_alarm(self, alarm_endpoint, auth_token, values, vim_config):
-        """Create requested alarm in Aodh."""
-        url = "{}/v2/alarms/".format(alarm_endpoint)
-
-        # Check if the desired alarm is supported
-        alarm_name = values['alarm_name'].lower()
-        metric_name = values['metric_name'].lower()
-        resource_id = values['resource_uuid']
-
-        if metric_name not in METRIC_MAPPINGS.keys():
-            raise KeyError("Metric {} is not supported.".format(metric_name))
-
-        if 'granularity' in vim_config and 'granularity' not in values:
-            values['granularity'] = vim_config['granularity']
-        payload = self.check_payload(values, metric_name, resource_id,
-                                     alarm_name)
-        new_alarm = Common.perform_request(
-            url, auth_token, req_type="post", payload=payload)
-        return json.loads(new_alarm.text)['alarm_id']
-
-    def delete_alarm(self, endpoint, auth_token, alarm_id):
-        """Delete alarm function."""
-        url = "{}/v2/alarms/%s".format(endpoint) % alarm_id
-
-        result = Common.perform_request(
-            url, auth_token, req_type="delete")
-        if str(result.status_code) == "404":
-            raise ValueError("Alarm {} doesn't exist".format(alarm_id))
-
-    def list_alarms(self, endpoint, auth_token, list_details):
-        """Generate the requested list of alarms."""
-        url = "{}/v2/alarms/".format(endpoint)
-        a_list, name_list, sev_list, res_list = [], [], [], []
-
-        # TODO(mcgoughh): for now resource_id is a mandatory field
-        # Check for a resource id
-        try:
-            resource = list_details['resource_uuid']
-            name = list_details['alarm_name'].lower()
-            severity = list_details['severity'].lower()
-            sev = SEVERITIES[severity]
-        except KeyError as e:
-            log.warning("Missing parameter for alarm list request: %s", e)
-            raise e
-
-        # Perform the request to get the desired list
-        try:
-            result = Common.perform_request(
-                url, auth_token, req_type="get")
-
-            if result is not None:
-                # Get list based on resource id
-                for alarm in json.loads(result.text):
-                    rule = alarm['gnocchi_resources_threshold_rule']
-                    if resource == rule['resource_id']:
-                        res_list.append(alarm['alarm_id'])
-
-                # Generate specified listed if requested
-                if name is not None and sev is not None:
-                    log.info("Return a list of %s alarms with %s severity.",
-                             name, sev)
-                    for alarm in json.loads(result.text):
-                        if name == alarm['name']:
-                            name_list.append(alarm['alarm_id'])
-                    for alarm in json.loads(result.text):
-                        if sev == alarm['severity']:
-                            sev_list.append(alarm['alarm_id'])
-                    name_sev_list = list(set(name_list).intersection(sev_list))
-                    a_list = list(set(name_sev_list).intersection(res_list))
-                elif name is not None:
-                    log.info("Returning a %s list of alarms.", name)
-                    for alarm in json.loads(result.text):
-                        if name == alarm['name']:
-                            name_list.append(alarm['alarm_id'])
-                    a_list = list(set(name_list).intersection(res_list))
-                elif sev is not None:
-                    log.info("Returning %s severity alarm list.", sev)
-                    for alarm in json.loads(result.text):
-                        if sev == alarm['severity']:
-                            sev_list.append(alarm['alarm_id'])
-                    a_list = list(set(sev_list).intersection(res_list))
-                else:
-                    log.info("Returning an entire list of alarms.")
-                    a_list = res_list
-            else:
-                log.info("There are no alarms!")
-            response_list = []
-            for alarm in json.loads(result.text):
-                if alarm['alarm_id'] in a_list:
-                    response_list.append(alarm)
-            return response_list
-
-        except Exception as e:
-            log.exception("Failed to generate alarm list: ")
-            raise e
-
-    def update_alarm_state(self, endpoint, auth_token, alarm_id):
-        """Set the state of an alarm to ok when ack message is received."""
-        url = "{}/v2/alarms/%s/state".format(endpoint) % alarm_id
-        payload = json.dumps("ok")
-
-        Common.perform_request(
-            url, auth_token, req_type="put", payload=payload)
-
-    def update_alarm(self, endpoint, auth_token, values, vim_config):
-        """Get alarm name for an alarm configuration update."""
-        # Get already existing alarm details
-        url = "{}/v2/alarms/%s".format(endpoint) % values['alarm_uuid']
-
-        # Gets current configurations about the alarm
-        result = Common.perform_request(
-            url, auth_token, req_type="get")
-        alarm_name = json.loads(result.text)['name']
-        rule = json.loads(result.text)['gnocchi_resources_threshold_rule']
-        alarm_state = json.loads(result.text)['state']
-        resource_id = rule['resource_id']
-        metric_name = [key for key, value in six.iteritems(METRIC_MAPPINGS) if value == rule['metric']][0]
-
-        # Generates and check payload configuration for alarm update
-        if 'granularity' in vim_config and 'granularity' not in values:
-            values['granularity'] = vim_config['granularity']
-        payload = self.check_payload(values, metric_name, resource_id,
-                                     alarm_name, alarm_state=alarm_state)
-
-        # Updates the alarm configurations with the valid payload
-        update_alarm = Common.perform_request(
-            url, auth_token, req_type="put", payload=payload)
-
-        return json.loads(update_alarm.text)['alarm_id']
-
-    def check_payload(self, values, metric_name, resource_id,
-                      alarm_name, alarm_state=None):
-        """Check that the payload is configuration for update/create alarm."""
-        cfg = Config.instance()
-        # Check state and severity
-
-        severity = 'critical'
-        if 'severity' in values:
-            severity = values['severity'].lower()
-
-        if severity == "indeterminate":
-            alarm_state = "insufficient data"
-        if alarm_state is None:
-            alarm_state = "ok"
-
-        statistic = values['statistic'].lower()
-
-        granularity = cfg.OS_DEFAULT_GRANULARITY
-        if 'granularity' in values:
-            granularity = values['granularity']
-
-        resource_type = 'generic'
-        if 'resource_type' in values:
-            resource_type = values['resource_type'].lower()
-
-        # Try to configure the payload for the update/create request
-        # Can only update: threshold, operation, statistic and
-        # the severity of the alarm
-        rule = {'threshold': values['threshold_value'],
-                'comparison_operator': values['operation'].lower(),
-                'metric': METRIC_MAPPINGS[metric_name],
-                'resource_id': resource_id,
-                'resource_type': resource_type,
-                'aggregation_method': STATISTICS[statistic],
-                'granularity': granularity, }
-        payload = json.dumps({'state': alarm_state,
-                              'name': alarm_name,
-                              'severity': SEVERITIES[severity],
-                              'type': 'gnocchi_resources_threshold',
-                              'gnocchi_resources_threshold_rule': rule,
-                              'alarm_actions': [cfg.OS_NOTIFIER_URI], }, sort_keys=True)
-        return payload
-
-    def get_alarm_state(self, endpoint, auth_token, alarm_id):
-        """Get the state of the alarm."""
-        url = "{}/v2/alarms/%s/state".format(endpoint) % alarm_id
-
-        alarm_state = Common.perform_request(
-            url, auth_token, req_type="get")
-        return json.loads(alarm_state.text)
-
-    def check_for_metric(self, auth_token, metric_endpoint, metric_name, resource_id):
-        """
-        Checks if resource has a specific metric. If not, throws exception.
-        :param auth_token: OpenStack auth token
-        :param metric_endpoint: OpenStack metric endpoint
-        :param metric_name: Metric name
-        :param resource_id: Resource UUID
-        :return: Metric details from resource
-        :raise Exception: Could not retrieve metric from resource
-        """
-        try:
-            url = "{}/v1/resource/generic/{}".format(metric_endpoint, resource_id)
-            result = Common.perform_request(
-                url, auth_token, req_type="get")
-            resource = json.loads(result.text)
-            metrics_dict = resource['metrics']
-            return metrics_dict[METRIC_MAPPINGS[metric_name]]
-        except Exception as e:
-            log.exception("Desired Gnocchi metric not found:", e)
-            raise e
-
-    def _generate_and_send_response(self, key, correlation_id, **kwargs):
-        try:
-            resp_message = self._response.generate_response(
-                key, cor_id=correlation_id, **kwargs)
-            log.info("Response Message: %s", resp_message)
-            self._producer.publish_alarm_response(
-                key, resp_message)
-        except Exception as e:
-            log.exception("Response creation failed:")
-            raise e
diff --git a/osm_mon/plugins/OpenStack/Aodh/notifier.py b/osm_mon/plugins/OpenStack/Aodh/notifier.py
index 1de3284..71c6c1c 100644 (file)
@@ -25,6 +25,7 @@
 import json
 import logging
 import os
+import re
 import sys
 import time
 
@@ -32,19 +33,29 @@ from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
 from six.moves.BaseHTTPServer import HTTPServer
 
 # Initialise a logger for alarm notifier
+from osm_mon.core.message_bus.producer import Producer
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
 
 logging.basicConfig(stream=sys.stdout,
-                    format='%(asctime)s %(message)s',
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                     datefmt='%m/%d/%Y %I:%M:%S %p',
-                    level=logging.INFO)
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
 log = logging.getLogger(__name__)
 
+kafka_logger = logging.getLogger('kafka')
+kafka_logger.setLevel(logging.getLevelName(cfg.OSMMON_KAFKA_LOG_LEVEL))
+kafka_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+kafka_handler = logging.StreamHandler(sys.stdout)
+kafka_handler.setFormatter(kafka_formatter)
+kafka_logger.addHandler(kafka_handler)
+
 sys.path.append(os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..', '..')))
 
 from osm_mon.core.database import DatabaseManager
-from osm_mon.core.message_bus.producer import KafkaProducer
 
-from osm_mon.plugins.OpenStack.response import OpenStack_Response
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
 
 
 class NotifierHandler(BaseHTTPRequestHandler):
@@ -84,8 +95,7 @@ class NotifierHandler(BaseHTTPRequestHandler):
         """Sends alarm notification message to bus."""
 
         # Initialise configuration and authentication for response message
-        response = OpenStack_Response()
-        producer = KafkaProducer('alarm_response')
+        response = OpenStackResponseBuilder()
 
         database_manager = DatabaseManager()
 
@@ -97,7 +107,7 @@ class NotifierHandler(BaseHTTPRequestHandler):
         # Generate and send response
         resp_message = response.generate_response(
             'notify_alarm',
-            a_id=alarm_id,
+            alarm_id=alarm_id,
             vdu_name=alarm.vdu_name,
             vnf_member_index=alarm.vnf_member_index,
             ns_id=alarm.ns_id,
@@ -107,17 +117,20 @@ class NotifierHandler(BaseHTTPRequestHandler):
             sev=values['severity'],
             date=a_date,
             state=values['current'])
-        producer.publish_alarm_response(
-            'notify_alarm', resp_message)
+        self._publish_response('notify_alarm', json.dumps(resp_message))
         log.info("Sent alarm notification: %s", resp_message)
 
+    def _publish_response(self, key: str, msg: str):
+        producer = Producer()
+        producer.send(topic='alarm_response', key=key, value=msg)
+        producer.flush()
+
 
 def run(server_class=HTTPServer, handler_class=NotifierHandler, port=8662):
     """Run the webserver application to retrieve alarm notifications."""
     try:
         server_address = ('', port)
         httpd = server_class(server_address, handler_class)
-        print('Starting alarm notifier...')
         log.info("Starting alarm notifier server on port: %s", port)
         httpd.serve_forever()
     except Exception as exc:
@@ -125,10 +138,11 @@ def run(server_class=HTTPServer, handler_class=NotifierHandler, port=8662):
 
 
 if __name__ == "__main__":
-    from sys import argv
-
-    # Runs the webserver
-    if len(argv) == 2:
-        run(port=int(argv[1]))
+    cfg = Config.instance()
+    p = re.compile(r':(\d+)')
+    m = p.search(cfg.OS_NOTIFIER_URI)
+    if m:
+        port = m.group(1)
+        run(port=int(port))
     else:
         run()
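
The change above replaces the command-line port argument with a port derived from the configured OS_NOTIFIER_URI. A hedged, standalone sketch of that extraction (the URI value is hypothetical):

    import re

    notifier_uri = "http://10.0.2.15:8662"           # stands in for cfg.OS_NOTIFIER_URI
    match = re.search(r':(\d+)', notifier_uri)
    port = int(match.group(1)) if match else 8662    # 8662 is the default used by run()
    print(port)                                      # -> 8662
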
diff --git a/osm_mon/plugins/OpenStack/Gnocchi/metric_handler.py b/osm_mon/plugins/OpenStack/Gnocchi/metric_handler.py
new file mode 100644 (file)
index 0000000..91dc402
--- /dev/null
@@ -0,0 +1,456 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Carry out OpenStack metric requests via Gnocchi API."""
+
+import datetime
+import json
+import logging
+import time
+
+import six
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.common import Common
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+log = logging.getLogger(__name__)
+
+METRIC_MAPPINGS = {
+    "average_memory_utilization": "memory.usage",
+    "disk_read_ops": "disk.read.requests",
+    "disk_write_ops": "disk.write.requests",
+    "disk_read_bytes": "disk.read.bytes",
+    "disk_write_bytes": "disk.write.bytes",
+    "packets_dropped": "interface.if_dropped",
+    "packets_received": "interface.if_packets",
+    "packets_sent": "interface.if_packets",
+    "cpu_utilization": "cpu_util",
+}
+
+PERIOD_MS = {
+    "HR": 3600000,
+    "DAY": 86400000,
+    "WEEK": 604800000,
+    "MONTH": 2629746000,
+    "YEAR": 31556952000
+}
+
+
+class OpenstackMetricHandler(object):
+    """OpenStack metric requests performed via the Gnocchi API."""
+
+    def __init__(self):
+        """Initialize the metric actions."""
+        self._cfg = Config.instance()
+
+        # Use the Response class to generate valid json response messages
+        self._response = OpenStackResponseBuilder()
+
+        self._auth_manager = AuthManager()
+
+    def handle_request(self, key: str, values: dict, vim_uuid: str) -> dict:
+        """
+        Processes a metric request message depending on its key
+        :param key: Kafka message key
+        :param values: Dict containing metric request data. Follows models defined in core.models.
+        :param vim_uuid: UUID of the VIM to handle the metric request.
+        :return: Dict containing metric response data. Follows models defined in core.models.
+        """
+
+        log.info("OpenStack metric action required.")
+
+        if 'metric_name' in values and values['metric_name'] not in METRIC_MAPPINGS.keys():
+            raise ValueError('Metric ' + values['metric_name'] + ' is not supported.')
+
+        verify_ssl = self._auth_manager.is_verify_ssl(vim_uuid)
+
+        endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
+
+        auth_token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
+
+        if key == "create_metric_request":
+            metric_details = values['metric_create_request']
+            status = False
+            metric_id = None
+            resource_id = None
+            try:
+                # Configure metric
+                metric_id, resource_id = self.configure_metric(endpoint, auth_token, metric_details, verify_ssl)
+                log.info("Metric successfully created")
+                status = True
+            except Exception as e:
+                log.exception("Error creating metric")
+                raise e
+            finally:
+                return self._response.generate_response('create_metric_response',
+                                                        cor_id=metric_details['correlation_id'],
+                                                        status=status,
+                                                        metric_id=metric_id,
+                                                        resource_id=resource_id)
+
+        elif key == "read_metric_data_request":
+            metric_id = None
+            timestamps = []
+            metric_data = []
+            status = False
+            try:
+                metric_id = self.get_metric_id(endpoint,
+                                               auth_token,
+                                               METRIC_MAPPINGS[values['metric_name']],
+                                               values['resource_uuid'],
+                                               verify_ssl)
+                # Read all metric data related to a specified metric
+                timestamps, metric_data = self.read_metric_data(endpoint, auth_token, values, verify_ssl)
+                log.info("Metric data collected successfully")
+                status = True
+            except Exception as e:
+                log.exception("Error reading metric data")
+                raise e
+            finally:
+                return self._response.generate_response('read_metric_data_response',
+                                                        cor_id=values['correlation_id'],
+                                                        status=status,
+                                                        metric_id=metric_id,
+                                                        metric_name=values['metric_name'],
+                                                        resource_id=values['resource_uuid'],
+                                                        times=timestamps,
+                                                        metrics=metric_data)
+
+        elif key == "delete_metric_request":
+            metric_id = None
+            status = False
+            try:
+                # delete the specified metric in the request
+                metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
+                                               values['resource_uuid'], verify_ssl)
+                self.delete_metric(
+                    endpoint, auth_token, metric_id, verify_ssl)
+                log.info("Metric deleted successfully")
+                status = True
+
+            except Exception as e:
+                log.exception("Error deleting metric")
+                raise e
+            finally:
+                return self._response.generate_response('delete_metric_response',
+                                                        cor_id=values['correlation_id'],
+                                                        metric_id=metric_id,
+                                                        metric_name=values['metric_name'],
+                                                        status=status,
+                                                        resource_id=values['resource_uuid'])
+
+        elif key == "update_metric_request":
+            # Gnocchi doesn't support configuration updates
+            # Log and send a response back to this effect
+            log.warning("Gnocchi doesn't support metric configuration updates.")
+            req_details = values['metric_update_request']
+            metric_name = req_details['metric_name']
+            resource_id = req_details['resource_uuid']
+            metric_id = self.get_metric_id(endpoint, auth_token, metric_name, resource_id, verify_ssl)
+            return self._response.generate_response('update_metric_response',
+                                                    cor_id=req_details['correlation_id'],
+                                                    status=False,
+                                                    resource_id=resource_id,
+                                                    metric_id=metric_id)
+
+        elif key == "list_metric_request":
+            list_details = values['metrics_list_request']
+            metric_list = []
+            status = False
+            try:
+                metric_list = self.list_metrics(
+                    endpoint, auth_token, list_details, verify_ssl)
+                log.info("Metrics listed successfully")
+                status = True
+            except Exception as e:
+                log.exception("Error listing metrics")
+                raise e
+            finally:
+                return self._response.generate_response('list_metric_response',
+                                                        cor_id=list_details['correlation_id'],
+                                                        status=status,
+                                                        metric_list=metric_list)
+
+        else:
+            raise ValueError("Unknown key {}, no action will be performed.".format(key))
+
+    def configure_metric(self, endpoint, auth_token, values, verify_ssl):
+        """Create the new metric in Gnocchi."""
+        required_fields = ['resource_uuid', 'metric_name']
+        for field in required_fields:
+            if field not in values:
+                raise ValueError("Missing field: " + field)
+
+        resource_id = values['resource_uuid']
+        metric_name = values['metric_name'].lower()
+
+        # Check for an existing metric for this resource
+        metric_id = self.get_metric_id(
+            endpoint, auth_token, metric_name, resource_id, verify_ssl)
+
+        if metric_id is None:
+            # Try appending metric to existing resource
+            try:
+                base_url = "{}/v1/resource/generic/%s/metric"
+                res_url = base_url.format(endpoint) % resource_id
+                payload = {metric_name: {'archive_policy_name': 'high',
+                                         'unit': values['metric_unit']}}
+                result = Common.perform_request(
+                    res_url,
+                    auth_token,
+                    req_type="post",
+                    verify_ssl=verify_ssl,
+                    payload=json.dumps(payload, sort_keys=True))
+                # Get id of newly created metric
+                for row in json.loads(result.text):
+                    if row['name'] == metric_name:
+                        metric_id = row['id']
+                log.info("Appended metric to existing resource.")
+
+                return metric_id, resource_id
+            except Exception as exc:
+                # The resource does not exist in Gnocchi yet; create a new one
+                log.info("Failed to append metric to existing resource: %s",
+                         exc)
+                url = "{}/v1/resource/generic".format(endpoint)
+                metric = {'name': metric_name,
+                          'archive_policy_name': 'high',
+                          'unit': values['metric_unit'], }
+
+                resource_payload = json.dumps({'id': resource_id,
+                                               'metrics': {
+                                                   metric_name: metric}}, sort_keys=True)
+
+                resource = Common.perform_request(
+                    url,
+                    auth_token,
+                    req_type="post",
+                    payload=resource_payload,
+                    verify_ssl=verify_ssl)
+
+                # Return the newly created resource_id for creating alarms
+                new_resource_id = json.loads(resource.text)['id']
+                log.info("Created new resource for metric: %s",
+                         new_resource_id)
+
+                metric_id = self.get_metric_id(
+                    endpoint, auth_token, metric_name, new_resource_id, verify_ssl)
+
+                return metric_id, new_resource_id
+
+        else:
+            raise ValueError("Metric already exists for this resource")
+
+    def delete_metric(self, endpoint, auth_token, metric_id, verify_ssl):
+        """Delete metric."""
+        url = "{}/v1/metric/%s".format(endpoint) % metric_id
+
+        result = Common.perform_request(
+            url,
+            auth_token,
+            req_type="delete",
+            verify_ssl=verify_ssl)
+        if not str(result.status_code).startswith("2"):
+            log.warning("Failed to delete the metric.")
+            raise ValueError("Error deleting metric. Aodh API responded with code " + str(result.status_code))
+
+    def list_metrics(self, endpoint, auth_token, values, verify_ssl):
+        """List all metrics."""
+
+        # Check for a specified list
+        metric_name = None
+        if 'metric_name' in values:
+            metric_name = values['metric_name'].lower()
+
+        resource = None
+        if 'resource_uuid' in values:
+            resource = values['resource_uuid']
+
+        if resource:
+            url = "{}/v1/resource/generic/{}".format(endpoint, resource)
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+            resource_data = json.loads(result.text)
+            metrics = resource_data['metrics']
+
+            if metric_name:
+                if metrics.get(METRIC_MAPPINGS[metric_name]):
+                    metric_id = metrics[METRIC_MAPPINGS[metric_name]]
+                    url = "{}/v1/metric/{}".format(endpoint, metric_id)
+                    result = Common.perform_request(
+                        url, auth_token, req_type="get", verify_ssl=verify_ssl)
+                    metric_list = json.loads(result.text)
+                    log.info("Returning an %s resource list for %s metrics",
+                             metric_name, resource)
+                    return metric_list
+                else:
+                    log.info("Metric {} not found for {} resource".format(metric_name, resource))
+                    return []
+            else:
+                metric_list = []
+                for k, v in metrics.items():
+                    url = "{}/v1/metric/{}".format(endpoint, v)
+                    result = Common.perform_request(
+                        url, auth_token, req_type="get", verify_ssl=verify_ssl)
+                    metric = json.loads(result.text)
+                    metric_list.append(metric)
+                if metric_list:
+                    log.info("Return a list of %s resource metrics", resource)
+                    return metric_list
+
+                else:
+                    log.info("There are no metrics available")
+                    return []
+        else:
+            url = "{}/v1/metric?sort=name:asc".format(endpoint)
+            result = Common.perform_request(
+                url, auth_token, req_type="get", verify_ssl=verify_ssl)
+            metrics = []
+            metrics_partial = json.loads(result.text)
+            for metric in metrics_partial:
+                metrics.append(metric)
+
+            while len(json.loads(result.text)) > 0:
+                last_metric_id = metrics_partial[-1]['id']
+                url = "{}/v1/metric?sort=name:asc&marker={}".format(endpoint, last_metric_id)
+                result = Common.perform_request(
+                    url, auth_token, req_type="get", verify_ssl=verify_ssl)
+                if len(json.loads(result.text)) > 0:
+                    metrics_partial = json.loads(result.text)
+                    for metric in metrics_partial:
+                        metrics.append(metric)
+
+            if metrics is not None:
+                # Format the list response
+                if metric_name is not None:
+                    metric_list = self.response_list(
+                        metrics, metric_name=metric_name)
+                    log.info("Returning a list of %s metrics", metric_name)
+                else:
+                    metric_list = self.response_list(metrics)
+                    log.info("Returning a complete list of metrics")
+                return metric_list
+            else:
+                log.info("There are no metrics available")
+                return []
+
+    def get_metric_id(self, endpoint, auth_token, metric_name, resource_id, verify_ssl):
+        """Check if the desired metric already exists for the resource."""
+        url = "{}/v1/resource/generic/%s".format(endpoint) % resource_id
+        try:
+            # Try return the metric id if it exists
+            result = Common.perform_request(
+                url,
+                auth_token,
+                req_type="get",
+                verify_ssl=verify_ssl)
+            return json.loads(result.text)['metrics'][metric_name]
+        except KeyError as e:
+            log.error("Metric doesn't exist. No metric_id available")
+            raise e
+
+    def read_metric_data(self, endpoint, auth_token, values, verify_ssl):
+        """Collect metric measures over a specified time period."""
+        timestamps = []
+        data = []
+        # get metric_id
+        metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
+                                       values['resource_uuid'], verify_ssl)
+        # Try and collect measures
+        collection_unit = values['collection_unit'].upper()
+        collection_period = values['collection_period']
+
+        # Define the start and end time based on configurations
+        # FIXME: Local timezone may differ from timezone set in Gnocchi, causing discrepancies in measures
+        stop_time = time.strftime("%Y-%m-%d") + "T" + time.strftime("%X")
+        end_time = int(round(time.time() * 1000))
+        if collection_unit == 'YEAR':
+            diff = PERIOD_MS[collection_unit]
+        else:
+            diff = collection_period * PERIOD_MS[collection_unit]
+        s_time = (end_time - diff) / 1000.0
+        start_time = datetime.datetime.fromtimestamp(s_time).strftime(
+            '%Y-%m-%dT%H:%M:%S.%f')
+        base_url = "{}/v1/metric/%(0)s/measures?start=%(1)s&stop=%(2)s"
+        url = base_url.format(endpoint) % {
+            "0": metric_id, "1": start_time, "2": stop_time}
+
+        # Perform metric data request
+        metric_data = Common.perform_request(
+            url,
+            auth_token,
+            req_type="get",
+            verify_ssl=verify_ssl)
+
+        # Generate a list of the requested timestamps and data
+        for r in json.loads(metric_data.text):
+            timestamp = r[0].replace("T", " ")
+            timestamps.append(timestamp)
+            data.append(r[2])
+
+        return timestamps, data
+
+    def response_list(self, metric_list, metric_name=None, resource=None):
+        """Create the appropriate lists for a list response."""
+        resp_list, name_list, res_list = [], [], []
+
+        # Create required lists
+        for row in metric_list:
+            # Only list OSM metrics
+            name = None
+            if row['name'] in METRIC_MAPPINGS.values():
+                for k, v in six.iteritems(METRIC_MAPPINGS):
+                    if row['name'] == v:
+                        name = k
+                metric = {"metric_name": name,
+                          "metric_uuid": row['id'],
+                          "metric_unit": row['unit'],
+                          "resource_uuid": row['resource_id']}
+                resp_list.append(metric)
+            # Generate metric_name specific list
+            if metric_name is not None and name is not None:
+                if metric_name in METRIC_MAPPINGS.keys() and row['name'] == METRIC_MAPPINGS[metric_name]:
+                    metric = {"metric_name": metric_name,
+                              "metric_uuid": row['id'],
+                              "metric_unit": row['unit'],
+                              "resource_uuid": row['resource_id']}
+                    name_list.append(metric)
+            # Generate resource specific list
+            if resource is not None and name is not None:
+                if row['resource_id'] == resource:
+                    metric = {"metric_name": name,
+                              "metric_uuid": row['id'],
+                              "metric_unit": row['unit'],
+                              "resource_uuid": row['resource_id']}
+                    res_list.append(metric)
+
+        # Join required lists
+        if metric_name is not None and resource is not None:
+            # Return intersection of res_list and name_list
+            return [i for i in res_list for j in name_list if i['metric_uuid'] == j['metric_uuid']]
+        elif metric_name is not None:
+            return name_list
+        elif resource is not None:
+            return res_list
+        else:
+            return resp_list
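
For readers unfamiliar with Gnocchi paging, the list_metrics() branch above that walks /v1/metric relies on marker-based pagination: results are sorted by name, and the id of the last entry in a page becomes the marker of the next request, until an empty page comes back. A self-contained sketch of that loop with a stubbed fetch; the sample data is hypothetical.

    PAGES = {
        None: [{"id": "m1", "name": "cpu_util"}, {"id": "m2", "name": "disk.read.requests"}],
        "m2": [{"id": "m3", "name": "memory.usage"}],
        "m3": [],
    }

    def fetch_page(marker=None):
        # Stand-in for: GET {endpoint}/v1/metric?sort=name:asc[&marker=<last id>]
        return PAGES[marker]

    metrics = []
    page = fetch_page()
    while page:
        metrics.extend(page)
        page = fetch_page(marker=page[-1]["id"])

    print([m["name"] for m in metrics])  # -> ['cpu_util', 'disk.read.requests', 'memory.usage']
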
diff --git a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
deleted file mode 100644 (file)
index 825671e..0000000
+++ /dev/null
@@ -1,485 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Carry out OpenStack metric requests via Gnocchi API."""
-
-import datetime
-import json
-import logging
-import time
-
-import six
-import yaml
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.message_bus.producer import KafkaProducer
-from osm_mon.plugins.OpenStack.common import Common
-from osm_mon.plugins.OpenStack.response import OpenStack_Response
-
-log = logging.getLogger(__name__)
-
-METRIC_MAPPINGS = {
-    "average_memory_utilization": "memory.usage",
-    "disk_read_ops": "disk.read.requests",
-    "disk_write_ops": "disk.write.requests",
-    "disk_read_bytes": "disk.read.bytes",
-    "disk_write_bytes": "disk.write.bytes",
-    "packets_dropped": "interface.if_dropped",
-    "packets_received": "interface.if_packets",
-    "packets_sent": "interface.if_packets",
-    "cpu_utilization": "cpu_util",
-}
-
-PERIOD_MS = {
-    "HR": 3600000,
-    "DAY": 86400000,
-    "WEEK": 604800000,
-    "MONTH": 2629746000,
-    "YEAR": 31556952000
-}
-
-
-class Metrics(object):
-    """OpenStack metric requests performed via the Gnocchi API."""
-
-    def __init__(self):
-        """Initialize the metric actions."""
-
-        # Use the Response class to generate valid json response messages
-        self._response = OpenStack_Response()
-
-        # Initializer a producer to send responses back to SO
-        self._producer = KafkaProducer("metric_response")
-
-        self._auth_manager = AuthManager()
-
-    def metric_calls(self, message, vim_uuid):
-        """Consume info from the message bus to manage metric requests."""
-        log.info("OpenStack metric action required.")
-        try:
-            values = json.loads(message.value)
-        except ValueError:
-            values = yaml.safe_load(message.value)
-
-        if 'metric_name' in values and values['metric_name'] not in METRIC_MAPPINGS.keys():
-            raise ValueError('Metric ' + values['metric_name'] + ' is not supported.')
-
-        verify_ssl = self._auth_manager.is_verify_ssl(vim_uuid)
-
-        endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
-
-        auth_token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
-
-        if message.key == "create_metric_request":
-            # Configure metric
-            metric_details = values['metric_create_request']
-            metric_id, resource_id, status = self.configure_metric(
-                endpoint, auth_token, metric_details, verify_ssl)
-
-            # Generate and send a create metric response
-            try:
-                resp_message = self._response.generate_response(
-                    'create_metric_response',
-                    status=status,
-                    cor_id=metric_details['correlation_id'],
-                    metric_id=metric_id,
-                    r_id=resource_id)
-                log.info("Response messages: %s", resp_message)
-                self._producer.publish_metrics_response(
-                    'create_metric_response', resp_message)
-            except Exception as exc:
-                log.warning("Failed to create response: %s", exc)
-
-        elif message.key == "read_metric_data_request":
-            # Read all metric data related to a specified metric
-            timestamps, metric_data = self.read_metric_data(endpoint, auth_token, values, verify_ssl)
-
-            # Generate and send a response message
-            try:
-                metric_id = self.get_metric_id(endpoint,
-                                               auth_token,
-                                               METRIC_MAPPINGS[values['metric_name']],
-                                               values['resource_uuid'],
-                                               verify_ssl)
-                resp_message = self._response.generate_response(
-                    'read_metric_data_response',
-                    m_id=metric_id,
-                    m_name=values['metric_name'],
-                    r_id=values['resource_uuid'],
-                    cor_id=values['correlation_id'],
-                    times=timestamps,
-                    metrics=metric_data)
-                log.info("Response message: %s", resp_message)
-                self._producer.read_metric_data_response(
-                    'read_metric_data_response', resp_message)
-            except Exception as exc:
-                log.warning("Failed to create response: %s", exc)
-
-        elif message.key == "delete_metric_request":
-            # delete the specified metric in the request
-            metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
-                                           values['resource_uuid'], verify_ssl)
-            status = self.delete_metric(
-                endpoint, auth_token, metric_id, verify_ssl)
-
-            # Generate and send a response message
-            try:
-                resp_message = self._response.generate_response(
-                    'delete_metric_response',
-                    m_id=metric_id,
-                    m_name=values['metric_name'],
-                    status=status,
-                    r_id=values['resource_uuid'],
-                    cor_id=values['correlation_id'])
-                log.info("Response message: %s", resp_message)
-                self._producer.delete_metric_response(
-                    'delete_metric_response', resp_message)
-            except Exception as exc:
-                log.warning("Failed to create response: %s", exc)
-
-        elif message.key == "update_metric_request":
-            # Gnocchi doesn't support configuration updates
-            # Log and send a response back to this effect
-            log.warning("Gnocchi doesn't support metric configuration updates.")
-            req_details = values['metric_create_request']
-            metric_name = req_details['metric_name']
-            resource_id = req_details['resource_uuid']
-            metric_id = self.get_metric_id(endpoint, auth_token, metric_name, resource_id, verify_ssl)
-
-            # Generate and send a response message
-            try:
-                resp_message = self._response.generate_response(
-                    'update_metric_response',
-                    status=False,
-                    cor_id=req_details['correlation_id'],
-                    r_id=resource_id,
-                    m_id=metric_id)
-                log.info("Response message: %s", resp_message)
-                self._producer.update_metric_response(
-                    'update_metric_response', resp_message)
-            except Exception as exc:
-                log.warning("Failed to create response: %s", exc)
-
-        elif message.key == "list_metric_request":
-            list_details = values['metrics_list_request']
-
-            metric_list = self.list_metrics(
-                endpoint, auth_token, list_details, verify_ssl)
-
-            # Generate and send a response message
-            try:
-                resp_message = self._response.generate_response(
-                    'list_metric_response',
-                    m_list=metric_list,
-                    cor_id=list_details['correlation_id'])
-                log.info("Response message: %s", resp_message)
-                self._producer.list_metric_response(
-                    'list_metric_response', resp_message)
-            except Exception as exc:
-                log.warning("Failed to create response: %s", exc)
-
-        else:
-            log.warning("Unknown key %s, no action will be performed.", message.key)
-
-    def configure_metric(self, endpoint, auth_token, values, verify_ssl):
-        """Create the new metric in Gnocchi."""
-        try:
-            resource_id = values['resource_uuid']
-        except KeyError:
-            log.warning("resource_uuid field is missing.")
-            return None, None, False
-
-        try:
-            metric_name = values['metric_name'].lower()
-        except KeyError:
-            log.warning("metric_name field is missing.")
-            return None, None, False
-
-        # Check for an existing metric for this resource
-        metric_id = self.get_metric_id(
-            endpoint, auth_token, metric_name, resource_id, verify_ssl)
-
-        if metric_id is None:
-            # Try appending metric to existing resource
-            try:
-                base_url = "{}/v1/resource/generic/%s/metric"
-                res_url = base_url.format(endpoint) % resource_id
-                payload = {metric_name: {'archive_policy_name': 'high',
-                                         'unit': values['metric_unit']}}
-                result = Common.perform_request(
-                    res_url,
-                    auth_token,
-                    req_type="post",
-                    verify_ssl=verify_ssl,
-                    payload=json.dumps(payload, sort_keys=True))
-                # Get id of newly created metric
-                for row in json.loads(result.text):
-                    if row['name'] == metric_name:
-                        metric_id = row['id']
-                log.info("Appended metric to existing resource.")
-
-                return metric_id, resource_id, True
-            except Exception as exc:
-                # Gnocchi version of resource does not exist creating a new one
-                log.info("Failed to append metric to existing resource:%s",
-                         exc)
-                try:
-                    url = "{}/v1/resource/generic".format(endpoint)
-                    metric = {'name': metric_name,
-                              'archive_policy_name': 'high',
-                              'unit': values['metric_unit'], }
-
-                    resource_payload = json.dumps({'id': resource_id,
-                                                   'metrics': {
-                                                       metric_name: metric}}, sort_keys=True)
-
-                    resource = Common.perform_request(
-                        url,
-                        auth_token,
-                        req_type="post",
-                        payload=resource_payload,
-                        verify_ssl=verify_ssl)
-
-                    # Return the newly created resource_id for creating alarms
-                    new_resource_id = json.loads(resource.text)['id']
-                    log.info("Created new resource for metric: %s",
-                             new_resource_id)
-
-                    metric_id = self.get_metric_id(
-                        endpoint, auth_token, metric_name, new_resource_id, verify_ssl)
-
-                    return metric_id, new_resource_id, True
-                except Exception as exc:
-                    log.warning("Failed to create a new resource: %s", exc)
-            return None, None, False
-
-        else:
-            log.info("This metric already exists for this resource.")
-
-        return metric_id, resource_id, False
-
-    def delete_metric(self, endpoint, auth_token, metric_id, verify_ssl):
-        """Delete metric."""
-        url = "{}/v1/metric/%s".format(endpoint) % metric_id
-
-        try:
-            result = Common.perform_request(
-                url,
-                auth_token,
-                req_type="delete",
-                verify_ssl=verify_ssl)
-            if str(result.status_code) == "404":
-                log.warning("Failed to delete the metric.")
-                return False
-            else:
-                return True
-        except Exception as exc:
-            log.warning("Failed to delete metric: %s", exc)
-        return False
-
-    def list_metrics(self, endpoint, auth_token, values, verify_ssl):
-        """List all metrics."""
-
-        # Check for a specified list
-        metric_name = None
-        if 'metric_name' in values:
-            metric_name = values['metric_name'].lower()
-
-        resource = None
-        if 'resource_uuid' in values:
-            resource = values['resource_uuid']
-
-        try:
-            if resource:
-                url = "{}/v1/resource/generic/{}".format(endpoint, resource)
-                result = Common.perform_request(
-                    url, auth_token, req_type="get", verify_ssl=verify_ssl)
-                resource_data = json.loads(result.text)
-                metrics = resource_data['metrics']
-
-                if metric_name:
-                    if metrics.get(METRIC_MAPPINGS[metric_name]):
-                        metric_id = metrics[METRIC_MAPPINGS[metric_name]]
-                        url = "{}/v1/metric/{}".format(endpoint, metric_id)
-                        result = Common.perform_request(
-                            url, auth_token, req_type="get", verify_ssl=verify_ssl)
-                        metric_list = json.loads(result.text)
-                        log.info("Returning an %s resource list for %s metrics",
-                                 metric_name, resource)
-                        return metric_list
-                    else:
-                        log.info("Metric {} not found for {} resource".format(metric_name, resource))
-                        return None
-                else:
-                    metric_list = []
-                    for k, v in metrics.items():
-                        url = "{}/v1/metric/{}".format(endpoint, v)
-                        result = Common.perform_request(
-                            url, auth_token, req_type="get", verify_ssl=verify_ssl)
-                        metric = json.loads(result.text)
-                        metric_list.append(metric)
-                    if metric_list:
-                        log.info("Return a list of %s resource metrics", resource)
-                        return metric_list
-
-                    else:
-                        log.info("There are no metrics available")
-                        return []
-            else:
-                url = "{}/v1/metric?sort=name:asc".format(endpoint)
-                result = Common.perform_request(
-                    url, auth_token, req_type="get", verify_ssl=verify_ssl)
-                metrics = []
-                metrics_partial = json.loads(result.text)
-                for metric in metrics_partial:
-                    metrics.append(metric)
-
-                while len(json.loads(result.text)) > 0:
-                    last_metric_id = metrics_partial[-1]['id']
-                    url = "{}/v1/metric?sort=name:asc&marker={}".format(endpoint, last_metric_id)
-                    result = Common.perform_request(
-                        url, auth_token, req_type="get", verify_ssl=verify_ssl)
-                    if len(json.loads(result.text)) > 0:
-                        metrics_partial = json.loads(result.text)
-                        for metric in metrics_partial:
-                            metrics.append(metric)
-
-                if metrics is not None:
-                    # Format the list response
-                    if metric_name is not None:
-                        metric_list = self.response_list(
-                            metrics, metric_name=metric_name)
-                        log.info("Returning a list of %s metrics", metric_name)
-                    else:
-                        metric_list = self.response_list(metrics)
-                        log.info("Returning a complete list of metrics")
-                    return metric_list
-                else:
-                    log.info("There are no metrics available")
-                    return []
-        except Exception as exc:
-            log.exception("Failed to list metrics. %s", exc)
-        return None
-
-    def get_metric_id(self, endpoint, auth_token, metric_name, resource_id, verify_ssl):
-        """Check if the desired metric already exists for the resource."""
-        url = "{}/v1/resource/generic/%s".format(endpoint) % resource_id
-        try:
-            # Try return the metric id if it exists
-            result = Common.perform_request(
-                url,
-                auth_token,
-                req_type="get",
-                verify_ssl=verify_ssl)
-            return json.loads(result.text)['metrics'][metric_name]
-        except KeyError:
-            log.warning("Metric doesn't exist. No metric_id available")
-            return None
-
-    def read_metric_data(self, endpoint, auth_token, values, verify_ssl):
-        """Collect metric measures over a specified time period."""
-        timestamps = []
-        data = []
-        try:
-            # get metric_id
-            metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
-                                           values['resource_uuid'], verify_ssl)
-            # Try and collect measures
-            collection_unit = values['collection_unit'].upper()
-            collection_period = values['collection_period']
-
-            # Define the start and end time based on configurations
-            # FIXME: Local timezone may differ from timezone set in Gnocchi, causing discrepancies in measures
-            stop_time = time.strftime("%Y-%m-%d") + "T" + time.strftime("%X")
-            end_time = int(round(time.time() * 1000))
-            if collection_unit == 'YEAR':
-                diff = PERIOD_MS[collection_unit]
-            else:
-                diff = collection_period * PERIOD_MS[collection_unit]
-            s_time = (end_time - diff) / 1000.0
-            start_time = datetime.datetime.fromtimestamp(s_time).strftime(
-                '%Y-%m-%dT%H:%M:%S.%f')
-            base_url = "{}/v1/metric/%(0)s/measures?start=%(1)s&stop=%(2)s"
-            url = base_url.format(endpoint) % {
-                "0": metric_id, "1": start_time, "2": stop_time}
-
-            # Perform metric data request
-            metric_data = Common.perform_request(
-                url,
-                auth_token,
-                req_type="get",
-                verify_ssl=verify_ssl)
-
-            # Generate a list of the requested timestamps and data
-            for r in json.loads(metric_data.text):
-                timestamp = r[0].replace("T", " ")
-                timestamps.append(timestamp)
-                data.append(r[2])
-
-            return timestamps, data
-        except Exception as exc:
-            log.warning("Failed to gather specified measures: %s", exc)
-        return timestamps, data
-
-    def response_list(self, metric_list, metric_name=None, resource=None):
-        """Create the appropriate lists for a list response."""
-        resp_list, name_list, res_list = [], [], []
-
-        # Create required lists
-        for row in metric_list:
-            # Only list OSM metrics
-            name = None
-            if row['name'] in METRIC_MAPPINGS.values():
-                for k, v in six.iteritems(METRIC_MAPPINGS):
-                    if row['name'] == v:
-                        name = k
-                metric = {"metric_name": name,
-                          "metric_uuid": row['id'],
-                          "metric_unit": row['unit'],
-                          "resource_uuid": row['resource_id']}
-                resp_list.append(metric)
-            # Generate metric_name specific list
-            if metric_name is not None and name is not None:
-                if metric_name in METRIC_MAPPINGS.keys() and row['name'] == METRIC_MAPPINGS[metric_name]:
-                    metric = {"metric_name": metric_name,
-                              "metric_uuid": row['id'],
-                              "metric_unit": row['unit'],
-                              "resource_uuid": row['resource_id']}
-                    name_list.append(metric)
-            # Generate resource specific list
-            if resource is not None and name is not None:
-                if row['resource_id'] == resource:
-                    metric = {"metric_name": name,
-                              "metric_uuid": row['id'],
-                              "metric_unit": row['unit'],
-                              "resource_uuid": row['resource_id']}
-                    res_list.append(metric)
-
-        # Join required lists
-        if metric_name is not None and resource is not None:
-            # Return intersection of res_list and name_list
-            return [i for i in res_list for j in name_list if i['metric_uuid'] == j['metric_uuid']]
-        elif metric_name is not None:
-            return name_list
-        elif resource is not None:
-            return res_list
-        else:
-            return resp_list
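The removed Gnocchi helper above pages through the /v1/metric listing by passing the id of the last metric received as a marker. A minimal sketch of that pagination pattern, for reference only (it is not part of this changeset; the helper name fetch_all_metrics is invented here, and it assumes the requests library with the Keystone token passed as the X-Auth-Token header):

import requests

def fetch_all_metrics(endpoint, auth_token, verify_ssl=True):
    """Walk Gnocchi's paginated /v1/metric listing and return every metric."""
    metrics = []
    url = "{}/v1/metric?sort=name:asc".format(endpoint)
    while True:
        result = requests.get(url, headers={"X-Auth-Token": auth_token}, verify=verify_ssl)
        page = result.json()
        if not page:
            break  # an empty page means the previous one was the last
        metrics.extend(page)
        # ask for the next page, starting just after the last metric returned
        url = "{}/v1/metric?sort=name:asc&marker={}".format(endpoint, page[-1]['id'])
    return metrics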
diff --git a/osm_mon/plugins/OpenStack/response.py b/osm_mon/plugins/OpenStack/response.py
index 8d328f0..9f5b17d 100644 (file)
@@ -21,7 +21,6 @@
 ##
 """Generate valid responses to send back to the SO."""
 
-import json
 import logging
 
 log = logging.getLogger(__name__)
@@ -29,13 +28,13 @@ log = logging.getLogger(__name__)
 schema_version = "1.0"
 
 
-class OpenStack_Response(object):
+class OpenStackResponseBuilder(object):
     """Generates responses for OpenStack plugin."""
 
     def __init__(self):
         """Initialize OpenStack Response instance."""
 
-    def generate_response(self, key, **kwargs):
+    def generate_response(self, key, **kwargs) -> dict:
         """Make call to appropriate response function."""
         if key == "list_alarm_response":
             message = self.alarm_list_response(**kwargs)
@@ -63,15 +62,15 @@ class OpenStack_Response(object):
 
         return message
 
-    def alarm_list_response(self, **kwargs):
+    def alarm_list_response(self, **kwargs) -> dict:
         """Generate the response for an alarm list request."""
         alarm_list_resp = {"schema_version": schema_version,
                            "schema_type": "list_alarm_response",
                            "correlation_id": kwargs['cor_id'],
                            "list_alarm_response": kwargs['alarm_list']}
-        return json.dumps(alarm_list_resp)
+        return alarm_list_resp
 
-    def create_alarm_response(self, **kwargs):
+    def create_alarm_response(self, **kwargs) -> dict:
         """Generate a response for a create alarm request."""
         create_alarm_resp = {"schema_version": schema_version,
                              "schema_type": "create_alarm_response",
@@ -79,9 +78,9 @@ class OpenStack_Response(object):
                                  "correlation_id": kwargs['cor_id'],
                                  "alarm_uuid": kwargs['alarm_id'],
                                  "status": kwargs['status']}}
-        return json.dumps(create_alarm_resp)
+        return create_alarm_resp
 
-    def delete_alarm_response(self, **kwargs):
+    def delete_alarm_response(self, **kwargs) -> dict:
         """Generate a response for a delete alarm request."""
         delete_alarm_resp = {"schema_version": schema_version,
                              "schema_type": "alarm_deletion_response",
@@ -89,9 +88,9 @@ class OpenStack_Response(object):
                                  "correlation_id": kwargs['cor_id'],
                                  "alarm_uuid": kwargs['alarm_id'],
                                  "status": kwargs['status']}}
-        return json.dumps(delete_alarm_resp)
+        return delete_alarm_resp
 
-    def update_alarm_response(self, **kwargs):
+    def update_alarm_response(self, **kwargs) -> dict:
         """Generate a response for an update alarm request."""
         update_alarm_resp = {"schema_version": schema_version,
                              "schema_type": "update_alarm_response",
@@ -99,68 +98,70 @@ class OpenStack_Response(object):
                                  "correlation_id": kwargs['cor_id'],
                                  "alarm_uuid": kwargs['alarm_id'],
                                  "status": kwargs['status']}}
-        return json.dumps(update_alarm_resp)
+        return update_alarm_resp
 
-    def metric_create_response(self, **kwargs):
+    def metric_create_response(self, **kwargs) -> dict:
         """Generate a response for a create metric request."""
         create_metric_resp = {"schema_version": schema_version,
                               "schema_type": "create_metric_response",
                               "correlation_id": kwargs['cor_id'],
                               "metric_create_response": {
                                   "metric_uuid": kwargs['metric_id'],
-                                  "resource_uuid": kwargs['r_id'],
+                                  "resource_uuid": kwargs['resource_id'],
                                   "status": kwargs['status']}}
-        return json.dumps(create_metric_resp)
+        return create_metric_resp
 
-    def read_metric_data_response(self, **kwargs):
+    def read_metric_data_response(self, **kwargs) -> dict:
         """Generate a response for a read metric data request."""
         read_metric_data_resp = {"schema_version": schema_version,
                                  "schema_type": "read_metric_data_response",
-                                 "metric_name": kwargs['m_name'],
-                                 "metric_uuid": kwargs['m_id'],
-                                 "resource_uuid": kwargs['r_id'],
+                                 "metric_name": kwargs['metric_name'],
+                                 "metric_uuid": kwargs['metric_id'],
+                                 "resource_uuid": kwargs['resource_id'],
                                  "correlation_id": kwargs['cor_id'],
+                                 "status": kwargs['status'],
                                  "metrics_data": {
                                      "time_series": kwargs['times'],
                                      "metrics_series": kwargs['metrics']}}
-        return json.dumps(read_metric_data_resp)
+        return read_metric_data_resp
 
-    def delete_metric_response(self, **kwargs):
+    def delete_metric_response(self, **kwargs) -> dict:
         """Generate a response for a delete metric request."""
         delete_metric_resp = {"schema_version": schema_version,
                               "schema_type": "delete_metric_response",
-                              "metric_name": kwargs['m_name'],
-                              "metric_uuid": kwargs['m_id'],
-                              "resource_uuid": kwargs['r_id'],
+                              "metric_name": kwargs['metric_name'],
+                              "metric_uuid": kwargs['metric_id'],
+                              "resource_uuid": kwargs['resource_id'],
                               "correlation_id": kwargs['cor_id'],
                               "status": kwargs['status']}
-        return json.dumps(delete_metric_resp)
+        return delete_metric_resp
 
-    def update_metric_response(self, **kwargs):
+    def update_metric_response(self, **kwargs) -> dict:
         """Generate a repsonse for an update metric request."""
         update_metric_resp = {"schema_version": schema_version,
                               "schema_type": "update_metric_response",
                               "correlation_id": kwargs['cor_id'],
                               "metric_update_response": {
-                                  "metric_uuid": kwargs['m_id'],
+                                  "metric_uuid": kwargs['metric_id'],
                                   "status": kwargs['status'],
-                                  "resource_uuid": kwargs['r_id']}}
-        return json.dumps(update_metric_resp)
+                                  "resource_uuid": kwargs['resource_id']}}
+        return update_metric_resp
 
-    def list_metric_response(self, **kwargs):
+    def list_metric_response(self, **kwargs) -> dict:
         """Generate a response for a list metric request."""
         list_metric_resp = {"schema_version": schema_version,
                             "schema_type": "list_metric_response",
                             "correlation_id": kwargs['cor_id'],
-                            "metrics_list": kwargs['m_list']}
-        return json.dumps(list_metric_resp)
+                            "status": kwargs['status'],
+                            "metrics_list": kwargs['metric_list']}
+        return list_metric_resp
 
-    def notify_alarm(self, **kwargs):
+    def notify_alarm(self, **kwargs) -> dict:
         """Generate a response to send alarm notifications."""
         notify_alarm_resp = {"schema_version": schema_version,
                              "schema_type": "notify_alarm",
                              "notify_details": {
-                                 "alarm_uuid": kwargs['a_id'],
+                                 "alarm_uuid": kwargs['alarm_id'],
                                  "vdu_name": kwargs['vdu_name'],
                                  "vnf_member_index": kwargs['vnf_member_index'],
                                  "ns_id": kwargs['ns_id'],
@@ -170,4 +171,4 @@ class OpenStack_Response(object):
                                  "severity": kwargs['sev'],
                                  "status": kwargs['state'],
                                  "start_date": kwargs['date']}}
-        return json.dumps(notify_alarm_resp)
+        return notify_alarm_resp
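With this change the builder returns plain dictionaries rather than pre-serialized JSON strings, so encoding now happens wherever the response is actually published. A short usage sketch, for illustration only (the identifier values are invented, and it assumes the 'create_alarm_response' key dispatches to create_alarm_response() as in the generate_response() mapping above):

import json

from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder

builder = OpenStackResponseBuilder()
response = builder.generate_response('create_alarm_response',
                                     cor_id=123,                   # correlation id from the request
                                     alarm_id='example-alarm-uuid',
                                     status=True)

assert isinstance(response, dict)  # previously this was already a JSON string
payload = json.dumps(response)     # the caller now decides when (and whether) to serialize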
diff --git a/osm_mon/plugins/vRealiseOps/kafka_consumer_vrops.py b/osm_mon/plugins/vRealiseOps/kafka_consumer_vrops.py
deleted file mode 100644 (file)
index f5e11e6..0000000
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-
-##
-# Copyright 2016-2017 VMware Inc.
-# This file is part of ETSI OSM
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact:  osslegalrouting@vmware.com
-##
-
-"""
-vROPs Kafka Consumer that consumes the request messages
-"""
-
-
-from kafka import KafkaConsumer
-from kafka.errors import KafkaError
-import logging as log
-
-class vROP_KafkaConsumer(object):
-    """
-        Kafka Consumer for vROPs
-    """
-
-    def __init__(self, topics=[], broker_uri=None):
-        """
-            Method to initize KafkaConsumer
-            Args:
-                broker_uri - hostname:port uri of Kafka broker
-                topics - list of topics to subscribe
-            Returns:
-               None
-        """
-
-        if broker_uri is None:
-            self.broker = '0.0.0.0:9092'
-        else:
-            self.broker = broker_uri
-
-        self.topic = topics
-        print ("vROPs Consumer started, Broker URI: {}".format(self.broker))
-        print ("Subscribed Topics {}".format(self.topic))
-        try:
-            self.vrops_consumer = KafkaConsumer(bootstrap_servers=self.broker)
-            self.vrops_consumer.subscribe(self.topic)
-        except Exception as exp:
-            msg = "fail to create consumer for topic {} with broker {} Error : {}"\
-                    .format(self.topic, self.broker, exp)
-            log.error(msg)
-            raise Exception(msg)
-
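The deleted wrapper was a thin layer over kafka-python. For reference, the pattern it encapsulated amounts to the sketch below (illustrative only, not part of this changeset; the broker address and topic names are example values):

from kafka import KafkaConsumer

consumer = KafkaConsumer(bootstrap_servers='localhost:9092')
consumer.subscribe(['alarm_request', 'metric_request'])  # example topics

for message in consumer:
    # each record carries the topic it arrived on, an optional key and the raw payload
    print(message.topic, message.key, message.value)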
diff --git a/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py b/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py
index 6ca3d40..af68dfc 100644 (file)
 """
 Monitoring metrics & creating Alarm definitions in vROPs
 """
-import pytz
-import requests
 import logging
 
+import pytz
+import requests
 import six
 from pyvcloud.vcd.client import BasicLoginCredentials
 from pyvcloud.vcd.client import Client
-API_VERSION = '5.9'
 
 from xml.etree import ElementTree as XmlElementTree
 import traceback
@@ -44,31 +43,45 @@ import datetime
 from socket import getfqdn
 
 import urllib3
-urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+from osm_mon.core.settings import Config
 
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
 from osm_mon.core.database import DatabaseManager
 
-OPERATION_MAPPING = {'GE':'GT_EQ', 'LE':'LT_EQ', 'GT':'GT', 'LT':'LT', 'EQ':'EQ'}
-severity_mano2vrops = {'WARNING':'WARNING', 'MINOR':'WARNING', 'MAJOR':"IMMEDIATE",\
-                        'CRITICAL':'CRITICAL', 'INDETERMINATE':'UNKNOWN'}
-PERIOD_MSEC = {'HR':3600000,'DAY':86400000,'WEEK':604800000,'MONTH':2678400000,'YEAR':31536000000}
+API_VERSION = '5.9'
 
-#To Do - Add actual webhook url & certificate
-#SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/www.vrops_webservice.com.cert'
-#webhook_url = "https://mano-dev-1:8080/notify/" #for testing
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+OPERATION_MAPPING = {'GE': 'GT_EQ', 'LE': 'LT_EQ', 'GT': 'GT', 'LT': 'LT', 'EQ': 'EQ'}
+severity_mano2vrops = {'WARNING': 'WARNING', 'MINOR': 'WARNING', 'MAJOR': "IMMEDIATE",
+                       'CRITICAL': 'CRITICAL', 'INDETERMINATE': 'UNKNOWN'}
+PERIOD_MSEC = {'HR': 3600000, 'DAY': 86400000, 'WEEK': 604800000, 'MONTH': 2678400000, 'YEAR': 31536000000}
+
+# To Do - Add actual webhook url & certificate
+# SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/www.vrops_webservice.com.cert'
+# webhook_url = "https://mano-dev-1:8080/notify/" #for testing
 webhook_url = "https://" + getfqdn() + ":8080/notify/"
 SSL_CERTIFICATE_FILE_NAME = ('vROPs_Webservice/SSL_certificate/' + getfqdn() + ".cert")
-#SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/10.172.137.214.cert' #for testing
+# SSL_CERTIFICATE_FILE_NAME = 'vROPs_Webservice/SSL_certificate/10.172.137.214.cert' #for testing
 
 MODULE_DIR = os.path.dirname(__file__)
 CONFIG_FILE_NAME = 'vrops_config.xml'
 CONFIG_FILE_PATH = os.path.join(MODULE_DIR, CONFIG_FILE_NAME)
 SSL_CERTIFICATE_FILE_PATH = os.path.join(MODULE_DIR, SSL_CERTIFICATE_FILE_NAME)
 
-class MonPlugin():
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+
+logger = logging.getLogger(__name__)
+
+
+class MonPlugin:
     """MON Plugin class for vROPs telemetry plugin
     """
+
     def __init__(self, access_config=None):
         """Constructor of MON plugin
         Params:
@@ -87,37 +100,32 @@ class MonPlugin():
             check against the VIM
         """
 
-        self.logger = logging.getLogger('PluginReceiver.MonPlugin')
-        self.logger.setLevel(logging.DEBUG)
-
         if access_config is None:
-            self.logger.error("VIM Access Configuration not provided")
+            logger.error("VIM Access Configuration not provided")
             raise KeyError("VIM Access Configuration not provided")
 
         self.database_manager = DatabaseManager()
 
         self.access_config = access_config
         if not bool(access_config):
-            self.logger.error("VIM Account details are not added. Please add a VIM account")
+            logger.error("VIM Account details are not added. Please add a VIM account")
             raise KeyError("VIM Account details are not added. Please add a VIM account")
 
         try:
-            self.vrops_site =  access_config['vrops_site']
+            self.vrops_site = access_config['vrops_site']
             self.vrops_user = access_config['vrops_user']
             self.vrops_password = access_config['vrops_password']
             self.vcloud_site = access_config['vim_url']
             self.admin_username = access_config['admin_username']
             self.admin_password = access_config['admin_password']
-            #self.tenant_id = access_config['tenant_id']
+            # self.tenant_id = access_config['tenant_id']
             self.vim_uuid = access_config['vim_uuid']
 
         except KeyError as exp:
-            self.logger.error("Required VIM account details not provided: {}".format(exp))
+            logger.error("Required VIM account details not provided: {}".format(exp))
             raise KeyError("Required VIM account details not provided: {}".format(exp))
 
-
-
-    def configure_alarm(self, config_dict = {}):
+    def configure_alarm(self, config_dict={}):
         """Configures or creates a new alarm using the input parameters in config_dict
         Params:
         "alarm_name": Alarm name in string format
@@ -138,95 +146,93 @@ class MonPlugin():
 
         Returns the UUID of created alarm or None
         """
-        alarm_def = None
-        #1) get alarm & metrics parameters from plugin specific file
+        # 1) get alarm & metrics parameters from plugin specific file
         def_a_params = self.get_default_Params(config_dict['alarm_name'])
         if not def_a_params:
-            self.logger.warning("Alarm not supported: {}".format(config_dict['alarm_name']))
+            logger.warning("Alarm not supported: {}".format(config_dict['alarm_name']))
             return None
         metric_key_params = self.get_default_Params(config_dict['metric_name'])
         if not metric_key_params:
-            self.logger.warning("Metric not supported: {}".format(config_dict['metric_name']))
+            logger.warning("Metric not supported: {}".format(config_dict['metric_name']))
             return None
 
-        #1.2) Check if alarm definition already exists
-        vrops_alarm_name = def_a_params['vrops_alarm']+ '-' + config_dict['resource_uuid']
+        # 1.2) Check if alarm definition already exists
+        vrops_alarm_name = def_a_params['vrops_alarm'] + '-' + config_dict['resource_uuid']
         alert_def_list = self.get_alarm_defination_by_name(vrops_alarm_name)
         if alert_def_list:
-            self.logger.warning("Alarm already exists: {}. Try updating by update_alarm_request"\
-                            .format(vrops_alarm_name))
+            logger.warning("Alarm already exists: {}. Try updating by update_alarm_request"
+                           .format(vrops_alarm_name))
             return None
 
-        #2) create symptom definition
-        symptom_params ={'cancel_cycles': (def_a_params['cancel_period']/300)*def_a_params['cancel_cycles'],
-                        'wait_cycles': (def_a_params['period']/300)*def_a_params['evaluation'],
-                        'resource_kind_key': def_a_params['resource_kind'],
-                        'adapter_kind_key': def_a_params['adapter_kind'],
-                        'symptom_name':vrops_alarm_name,
-                        'severity': severity_mano2vrops[config_dict['severity'].upper()],
-                        'metric_key':metric_key_params['metric_key'],
-                        'operation':OPERATION_MAPPING[config_dict['operation']],
-                        'threshold_value':config_dict['threshold_value']}
+        # 2) create symptom definition
+        symptom_params = {'cancel_cycles': (def_a_params['cancel_period'] / 300) * def_a_params['cancel_cycles'],
+                          'wait_cycles': (def_a_params['period'] / 300) * def_a_params['evaluation'],
+                          'resource_kind_key': def_a_params['resource_kind'],
+                          'adapter_kind_key': def_a_params['adapter_kind'],
+                          'symptom_name': vrops_alarm_name,
+                          'severity': severity_mano2vrops[config_dict['severity'].upper()],
+                          'metric_key': metric_key_params['metric_key'],
+                          'operation': OPERATION_MAPPING[config_dict['operation']],
+                          'threshold_value': config_dict['threshold_value']}
 
         symptom_uuid = self.create_symptom(symptom_params)
         if symptom_uuid is not None:
-            self.logger.info("Symptom defined: {} with ID: {}".format(symptom_params['symptom_name'],symptom_uuid))
+            logger.info("Symptom defined: {} with ID: {}".format(symptom_params['symptom_name'], symptom_uuid))
         else:
-            self.logger.warning("Failed to create Symptom: {}".format(symptom_params['symptom_name']))
+            logger.warning("Failed to create Symptom: {}".format(symptom_params['symptom_name']))
             return None
-        #3) create alert definition
-        #To Do - Get type & subtypes for all 5 alarms
-        alarm_params = {'name':vrops_alarm_name,
-                        'description':config_dict['description']\
-                        if 'description' in config_dict and config_dict['description'] is not None else config_dict['alarm_name'],
-                        'adapterKindKey':def_a_params['adapter_kind'],
-                        'resourceKindKey':def_a_params['resource_kind'],
-                        'waitCycles':1, 'cancelCycles':1,
-                        'type':def_a_params['alarm_type'], 'subType':def_a_params['alarm_subType'],
-                        'severity':severity_mano2vrops[config_dict['severity'].upper()],
-                        'symptomDefinitionId':symptom_uuid,
-                        'impact':def_a_params['impact']}
+        # 3) create alert definition
+        # To Do - Get type & subtypes for all 5 alarms
+        alarm_params = {'name': vrops_alarm_name,
+                        'description': config_dict['description']
+                        if 'description' in config_dict and config_dict['description'] is not None else config_dict[
+                            'alarm_name'],
+                        'adapterKindKey': def_a_params['adapter_kind'],
+                        'resourceKindKey': def_a_params['resource_kind'],
+                        'waitCycles': 1, 'cancelCycles': 1,
+                        'type': def_a_params['alarm_type'], 'subType': def_a_params['alarm_subType'],
+                        'severity': severity_mano2vrops[config_dict['severity'].upper()],
+                        'symptomDefinitionId': symptom_uuid,
+                        'impact': def_a_params['impact']}
 
         alarm_def = self.create_alarm_definition(alarm_params)
         if alarm_def is None:
-            self.logger.warning("Failed to create Alert: {}".format(alarm_params['name']))
+            logger.warning("Failed to create Alert: {}".format(alarm_params['name']))
             return None
 
-        self.logger.info("Alarm defined: {} with ID: {}".format(alarm_params['name'],alarm_def))
+        logger.info("Alarm defined: {} with ID: {}".format(alarm_params['name'], alarm_def))
 
-        #4) Find vm_moref_id from vApp uuid in vCD
+        # 4) Find vm_moref_id from vApp uuid in vCD
         vm_moref_id = self.get_vm_moref_id(config_dict['resource_uuid'])
         if vm_moref_id is None:
-            self.logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(config_dict['resource_uuid']))
+            logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(config_dict['resource_uuid']))
             return None
 
-        #5) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        # 5) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
         resource_id = self.get_vm_resource_id(vm_moref_id)
         if resource_id is None:
-            self.logger.warning("Failed to find resource in vROPs: {}".format(config_dict['resource_uuid']))
+            logger.warning("Failed to find resource in vROPs: {}".format(config_dict['resource_uuid']))
             return None
 
-        #6) Configure alarm notification for a particular VM using it's resource_id
+        # 6) Configure alarm notification for a particular VM using it's resource_id
         notification_id = self.create_alarm_notification_rule(vrops_alarm_name, alarm_def, resource_id)
         if notification_id is None:
             return None
         else:
             alarm_def_uuid = alarm_def.split('-', 1)[1]
-            self.logger.info("Alarm definition created with notification: {} with ID: {}"\
-                    .format(alarm_params['name'],alarm_def_uuid))
-            ##self.database_manager.save_alarm(alarm_def_uuid, alarm_params['name'], self.vim_uuid)
-            self.database_manager.save_alarm(alarm_def_uuid,
-                                              self.vim_uuid,
-                                              ##alarm_params['name'],
-                                              config_dict['threshold_value'],
-                                              config_dict['operation'],
-                                              config_dict['metric_name'].lower(),
-                                              config_dict['vdu_name'].lower(),
-                                              config_dict['vnf_member_index'].lower(),
-                                              config_dict['ns_id'].lower()
-                                              )
-
-            #Return alarm definition UUID by removing 'AlertDefinition' from UUID
+            logger.info("Alarm definition created with notification: {} with ID: {}"
+                        .format(alarm_params['name'], alarm_def_uuid))
+            self.database_manager.save_alarm(alarm_params['name'],
+                                             self.vim_uuid,
+                                             config_dict['threshold_value'],
+                                             config_dict['operation'],
+                                             config_dict['metric_name'].lower(),
+                                             config_dict['vdu_name'].lower(),
+                                             config_dict['vnf_member_index'].lower(),
+                                             config_dict['ns_id'].lower()
+                                             )
+
+            # Return alarm definition UUID by removing 'AlertDefinition' from UUID
             return (alarm_def_uuid)
 
     def get_default_Params(self, metric_alarm_name):
@@ -239,9 +245,9 @@ class MonPlugin():
         try:
             source = open(CONFIG_FILE_PATH, 'r')
         except IOError as exp:
-            msg = ("Could not read Config file: {}, \nException: {}"\
-                        .format(CONFIG_FILE_PATH, exp))
-            self.logger.error(msg)
+            msg = ("Could not read Config file: {},  nException: {}"
+                   .format(CONFIG_FILE_PATH, exp))
+            logger.error(msg)
             raise IOError(msg)
 
         tree = XmlElementTree.parse(source)
@@ -249,11 +255,11 @@ class MonPlugin():
         for alarm in alarms:
             if alarm.tag.lower() == metric_alarm_name.lower():
                 for param in alarm:
-                    if param.tag in ("period", "evaluation", "cancel_period", "alarm_type",\
-                                    "cancel_cycles", "alarm_subType"):
+                    if param.tag in ("period", "evaluation", "cancel_period", "alarm_type",
+                                     "cancel_cycles", "alarm_subType"):
                         a_params[param.tag] = int(param.text)
                     elif param.tag in ("enabled", "repeat"):
-                        if(param.text.lower() == "true"):
+                        if param.text.lower() == "true":
                             a_params[param.tag] = True
                         else:
                             a_params[param.tag] = False
@@ -262,7 +268,6 @@ class MonPlugin():
         source.close()
         return a_params
 
-
     def create_symptom(self, symptom_params):
         """Create Symptom definition for an alarm
         Params:
@@ -282,37 +287,37 @@ class MonPlugin():
 
         try:
             api_url = '/suite-api/api/symptomdefinitions'
-            headers = {'Content-Type': 'application/json','Accept': 'application/json'}
+            headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
             data = {
-                        "id": None,
-                        "name": symptom_params['symptom_name'],
-                        "adapterKindKey": symptom_params['adapter_kind_key'],
-                        "resourceKindKey": symptom_params['resource_kind_key'],
-                        "waitCycles": symptom_params['wait_cycles'],
-                        "cancelCycles": symptom_params['cancel_cycles'],
-                        "state": {
-                            "severity": symptom_params['severity'],
-                            "condition": {
-                                "type": "CONDITION_HT",
-                                "key": symptom_params['metric_key'],
-                                "operator": symptom_params['operation'],
-                                "value": symptom_params['threshold_value'],
-                                "valueType": "NUMERIC",
-                                "instanced": False,
-                                "thresholdType": "STATIC"
-                            }
-                        }
+                "id": None,
+                "name": symptom_params['symptom_name'],
+                "adapterKindKey": symptom_params['adapter_kind_key'],
+                "resourceKindKey": symptom_params['resource_kind_key'],
+                "waitCycles": symptom_params['wait_cycles'],
+                "cancelCycles": symptom_params['cancel_cycles'],
+                "state": {
+                    "severity": symptom_params['severity'],
+                    "condition": {
+                        "type": "CONDITION_HT",
+                        "key": symptom_params['metric_key'],
+                        "operator": symptom_params['operation'],
+                        "value": symptom_params['threshold_value'],
+                        "valueType": "NUMERIC",
+                        "instanced": False,
+                        "thresholdType": "STATIC"
                     }
+                }
+            }
 
             resp = requests.post(self.vrops_site + api_url,
                                  auth=(self.vrops_user, self.vrops_password),
                                  headers=headers,
-                                 verify = False,
+                                 verify=False,
                                  data=json.dumps(data))
 
             if resp.status_code != 201:
-                self.logger.warning("Failed to create Symptom definition: {}, response {}"\
-                        .format(symptom_params['symptom_name'], resp.content))
+                logger.warning("Failed to create Symptom definition: {}, response {}"
+                               .format(symptom_params['symptom_name'], resp.content))
                 return None
 
             resp_data = json.loads(resp.content)
@@ -322,9 +327,8 @@ class MonPlugin():
             return symptom_id
 
         except Exception as exp:
-            self.logger.warning("Error creating symptom definition : {}\n{}"\
-            .format(exp, traceback.format_exc()))
-
+            logger.warning("Error creating symptom definition : {} n{}"
+                           .format(exp, traceback.format_exc()))
 
     def create_alarm_definition(self, alarm_params):
         """
@@ -351,42 +355,42 @@ class MonPlugin():
             api_url = '/suite-api/api/alertdefinitions'
             headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
             data = {
-                        "name": alarm_params['name'],
-                        "description": alarm_params['description'],
-                        "adapterKindKey": alarm_params['adapterKindKey'],
-                        "resourceKindKey": alarm_params['resourceKindKey'],
-                        "waitCycles": 1,
-                        "cancelCycles": 1,
-                        "type": alarm_params['type'],
-                        "subType": alarm_params['subType'],
-                        "states": [
+                "name": alarm_params['name'],
+                "description": alarm_params['description'],
+                "adapterKindKey": alarm_params['adapterKindKey'],
+                "resourceKindKey": alarm_params['resourceKindKey'],
+                "waitCycles": 1,
+                "cancelCycles": 1,
+                "type": alarm_params['type'],
+                "subType": alarm_params['subType'],
+                "states": [
+                    {
+                        "severity": alarm_params['severity'],
+                        "base-symptom-set":
                             {
-                                "severity": alarm_params['severity'],
-                                "base-symptom-set":
-                                    {
-                                        "type": "SYMPTOM_SET",
-                                        "relation": "SELF",
-                                        "aggregation": "ALL",
-                                        "symptomSetOperator": "AND",
-                                        "symptomDefinitionIds": [alarm_params['symptomDefinitionId']]
-                                    },
-                                "impact": {
-                                    "impactType": "BADGE",
-                                    "detail": alarm_params['impact']
-                                }
-                            }
-                        ]
+                                "type": "SYMPTOM_SET",
+                                "relation": "SELF",
+                                "aggregation": "ALL",
+                                "symptomSetOperator": "AND",
+                                "symptomDefinitionIds": [alarm_params['symptomDefinitionId']]
+                            },
+                        "impact": {
+                            "impactType": "BADGE",
+                            "detail": alarm_params['impact']
+                        }
                     }
+                ]
+            }
 
             resp = requests.post(self.vrops_site + api_url,
                                  auth=(self.vrops_user, self.vrops_password),
                                  headers=headers,
-                                 verify = False,
+                                 verify=False,
                                  data=json.dumps(data))
 
             if resp.status_code != 201:
-                self.logger.warning("Failed to create Alarm definition: {}, response {}"\
-                        .format(alarm_params['name'], resp.content))
+                logger.warning("Failed to create Alarm definition: {}, response {}"
+                               .format(alarm_params['name'], resp.content))
                 return None
 
             resp_data = json.loads(resp.content)
@@ -396,8 +400,7 @@ class MonPlugin():
             return alarm_uuid
 
         except Exception as exp:
-            self.logger.warning("Error creating alarm definition : {}\n{}".format(exp, traceback.format_exc()))
-
+            logger.warning("Error creating alarm definition : {} n{}".format(exp, traceback.format_exc()))
 
     def configure_rest_plugin(self):
         """
@@ -409,7 +412,7 @@ class MonPlugin():
         plugin_name = 'MON_module_REST_Plugin'
         plugin_id = self.check_if_plugin_configured(plugin_name)
 
-        #If REST plugin not configured, configure it
+        # If REST plugin not configured, configure it
         if plugin_id is not None:
             return plugin_id
         else:
@@ -417,45 +420,45 @@ class MonPlugin():
                 cert_file_string = open(SSL_CERTIFICATE_FILE_PATH, "rb").read()
             except IOError as exp:
                 msg = ("Could not read SSL certificate file: {}".format(SSL_CERTIFICATE_FILE_PATH))
-                self.logger.error(msg)
+                logger.error(msg)
                 raise IOError(msg)
             cert = load_certificate(FILETYPE_PEM, cert_file_string)
             certificate = cert.digest("sha1")
             api_url = '/suite-api/api/alertplugins'
             headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
             data = {
-                        "pluginTypeId": "RestPlugin",
-                        "name": plugin_name,
-                        "configValues": [
-                            {
-                                "name": "Url",
-                                "value": webhook_url
-                            },
-                            {
-                                "name": "Content-type",
-                                "value": "application/json"
-                            },
-                            {
-                                "name": "Certificate",
-                                "value": certificate
-                            },
-                            {
-                                "name": "ConnectionCount",
-                                "value": "20"
-                            }
-                        ]
+                "pluginTypeId": "RestPlugin",
+                "name": plugin_name,
+                "configValues": [
+                    {
+                        "name": "Url",
+                        "value": webhook_url
+                    },
+                    {
+                        "name": "Content-type",
+                        "value": "application/json"
+                    },
+                    {
+                        "name": "Certificate",
+                        "value": certificate
+                    },
+                    {
+                        "name": "ConnectionCount",
+                        "value": "20"
                     }
+                ]
+            }
 
             resp = requests.post(self.vrops_site + api_url,
                                  auth=(self.vrops_user, self.vrops_password),
                                  headers=headers,
-                                 verify = False,
+                                 verify=False,
                                  data=json.dumps(data))
 
             if resp.status_code is not 201:
-                self.logger.warning("Failed to create REST Plugin: {} for url: {}, \nresponse code: {},"\
-                            "\nresponse content: {}".format(plugin_name, webhook_url,\
-                            resp.status_code, resp.content))
+                logger.warning("Failed to create REST Plugin: {} for url: {},  nresponse code: {},"
+                               " nresponse content: {}".format(plugin_name, webhook_url,
+                                                               resp.status_code, resp.content))
                 return None
 
             resp_data = json.loads(resp.content)
@@ -463,16 +466,18 @@ class MonPlugin():
                 plugin_id = resp_data['pluginId']
 
             if plugin_id is None:
-                self.logger.warning("Failed to get REST Plugin ID for {}, url: {}".format(plugin_name, webhook_url))
+                logger.warning("Failed to get REST Plugin ID for {}, url: {}".format(plugin_name, webhook_url))
                 return None
             else:
-                self.logger.info("Created REST Plugin: {} with ID : {} for url: {}".format(plugin_name, plugin_id, webhook_url))
+                logger.info(
+                    "Created REST Plugin: {} with ID : {} for url: {}".format(plugin_name, plugin_id, webhook_url))
                 status = self.enable_rest_plugin(plugin_id, plugin_name)
                 if status is False:
-                    self.logger.warning("Failed to enable created REST Plugin: {} for url: {}".format(plugin_name, webhook_url))
+                    logger.warning(
+                        "Failed to enable created REST Plugin: {} for url: {}".format(plugin_name, webhook_url))
                     return None
                 else:
-                    self.logger.info("Enabled REST Plugin: {} for url: {}".format(plugin_name, webhook_url))
+                    logger.info("Enabled REST Plugin: {} for url: {}".format(plugin_name, webhook_url))
                     return plugin_id
 
     def check_if_plugin_configured(self, plugin_name):
@@ -480,17 +485,17 @@ class MonPlugin():
         Returns: plugin_id: if already created, None: if needs to be created
         """
         plugin_id = None
-        #Find the REST Plugin id details for - MON_module_REST_Plugin
+        # Find the REST Plugin id details for - MON_module_REST_Plugin
         api_url = '/suite-api/api/alertplugins'
         headers = {'Accept': 'application/json'}
 
         resp = requests.get(self.vrops_site + api_url,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Failed to REST GET Alarm plugin details \nResponse code: {}\nResponse content: {}"\
-            .format(resp.status_code, resp.content))
+            logger.warning("Failed to REST GET Alarm plugin details  nResponse code: {} nResponse content: {}"
+                           .format(resp.status_code, resp.content))
             return None
 
         # Look for specific plugin & parse pluginId for 'MON_module_REST_Plugin'
@@ -501,13 +506,12 @@ class MonPlugin():
                     plugin_id = notify_plugin.get('pluginId')
 
         if plugin_id is None:
-            self.logger.warning("REST plugin {} not found".format(plugin_name))
+            logger.warning("REST plugin {} not found".format(plugin_name))
             return None
         else:
-            self.logger.info("Found REST Plugin: {}".format(plugin_name))
+            logger.info("Found REST Plugin: {}".format(plugin_name))
             return plugin_id
 
-
     def enable_rest_plugin(self, plugin_id, plugin_name):
         """
         Enable the REST plugin using plugin_id
@@ -516,8 +520,8 @@ class MonPlugin():
         """
 
         if plugin_id is None or plugin_name is None:
-            self.logger.debug("enable_rest_plugin() : Plugin ID or plugin_name not provided for {} plugin"\
-                        .format(plugin_name))
+            logger.debug("enable_rest_plugin() : Plugin ID or plugin_name not provided for {} plugin"
+                         .format(plugin_name))
             return False
 
         try:
@@ -525,19 +529,19 @@ class MonPlugin():
 
             resp = requests.put(self.vrops_site + api_url,
                                 auth=(self.vrops_user, self.vrops_password),
-                                verify = False)
+                                verify=False)
 
             if resp.status_code is not 204:
-                self.logger.warning("Failed to enable REST plugin {}. \nResponse code {}\nResponse Content: {}"\
-                        .format(plugin_name, resp.status_code, resp.content))
+                logger.warning("Failed to enable REST plugin {}.  nResponse code {} nResponse Content: {}"
+                               .format(plugin_name, resp.status_code, resp.content))
                 return False
 
-            self.logger.info("Enabled REST plugin {}.".format(plugin_name))
+            logger.info("Enabled REST plugin {}.".format(plugin_name))
             return True
 
         except Exception as exp:
-            self.logger.warning("Error enabling REST plugin for {} plugin: Exception: {}\n{}"\
-                    .format(plugin_name, exp, traceback.format_exc()))
+            logger.warning("Error enabling REST plugin for {} plugin: Exception: {} n{}"
+                           .format(plugin_name, exp, traceback.format_exc()))
 
     def create_alarm_notification_rule(self, alarm_name, alarm_id, resource_id):
         """
@@ -554,45 +558,45 @@ class MonPlugin():
         notification_id = None
         plugin_name = 'MON_module_REST_Plugin'
 
-        #1) Find the REST Plugin id details for - MON_module_REST_Plugin
+        # 1) Find the REST Plugin id details for - MON_module_REST_Plugin
         plugin_id = self.check_if_plugin_configured(plugin_name)
         if plugin_id is None:
-            self.logger.warning("Failed to get REST plugin_id for : {}".format('MON_module_REST_Plugin'))
+            logger.warning("Failed to get REST plugin_id for : {}".format('MON_module_REST_Plugin'))
             return None
 
-        #2) Create Alarm notification rule
+        # 2) Create Alarm notification rule
         api_url = '/suite-api/api/notifications/rules'
         headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
         data = {
-                    "name" : notification_name,
-                    "pluginId" : plugin_id,
-                    "resourceFilter": {
-                        "matchResourceIdOnly": True,
-                        "resourceId": resource_id
-                        },
-                    "alertDefinitionIdFilters" : {
-                    "values" : [ alarm_id ]
-                    }
-                }
+            "name": notification_name,
+            "pluginId": plugin_id,
+            "resourceFilter": {
+                "matchResourceIdOnly": True,
+                "resourceId": resource_id
+            },
+            "alertDefinitionIdFilters": {
+                "values": [alarm_id]
+            }
+        }
 
         resp = requests.post(self.vrops_site + api_url,
                              auth=(self.vrops_user, self.vrops_password),
                              headers=headers,
-                             verify = False,
+                             verify=False,
                              data=json.dumps(data))
 
         if resp.status_code is not 201:
-            self.logger.warning("Failed to create Alarm notification rule {} for {} alarm."\
-                        "\nResponse code: {}\nResponse content: {}"\
-                        .format(notification_name, alarm_name, resp.status_code, resp.content))
+            logger.warning("Failed to create Alarm notification rule {} for {} alarm."
+                           " nResponse code: {} nResponse content: {}"
+                           .format(notification_name, alarm_name, resp.status_code, resp.content))
             return None
 
-        #parse notification id from response
+        # parse notification id from response
         resp_data = json.loads(resp.content)
         if resp_data.get('id') is not None:
             notification_id = resp_data['id']
 
-        self.logger.info("Created Alarm notification rule {} for {} alarm.".format(notification_name, alarm_name))
+        logger.info("Created Alarm notification rule {} for {} alarm.".format(notification_name, alarm_name))
         return notification_id
 
     def get_vm_moref_id(self, vapp_uuid):
@@ -604,14 +608,12 @@ class MonPlugin():
                 vm_details = self.get_vapp_details_rest(vapp_uuid)
                 if vm_details and "vm_vcenter_info" in vm_details:
                     vm_moref_id = vm_details["vm_vcenter_info"].get("vm_moref_id", None)
-
-            self.logger.info("Found vm_moref_id: {} for vApp UUID: {}".format(vm_moref_id, vapp_uuid))
-            return vm_moref_id
+                    logger.info("Found vm_moref_id: {} for vApp UUID: {}".format(vm_moref_id, vapp_uuid))
+                    return vm_moref_id
 
         except Exception as exp:
-            self.logger.warning("Error occurred while getting VM moref ID for VM : {}\n{}"\
-                        .format(exp, traceback.format_exc()))
-
+            logger.warning("Error occurred while getting VM moref ID for VM : {} n{}"
+                           .format(exp, traceback.format_exc()))
 
     def get_vapp_details_rest(self, vapp_uuid=None):
         """
@@ -632,31 +634,31 @@ class MonPlugin():
 
         vca = self.connect_as_admin()
         if not vca:
-            self.logger.warning("Failed to connect to vCD")
+            logger.warning("Failed to connect to vCD")
             return parsed_respond
 
         url_list = [self.vcloud_site, '/api/vApp/vapp-', vapp_uuid]
         get_vapp_restcall = ''.join(url_list)
 
         if vca._session:
-            headers = {'Accept':'application/*+xml;version=' + API_VERSION,
+            headers = {'Accept': 'application/*+xml;version=' + API_VERSION,
                        'x-vcloud-authorization': vca._session.headers['x-vcloud-authorization']}
             response = requests.get(get_vapp_restcall,
                                     headers=headers,
                                     verify=False)
 
             if response.status_code != 200:
-                self.logger.warning("REST API call {} failed. Return status code {}"\
-                            .format(get_vapp_restcall, response.content))
+                logger.warning("REST API call {} failed. Return status code {}"
+                               .format(get_vapp_restcall, response.content))
                 return parsed_respond
 
             try:
                 xmlroot_respond = XmlElementTree.fromstring(response.content)
 
                 namespaces = {'vm': 'http://www.vmware.com/vcloud/v1.5',
-                              "vmext":"http://www.vmware.com/vcloud/extension/v1.5",
-                              "xmlns":"http://www.vmware.com/vcloud/v1.5"
-                             }
+                              "vmext": "http://www.vmware.com/vcloud/extension/v1.5",
+                              "xmlns": "http://www.vmware.com/vcloud/v1.5"
+                              }
 
                 # parse children section for other attrib
                 children_section = xmlroot_respond.find('vm:Children/', namespaces)
@@ -668,15 +670,14 @@ class MonPlugin():
                         vmext = vim_info.find('vmext:VmVimObjectRef', namespaces)
                         if vmext is not None:
                             vm_vcenter_info["vm_moref_id"] = vmext.find('vmext:MoRef', namespaces).text
-                        parsed_respond["vm_vcenter_info"]= vm_vcenter_info
+                        parsed_respond["vm_vcenter_info"] = vm_vcenter_info
 
-            except Exception as exp :
-                self.logger.warning("Error occurred calling rest api for getting vApp details: {}\n{}"\
-                            .format(exp, traceback.format_exc()))
+            except Exception as exp:
+                logger.warning("Error occurred calling rest api for getting vApp details: {} n{}"
+                               .format(exp, traceback.format_exc()))
 
         return parsed_respond
 
-
     def connect_as_admin(self):
         """ Method connect as pvdc admin user to vCloud director.
             There are certain actions that can be done only by the provider vdc admin user.
@@ -686,20 +687,19 @@ class MonPlugin():
                 Returns the vca object that can later be used to connect to vCloud Director as the provider vdc admin
         """
 
-        self.logger.debug("Logging into vCD org as admin.")
+        logger.debug("Logging into vCD org as admin.")
 
         try:
             host = self.vcloud_site
             org = 'System'
             client_as_admin = Client(host, verify_ssl_certs=False)
-            client_as_admin.set_credentials(BasicLoginCredentials(self.admin_username, org,\
+            client_as_admin.set_credentials(BasicLoginCredentials(self.admin_username, org,
                                                                   self.admin_password))
-        except Exception as e:
-            self.logger.warning("Can't connect to a vCloud director as: {} with exception {}"\
-                             .format(self.admin_username, e))
-
-        return client_as_admin
+            return client_as_admin
 
+        except Exception as e:
+            logger.warning("Can't connect to a vCloud director as: {} with exception {}"
+                           .format(self.admin_username, e))
 
     def get_vm_resource_id(self, vm_moref_id):
         """ Find resource ID in vROPs using vm_moref_id
@@ -712,12 +712,12 @@ class MonPlugin():
 
         resp = requests.get(self.vrops_site + api_url,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Failed to get resource details from vROPs for {}"\
-                             "\nResponse code:{}\nResponse Content: {}"\
-                             .format(vm_moref_id, resp.status_code, resp.content))
+            logger.warning("Failed to get resource details from vROPs for {}"
+                           " nResponse code:{} nResponse Content: {}"
+                           .format(vm_moref_id, resp.status_code, resp.content))
             return None
 
         vm_resource_id = None
@@ -731,20 +731,19 @@ class MonPlugin():
                         if resource_details.get('resourceIdentifiers') is not None:
                             resource_identifiers = resource_details['resourceIdentifiers']
                             for resource_identifier in resource_identifiers:
-                                if resource_identifier['identifierType']['name']=='VMEntityObjectID':
+                                if resource_identifier['identifierType']['name'] == 'VMEntityObjectID':
                                     if resource_identifier.get('value') is not None and \
-                                        resource_identifier['value']==vm_moref_id:
+                                            resource_identifier['value'] == vm_moref_id:
                                         vm_resource_id = resource['identifier']
-                                        self.logger.info("Found VM resource ID: {} for vm_moref_id: {}"\
-                                                         .format(vm_resource_id, vm_moref_id))
+                                        logger.info("Found VM resource ID: {} for vm_moref_id: {}"
+                                                    .format(vm_resource_id, vm_moref_id))
 
         except Exception as exp:
-            self.logger.warning("get_vm_resource_id: Error in parsing {}\n{}"\
-                             .format(exp, traceback.format_exc()))
+            logger.warning("get_vm_resource_id: Error in parsing {} n{}"
+                           .format(exp, traceback.format_exc()))
 
         return vm_resource_id
 
-
     def get_metrics_data(self, metric={}):
         """Get an individual metric's data of a resource.
         Params:
@@ -768,79 +767,80 @@ class MonPlugin():
         return_data['schema_type'] = 'read_metric_data_response'
         return_data['vim_uuid'] = metric['vim_uuid']
         return_data['metric_name'] = metric['metric_name']
-        #To do - No metric_uuid in vROPs, thus returning '0'
+        # To do - No metric_uuid in vROPs, thus returning '0'
         return_data['metric_uuid'] = '0'
         return_data['correlation_id'] = metric['correlation_id']
         return_data['resource_uuid'] = metric['resource_uuid']
-        return_data['metrics_data'] = {'time_series':[], 'metrics_series':[]}
-        #To do - Need confirmation about uuid & id
+        return_data['metrics_data'] = {'time_series': [], 'metrics_series': []}
+        # To do - Need confirmation about uuid & id
         ##if 'tenant_uuid' in metric and metric['tenant_uuid'] is not None:
         ##    return_data['tenant_uuid'] = metric['tenant_uuid']
         ##else:
         ##    return_data['tenant_uuid'] = None
         return_data['unit'] = None
-        #return_data['tenant_id'] = self.tenant_id
-        #self.logger.warning("return_data: {}".format(return_data))
+        # return_data['tenant_id'] = self.tenant_id
+        # logger.warning("return_data: {}".format(return_data))
 
-        #1) Get metric details from plugin specific file & format it into vROPs metrics
+        # 1) Get metric details from plugin specific file & format it into vROPs metrics
         metric_key_params = self.get_default_Params(metric['metric_name'])
 
         if not metric_key_params:
-            self.logger.warning("Metric not supported: {}".format(metric['metric_name']))
-            #To Do: Return message
+            logger.warning("Metric not supported: {}".format(metric['metric_name']))
+            # To Do: Return message
             return return_data
 
         return_data['unit'] = metric_key_params['unit']
 
-        #2) Find the resource id in vROPs based on OSM resource_uuid
-        #2.a) Find vm_moref_id from vApp uuid in vCD
+        # 2) Find the resource id in vROPs based on OSM resource_uuid
+        # 2.a) Find vm_moref_id from vApp uuid in vCD
         vm_moref_id = self.get_vm_moref_id(metric['resource_uuid'])
         if vm_moref_id is None:
-            self.logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(metric['resource_uuid']))
+            logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(metric['resource_uuid']))
             return return_data
-        #2.b) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        # 2.b) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
         resource_id = self.get_vm_resource_id(vm_moref_id)
         if resource_id is None:
-            self.logger.warning("Failed to find resource in vROPs: {}".format(metric['resource_uuid']))
+            logger.warning("Failed to find resource in vROPs: {}".format(metric['resource_uuid']))
             return return_data
 
-        #3) Calculate begin & end time for period & period unit
+        # 3) Calculate begin & end time for period & period unit
         end_time = int(round(time.time() * 1000))
         if metric['collection_unit'] == 'YR':
             time_diff = PERIOD_MSEC[metric['collection_unit']]
         else:
-            time_diff = metric['collection_period']* PERIOD_MSEC[metric['collection_unit']]
+            time_diff = metric['collection_period'] * PERIOD_MSEC[metric['collection_unit']]
         begin_time = end_time - time_diff
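+        # Illustrative example (assuming PERIOD_MSEC maps 'MIN' to 60000): a request with
+        # collection_period=5 and collection_unit='MIN' yields a 5-minute window of epoch
+        # milliseconds ending at the current time.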
 
-        #4) Get the metrics data
-        self.logger.info("metric_key_params['metric_key'] = {}".format(metric_key_params['metric_key']))
-        self.logger.info("end_time: {}, begin_time: {}".format(end_time, begin_time))
+        # 4) Get the metrics data
+        logger.info("metric_key_params['metric_key'] = {}".format(metric_key_params['metric_key']))
+        logger.info("end_time: {}, begin_time: {}".format(end_time, begin_time))
 
-        url_list = ['/suite-api/api/resources/', resource_id, '/stats?statKey=',\
-                    metric_key_params['metric_key'], '&begin=', str(begin_time),'&end=',str(end_time)]
+        url_list = ['/suite-api/api/resources/', resource_id, '/stats?statKey=',
+                    metric_key_params['metric_key'], '&begin=', str(begin_time), '&end=', str(end_time)]
         api_url = ''.join(url_list)
         headers = {'Accept': 'application/json'}
 
         resp = requests.get(self.vrops_site + api_url,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Failed to retrieve Metric data from vROPs for {}\nResponse code:{}\nResponse Content: {}"\
+            logger.warning(
+                "Failed to retrieve Metric data from vROPs for {} nResponse code:{} nResponse Content: {}"
                     .format(metric['metric_name'], resp.status_code, resp.content))
             return return_data
 
-        #5) Convert to required format
+        # 5) Convert to required format
         metrics_data = {}
         json_data = json.loads(resp.content)
-        for resp_key,resp_val in six.iteritems(json_data):
+        for resp_key, resp_val in six.iteritems(json_data):
             if resp_key == 'values':
                 data = json_data['values'][0]
-                for data_k,data_v in six.iteritems(data):
+                for data_k, data_v in six.iteritems(data):
                     if data_k == 'stat-list':
                         stat_list = data_v
-                        for stat_list_k,stat_list_v in six.iteritems(stat_list):
-                            for stat_keys,stat_vals in six.iteritems(stat_list_v[0]):
+                        for stat_list_k, stat_list_v in six.iteritems(stat_list):
+                            for stat_keys, stat_vals in six.iteritems(stat_list_v[0]):
                                 if stat_keys == 'timestamps':
                                     metrics_data['time_series'] = stat_list_v[0]['timestamps']
                                 if stat_keys == 'data':
@@ -854,27 +854,27 @@ class MonPlugin():
         """Update alarm configuration (i.e. Symptom & alarm) as per request
         """
         if new_alarm_config.get('alarm_uuid') is None:
-            self.logger.warning("alarm_uuid is required to update an Alarm")
+            logger.warning("alarm_uuid is required to update an Alarm")
             return None
-        #1) Get Alarm details from it's uuid & find the symptom definition
+        # 1) Get Alarm details from its UUID & find the symptom definition
         alarm_details_json, alarm_details = self.get_alarm_defination_details(new_alarm_config['alarm_uuid'])
         if alarm_details_json is None:
             return None
 
         try:
-            #2) Update the symptom definition
+            # 2) Update the symptom definition
             if alarm_details['alarm_id'] is not None and alarm_details['symptom_definition_id'] is not None:
                 symptom_defination_id = alarm_details['symptom_definition_id']
             else:
-                self.logger.info("Symptom Definition ID not found for {}".format(new_alarm_config['alarm_uuid']))
+                logger.info("Symptom Definition ID not found for {}".format(new_alarm_config['alarm_uuid']))
                 return None
 
             symptom_uuid = self.update_symptom_defination(symptom_defination_id, new_alarm_config)
 
-            #3) Update the alarm definition & Return UUID if successful update
+            # 3) Update the alarm definition & Return UUID if successful update
             if symptom_uuid is None:
-                self.logger.info("Symptom Definition details not found for {}"\
-                                .format(new_alarm_config['alarm_uuid']))
+                logger.info("Symptom Definition details not found for {}"
+                            .format(new_alarm_config['alarm_uuid']))
                 return None
             else:
                 alarm_uuid = self.reconfigure_alarm(alarm_details_json, new_alarm_config)
@@ -883,13 +883,13 @@ class MonPlugin():
                 else:
                     return alarm_uuid
         except:
-            self.logger.error("Exception while updating alarm: {}".format(traceback.format_exc()))
+            logger.error("Exception while updating alarm: {}".format(traceback.format_exc()))
 
     def get_alarm_defination_details(self, alarm_uuid):
         """Get alarm details based on alarm UUID
         """
         if alarm_uuid is None:
-            self.logger.warning("get_alarm_defination_details: Alarm UUID not provided")
+            logger.warning("get_alarm_defination_details: Alarm UUID not provided")
             return None, None
 
         alarm_details = {}
@@ -899,11 +899,11 @@ class MonPlugin():
 
         resp = requests.get(self.vrops_site + api_url + alarm_uuid,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Alarm to be updated not found: {}\nResponse code:{}\nResponse Content: {}"\
-                    .format(alarm_uuid, resp.status_code, resp.content))
+            logger.warning("Alarm to be updated not found: {} nResponse code:{} nResponse Content: {}"
+                           .format(alarm_uuid, resp.status_code, resp.content))
             return None, None
 
         try:
@@ -915,14 +915,14 @@ class MonPlugin():
                 alarm_details['resource_kind'] = json_data['resourceKindKey']
                 alarm_details['type'] = json_data['type']
                 alarm_details['sub_type'] = json_data['subType']
-                alarm_details['symptom_definition_id'] = json_data['states'][0]['base-symptom-set']['symptomDefinitionIds'][0]
+                alarm_details['symptom_definition_id'] = \
+                    json_data['states'][0]['base-symptom-set']['symptomDefinitionIds'][0]
         except Exception as exp:
-            self.logger.warning("Exception while retrieving alarm definition details: {}".format(exp))
+            logger.warning("Exception while retrieving alarm definition details: {}".format(exp))
             return None, None
 
         return json_data, alarm_details
 
-
     def get_alarm_defination_by_name(self, alarm_name):
         """Get alarm details based on alarm name
         """
@@ -930,7 +930,7 @@ class MonPlugin():
         alert_match_list = []
 
         if alarm_name is None:
-            self.logger.warning("get_alarm_defination_by_name: Alarm name not provided")
+            logger.warning("get_alarm_defination_by_name: Alarm name not provided")
             return alert_match_list
 
         json_data = {}
@@ -939,11 +939,11 @@ class MonPlugin():
 
         resp = requests.get(self.vrops_site + api_url,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("get_alarm_defination_by_name: Error in response: {}\nResponse code:{}"\
-                    "\nResponse Content: {}".format(alarm_name, resp.status_code, resp.content))
+            logger.warning("get_alarm_defination_by_name: Error in response: {} nResponse code:{}"
+                           " nResponse Content: {}".format(alarm_name, resp.status_code, resp.content))
             return alert_match_list
 
         try:
@@ -952,21 +952,20 @@ class MonPlugin():
                 alerts_list = json_data['alertDefinitions']
                 alert_match_list = list(filter(lambda alert: alert['name'] == alarm_name, alerts_list))
                 status = False if not alert_match_list else True
-                #self.logger.debug("Found alert_match_list: {}for larm_name: {},\nstatus: {}".format(alert_match_list, alarm_name,status))
+                # logger.debug("Found alert_match_list: {}for larm_name: {}, nstatus: {}".format(alert_match_list, alarm_name,status))
 
             return alert_match_list
 
         except Exception as exp:
-            self.logger.warning("Exception while searching alarm definition: {}".format(exp))
+            logger.warning("Exception while searching alarm definition: {}".format(exp))
             return alert_match_list
 
-
     def update_symptom_defination(self, symptom_uuid, new_alarm_config):
         """Update symptom definition based on new alarm input configuration
         """
-        #1) Get symptom definition details
+        # 1) Get symptom definition details
         symptom_details = self.get_symptom_defination_details(symptom_uuid)
-        #print "\n\nsymptom_details: {}".format(symptom_details)
+        # print " n nsymptom_details: {}".format(symptom_details)
         if symptom_details is None:
             return None
 
@@ -976,48 +975,46 @@ class MonPlugin():
             symptom_details['state']['condition']['operator'] = OPERATION_MAPPING[new_alarm_config['operation']]
         if 'threshold_value' in new_alarm_config and new_alarm_config['threshold_value'] is not None:
             symptom_details['state']['condition']['value'] = new_alarm_config['threshold_value']
-        #Find vrops metric key from metric_name, if required
+        # Find vrops metric key from metric_name, if required
         """
         if 'metric_name' in new_alarm_config and new_alarm_config['metric_name'] is not None:
             metric_key_params = self.get_default_Params(new_alarm_config['metric_name'])
             if not metric_key_params:
-                self.logger.warning("Metric not supported: {}".format(config_dict['metric_name']))
+                logger.warning("Metric not supported: {}".format(config_dict['metric_name']))
                 return None
             symptom_details['state']['condition']['key'] = metric_key_params['metric_key']
         """
-        self.logger.info("Fetched Symptom details : {}".format(symptom_details))
+        logger.info("Fetched Symptom details : {}".format(symptom_details))
 
         api_url = '/suite-api/api/symptomdefinitions'
-        headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
         data = json.dumps(symptom_details)
         resp = requests.put(self.vrops_site + api_url,
-                             auth=(self.vrops_user, self.vrops_password),
-                             headers=headers,
-                             verify = False,
-                             data=data)
+                            auth=(self.vrops_user, self.vrops_password),
+                            headers=headers,
+                            verify=False,
+                            data=data)
 
         if resp.status_code != 200:
-            self.logger.warning("Failed to update Symptom definition: {}, response {}"\
-                    .format(symptom_uuid, resp.content))
+            logger.warning("Failed to update Symptom definition: {}, response {}"
+                           .format(symptom_uuid, resp.content))
             return None
 
-
         if symptom_uuid is not None:
-            self.logger.info("Symptom definition updated {} for alarm: {}"\
-                    .format(symptom_uuid, new_alarm_config['alarm_uuid']))
+            logger.info("Symptom definition updated {} for alarm: {}"
+                        .format(symptom_uuid, new_alarm_config['alarm_uuid']))
             return symptom_uuid
         else:
-            self.logger.warning("Failed to update Symptom Definition {} for : {}"\
-                    .format(symptom_uuid, new_alarm_config['alarm_uuid']))
+            logger.warning("Failed to update Symptom Definition {} for : {}"
+                           .format(symptom_uuid, new_alarm_config['alarm_uuid']))
             return None
 
-
     def get_symptom_defination_details(self, symptom_uuid):
         """Get symptom definition details
         """
         symptom_details = {}
         if symptom_uuid is None:
-            self.logger.warning("get_symptom_defination_details: Symptom UUID not provided")
+            logger.warning("get_symptom_defination_details: Symptom UUID not provided")
             return None
 
         api_url = '/suite-api/api/symptomdefinitions/'
@@ -1025,18 +1022,17 @@ class MonPlugin():
 
         resp = requests.get(self.vrops_site + api_url + symptom_uuid,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Symptom definition not found {} \nResponse code:{}\nResponse Content: {}"\
-                    .format(symptom_uuid, resp.status_code, resp.content))
+            logger.warning("Symptom definition not found {}  nResponse code:{} nResponse Content: {}"
+                           .format(symptom_uuid, resp.status_code, resp.content))
             return None
 
         symptom_details = json.loads(resp.content)
-        #print "New symptom Details: {}".format(symptom_details)
+        # print "New symptom Details: {}".format(symptom_details)
         return symptom_details
 
-
     def reconfigure_alarm(self, alarm_details_json, new_alarm_config):
         """Reconfigure alarm definition as per input
         """
@@ -1046,52 +1042,52 @@ class MonPlugin():
             alarm_details_json['description'] = new_alarm_config['description']
 
         api_url = '/suite-api/api/alertdefinitions'
-        headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
         data = json.dumps(alarm_details_json)
         resp = requests.put(self.vrops_site + api_url,
-                             auth=(self.vrops_user, self.vrops_password),
-                             headers=headers,
-                             verify = False,
-                             data=data)
+                            auth=(self.vrops_user, self.vrops_password),
+                            headers=headers,
+                            verify=False,
+                            data=data)
 
         if resp.status_code != 200:
-            self.logger.warning("Failed to update Alarm definition: {}, response code {}, response content: {}"\
-                    .format(alarm_details_json['id'], resp.status_code, resp.content))
+            logger.warning("Failed to update Alarm definition: {}, response code {}, response content: {}"
+                           .format(alarm_details_json['id'], resp.status_code, resp.content))
             return None
         else:
             parsed_alarm_details = json.loads(resp.content)
             alarm_def_uuid = parsed_alarm_details['id'].split('-', 1)[1]
-            self.logger.info("Successfully updated Alarm definition: {}".format(alarm_def_uuid))
+            logger.info("Successfully updated Alarm definition: {}".format(alarm_def_uuid))
             return alarm_def_uuid
 
     def delete_alarm_configuration(self, delete_alarm_req_dict):
         """Delete complete alarm configuration
         """
         if delete_alarm_req_dict['alarm_uuid'] is None:
-            self.logger.info("delete_alarm_configuration: Alarm UUID not provided")
+            logger.info("delete_alarm_configuration: Alarm UUID not provided")
             return None
-        #1)Get alarm & symptom definition details
+        # 1)Get alarm & symptom definition details
         alarm_details_json, alarm_details = self.get_alarm_defination_details(delete_alarm_req_dict['alarm_uuid'])
         if alarm_details is None or alarm_details_json is None:
             return None
 
-        #2) Delete alarm notification
+        # 2) Delete alarm notification
         rule_id = self.delete_notification_rule(alarm_details['alarm_name'])
         if rule_id is None:
             return None
 
-        #3) Delete alarm configuration
+        # 3) Delete alarm configuration
         alarm_id = self.delete_alarm_defination(alarm_details['alarm_id'])
         if alarm_id is None:
             return None
 
-        #4) Delete alarm symptom
+        # 4) Delete alarm symptom
         symptom_id = self.delete_symptom_definition(alarm_details['symptom_definition_id'])
         if symptom_id is None:
             return None
         else:
-            self.logger.info("Completed deleting alarm configuration: {}"\
-                    .format(delete_alarm_req_dict['alarm_uuid']))
+            logger.info("Completed deleting alarm configuration: {}"
+                        .format(delete_alarm_req_dict['alarm_uuid']))
             return delete_alarm_req_dict['alarm_uuid']
 
     def delete_notification_rule(self, alarm_name):
@@ -1102,15 +1098,15 @@ class MonPlugin():
             return None
         else:
             api_url = '/suite-api/api/notifications/rules/'
-            headers = {'Accept':'application/json'}
+            headers = {'Accept': 'application/json'}
             resp = requests.delete(self.vrops_site + api_url + rule_id,
-                                auth=(self.vrops_user, self.vrops_password),
-                                verify = False, headers = headers)
+                                   auth=(self.vrops_user, self.vrops_password),
+                                   verify=False, headers=headers)
             if resp.status_code is not 204:
-                self.logger.warning("Failed to delete notification rules for {}".format(alarm_name))
+                logger.warning("Failed to delete notification rules for {}".format(alarm_name))
                 return None
             else:
-                self.logger.info("Deleted notification rules for {}".format(alarm_name))
+                logger.info("Deleted notification rules for {}".format(alarm_name))
                 return rule_id
 
     def get_notification_rule_id_by_alarm_name(self, alarm_name):
@@ -1118,14 +1114,14 @@ class MonPlugin():
         """
         alarm_notify_id = 'notify_' + alarm_name
         api_url = '/suite-api/api/notifications/rules'
-        headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
+        headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
         resp = requests.get(self.vrops_site + api_url,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Failed to get notification rules details for {}"\
-                    .format(alarm_name))
+            logger.warning("Failed to get notification rules details for {}"
+                           .format(alarm_name))
             return None
 
         notifications = json.loads(resp.content)
@@ -1134,45 +1130,44 @@ class MonPlugin():
             for dict in notifications_list:
                 if dict['name'] is not None and dict['name'] == alarm_notify_id:
                     notification_id = dict['id']
-                    self.logger.info("Found Notification id to be deleted: {} for {}"\
-                            .format(notification_id, alarm_name))
+                    logger.info("Found Notification id to be deleted: {} for {}"
+                                .format(notification_id, alarm_name))
                     return notification_id
 
-            self.logger.warning("Notification id to be deleted not found for {}"\
-                            .format(alarm_name))
+            logger.warning("Notification id to be deleted not found for {}"
+                           .format(alarm_name))
             return None
 
     def delete_alarm_defination(self, alarm_id):
         """Delete created Alarm definition
         """
         api_url = '/suite-api/api/alertdefinitions/'
-        headers = {'Accept':'application/json'}
+        headers = {'Accept': 'application/json'}
         resp = requests.delete(self.vrops_site + api_url + alarm_id,
-                            auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                               auth=(self.vrops_user, self.vrops_password),
+                               verify=False, headers=headers)
         if resp.status_code is not 204:
-            self.logger.warning("Failed to delete alarm definition {}".format(alarm_id))
+            logger.warning("Failed to delete alarm definition {}".format(alarm_id))
             return None
         else:
-            self.logger.info("Deleted alarm definition {}".format(alarm_id))
+            logger.info("Deleted alarm definition {}".format(alarm_id))
             return alarm_id
 
     def delete_symptom_definition(self, symptom_id):
         """Delete symptom definition
         """
         api_url = '/suite-api/api/symptomdefinitions/'
-        headers = {'Accept':'application/json'}
+        headers = {'Accept': 'application/json'}
         resp = requests.delete(self.vrops_site + api_url + symptom_id,
-                            auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                               auth=(self.vrops_user, self.vrops_password),
+                               verify=False, headers=headers)
         if resp.status_code is not 204:
-            self.logger.warning("Failed to delete symptom definition {}".format(symptom_id))
+            logger.warning("Failed to delete symptom definition {}".format(symptom_id))
             return None
         else:
-            self.logger.info("Deleted symptom definition {}".format(symptom_id))
+            logger.info("Deleted symptom definition {}".format(symptom_id))
             return symptom_id
 
-
     def verify_metric_support(self, metric_info):
         """Verify, if Metric is supported by vROPs plugin, verify metric unit & return status
             Returns:
@@ -1180,108 +1175,107 @@ class MonPlugin():
         """
         status = False
         if 'metric_name' not in metric_info:
-            self.logger.debug("Metric name not provided: {}".format(metric_info))
+            logger.debug("Metric name not provided: {}".format(metric_info))
             return status
         metric_key_params = self.get_default_Params(metric_info['metric_name'].lower())
         if not metric_key_params:
-            self.logger.warning("Metric not supported: {}".format(metric_info['metric_name']))
+            logger.warning("Metric not supported: {}".format(metric_info['metric_name']))
             return status
         else:
-            #If Metric is supported, verify optional metric unit & return status
+            # If Metric is supported, verify optional metric unit & return status
             if 'metric_unit' in metric_info:
                 if metric_key_params.get('unit') == metric_info['metric_unit']:
-                    self.logger.info("Metric is supported with unit: {}".format(metric_info['metric_name']))
+                    logger.info("Metric is supported with unit: {}".format(metric_info['metric_name']))
                     status = True
                 else:
-                    self.logger.debug("Metric supported but there is unit mismatch for: {}."\
-                                    "Supported unit: {}"\
-                                    .format(metric_info['metric_name'],metric_key_params['unit']))
+                    logger.debug("Metric supported but there is unit mismatch for: {}."
+                                 "Supported unit: {}"
+                                 .format(metric_info['metric_name'], metric_key_params['unit']))
                     status = True
         return status
 
     def get_triggered_alarms_list(self, list_alarm_input):
         """Get list of triggered alarms on a resource based on alarm input request.
         """
-        #TO Do - Need to add filtering of alarms based on Severity & alarm name
+        # TODO - Need to add filtering of alarms based on Severity & alarm name
 
         triggered_alarms_list = []
         if list_alarm_input.get('resource_uuid') is None:
-            self.logger.warning("Resource UUID is required to get triggered alarms list")
+            logger.warning("Resource UUID is required to get triggered alarms list")
             return triggered_alarms_list
 
-        #1)Find vROPs resource ID using RO resource UUID
+        # 1)Find vROPs resource ID using RO resource UUID
         vrops_resource_id = self.get_vrops_resourceid_from_ro_uuid(list_alarm_input['resource_uuid'])
         if vrops_resource_id is None:
             return triggered_alarms_list
 
-        #2)Get triggered alarms on particular resource
-        triggered_alarms_list = self.get_triggered_alarms_on_resource(list_alarm_input['resource_uuid'], vrops_resource_id)
+        # 2)Get triggered alarms on particular resource
+        triggered_alarms_list = self.get_triggered_alarms_on_resource(list_alarm_input['resource_uuid'],
+                                                                      vrops_resource_id)
         return triggered_alarms_list
 
     def get_vrops_resourceid_from_ro_uuid(self, ro_resource_uuid):
         """Fetch vROPs resource ID using resource UUID from RO/SO
         """
-        #1) Find vm_moref_id from vApp uuid in vCD
+        # 1) Find vm_moref_id from vApp uuid in vCD
         vm_moref_id = self.get_vm_moref_id(ro_resource_uuid)
         if vm_moref_id is None:
-            self.logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(ro_resource_uuid))
+            logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(ro_resource_uuid))
             return None
 
-        #2) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
+        # 2) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification
         vrops_resource_id = self.get_vm_resource_id(vm_moref_id)
         if vrops_resource_id is None:
-            self.logger.warning("Failed to find resource in vROPs: {}".format(ro_resource_uuid))
+            logger.warning("Failed to find resource in vROPs: {}".format(ro_resource_uuid))
             return None
         return vrops_resource_id
 
-
     def get_triggered_alarms_on_resource(self, ro_resource_uuid, vrops_resource_id):
         """Get triggered alarms on particular resource & return list of dictionary of alarms
         """
         resource_alarms = []
         api_url = '/suite-api/api/alerts?resourceId='
-        headers = {'Accept':'application/json'}
+        headers = {'Accept': 'application/json'}
         resp = requests.get(self.vrops_site + api_url + vrops_resource_id,
                             auth=(self.vrops_user, self.vrops_password),
-                            verify = False, headers = headers)
+                            verify=False, headers=headers)
 
         if resp.status_code is not 200:
-            self.logger.warning("Failed to get triggered alarms for {}"\
-                    .format(ro_resource_uuid))
+            logger.warning("Failed to get triggered alarms for {}"
+                           .format(ro_resource_uuid))
             return None
 
         all_alerts = json.loads(resp.content)
         if 'alerts' in all_alerts:
             if not all_alerts['alerts']:
-                self.logger.info("No alarms present on resource {}".format(ro_resource_uuid))
+                logger.info("No alarms present on resource {}".format(ro_resource_uuid))
                 return resource_alarms
             all_alerts_list = all_alerts['alerts']
             for alarm in all_alerts_list:
-                #self.logger.info("Triggered Alarm {}".format(alarm))
-                if alarm['alertDefinitionName'] is not None and\
-                    len(alarm['alertDefinitionName'].split('-', 1)) == 2:
-                        if alarm['alertDefinitionName'].split('-', 1)[1] == ro_resource_uuid:
-                            alarm_instance = {}
-                            alarm_instance['alarm_uuid'] = alarm['alertDefinitionId'].split('-', 1)[1]
-                            alarm_instance['resource_uuid'] = ro_resource_uuid
-                            alarm_instance['alarm_instance_uuid'] = alarm['alertId']
-                            alarm_instance['vim_type'] = 'VMware'
-                            #find severity of alarm
-                            severity = None
-                            for key,value in six.iteritems(severity_mano2vrops):
-                                if value == alarm['alertLevel']:
-                                    severity = key
-                            if severity is None:
-                                severity = 'INDETERMINATE'
-                            alarm_instance['severity'] = severity
-                            alarm_instance['status'] = alarm['status']
-                            alarm_instance['start_date'] = self.convert_date_time(alarm['startTimeUTC'])
-                            alarm_instance['update_date'] = self.convert_date_time(alarm['updateTimeUTC'])
-                            alarm_instance['cancel_date'] = self.convert_date_time(alarm['cancelTimeUTC'])
-                            self.logger.info("Triggered Alarm on resource {}".format(alarm_instance))
-                            resource_alarms.append(alarm_instance)
+                # logger.info("Triggered Alarm {}".format(alarm))
+                if alarm['alertDefinitionName'] is not None and \
+                        len(alarm['alertDefinitionName'].split('-', 1)) == 2:
+                    if alarm['alertDefinitionName'].split('-', 1)[1] == ro_resource_uuid:
+                        alarm_instance = {'alarm_uuid': alarm['alertDefinitionId'].split('-', 1)[1],
+                                          'resource_uuid': ro_resource_uuid,
+                                          'alarm_instance_uuid': alarm['alertId'],
+                                          'vim_type': 'VMware'}
+                        # find severity of alarm
+                        severity = None
+                        for key, value in six.iteritems(severity_mano2vrops):
+                            if value == alarm['alertLevel']:
+                                severity = key
+                        if severity is None:
+                            severity = 'INDETERMINATE'
+                        alarm_instance['severity'] = severity
+                        alarm_instance['status'] = alarm['status']
+                        alarm_instance['start_date'] = self.convert_date_time(alarm['startTimeUTC'])
+                        alarm_instance['update_date'] = self.convert_date_time(alarm['updateTimeUTC'])
+                        alarm_instance['cancel_date'] = self.convert_date_time(alarm['cancelTimeUTC'])
+                        logger.info("Triggered Alarm on resource {}".format(alarm_instance))
+                        resource_alarms.append(alarm_instance)
         if not resource_alarms:
-            self.logger.info("No alarms present on resource {}".format(ro_resource_uuid))
+            logger.info("No alarms present on resource {}".format(ro_resource_uuid))
         return resource_alarms
 
     def convert_date_time(self, date_time):
@@ -1289,7 +1283,6 @@ class MonPlugin():
         """
         date_time_formatted = '0000-00-00T00:00:00'
         if date_time != 0:
-            complete_datetime = datetime.datetime.fromtimestamp(date_time/1000.0, tz=pytz.utc).isoformat('T')
-            date_time_formatted = complete_datetime.split('.',1)[0]
+            complete_datetime = datetime.datetime.fromtimestamp(date_time / 1000.0, tz=pytz.utc).isoformat('T')
+            date_time_formatted = complete_datetime.split('.', 1)[0]
         return date_time_formatted
-
index 130c4bd..ce9e1f8 100644 (file)
@@ -31,139 +31,112 @@ import logging
 import os
 import sys
 import traceback
-#Core producer
+from io import UnsupportedOperation
+
 import six
 
+from osm_mon.core.settings import Config
 from osm_mon.plugins.vRealiseOps.mon_plugin_vrops import MonPlugin
 
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
-from osm_mon.core.message_bus.producer import KafkaProducer
 
 from osm_mon.core.auth import AuthManager
 
-#from core.message_bus.producer import KafkaProducer
 from xml.etree import ElementTree as XmlElementTree
 
-
 schema_version = "1.0"
 req_config_params = ('vrops_site', 'vrops_user', 'vrops_password',
-                    'vcloud-site','admin_username','admin_password',
-                    'vcenter_ip','vcenter_port','vcenter_user','vcenter_password',
-                    'vim_tenant_name','orgname')
+                     'vcloud-site', 'admin_username', 'admin_password',
+                     'vcenter_ip', 'vcenter_port', 'vcenter_user', 'vcenter_password',
+                     'vim_tenant_name', 'orgname')
 MODULE_DIR = os.path.dirname(__file__)
 CONFIG_FILE_NAME = 'vrops_config.xml'
 CONFIG_FILE_PATH = os.path.join(MODULE_DIR, CONFIG_FILE_NAME)
 
-def set_logger():
-    """Set Logger
-    """
-    BASE_DIR = os.path.dirname(os.path.dirname(__file__))
-    logger = logging.getLogger()
-    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-    handler = logging.FileHandler(os.path.join(BASE_DIR,"mon_vrops_log.log"))
-    handler.setFormatter(formatter)
-    logger.addHandler(handler)
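+# Module-level logger configured from the shared MON settings (osm_mon.core.settings.Config):
+# logs go to stdout at the level given by the OSMMON_LOG_LEVEL option.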
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+
+logger = logging.getLogger(__name__)
 
 
-class PluginReceiver():
+class PluginReceiver:
     """MON Plugin receiver receiving request messages & responding using producer for vROPs
     telemetry plugin
     """
+
     def __init__(self):
         """Constructor of PluginReceiver
         """
+        self._cfg = Config.instance()
+
+    def handle_alarm_requests(self, key: str, values: dict, vim_uuid: str):
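+        """Dispatch an alarm_request message to the matching handler based on its key
+        and return the response dict for the caller to publish (summary of the branches below).
+        """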
+        values['vim_uuid'] = vim_uuid
+        if key == "create_alarm_request":
+            config_alarm_info = values
+            alarm_uuid = self.create_alarm(config_alarm_info)
+            logger.info("Alarm created with alarm uuid: {}".format(alarm_uuid))
+            # Publish message using producer
+            return self.publish_create_alarm_status(alarm_uuid, config_alarm_info)
+        elif key == "update_alarm_request":
+            update_alarm_info = values
+            alarm_uuid = self.update_alarm(update_alarm_info)
+            logger.info("Alarm definition updated : alarm uuid: {}".format(alarm_uuid))
+            # Publish message using producer
+            return self.publish_update_alarm_status(alarm_uuid, update_alarm_info)
+        elif key == "delete_alarm_request":
+            delete_alarm_info = values
+            alarm_uuid = self.delete_alarm(delete_alarm_info)
+            logger.info("Alarm definition deleted : alarm uuid: {}".format(alarm_uuid))
+            # Publish message using producer
+            return self.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
+        elif key == "list_alarm_request":
+            request_input = values
+            triggered_alarm_list = self.list_alarms(request_input)
+            # Publish message using producer
+            return self.publish_list_alarm_response(triggered_alarm_list, request_input)
+        else:
+            raise UnsupportedOperation("Unknown key, no action will be performed")
+
+    def handle_metric_requests(self, key: str, values: dict, vim_uuid: str):
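+        """Dispatch a metric_request message by key and return the response dict
+        for the caller to publish (see the branches below).
+        """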
+        values['vim_uuid'] = vim_uuid
+        if key == "read_metric_data_request":
+            metric_request_info = values
+            access_config = self.get_vim_access_config(metric_request_info['vim_uuid'])
+            mon_plugin_obj = MonPlugin(access_config)
+            metrics_data = mon_plugin_obj.get_metrics_data(metric_request_info)
+            logger.info("Collected Metrics Data: {}".format(metrics_data))
+            # Publish message using producer
+            return self.publish_metrics_data_status(metrics_data)
+        elif key == "create_metric_request":
+            metric_info = values
+            metric_status = self.verify_metric(metric_info)
+            # Publish message using producer
+            return self.publish_create_metric_response(metric_info, metric_status)
+        elif key == "update_metric_request":
+            metric_info = values
+            metric_status = self.verify_metric(metric_info)
+            # Publish message using producer
+            return self.publish_update_metric_response(metric_info, metric_status)
+        elif key == "delete_metric_request":
+            metric_info = values
+            # Deleting Metric Data is not allowed. Publish status as False
+            logger.warning("Deleting Metric is not allowed by VMware vROPs plugin: {}"
+                           .format(metric_info['metric_name']))
+            # Publish message using producer
+            return self.publish_delete_metric_response(metric_info)
 
-
-        self.logger = logging.getLogger('PluginReceiver')
-        self.logger.setLevel(logging.DEBUG)
-        set_logger()
-
-        #Core producer
-        self.producer_alarms = KafkaProducer('alarm_response')
-        self.producer_metrics = KafkaProducer('metric_response')
-        self.producer_access_credentials = KafkaProducer('vim_access_credentials_response')
-
-
-    def consume(self, message, vim_uuid):
-        """Consume the message, act on it & respond
-        """
-        try:
-            self.logger.info("Message received for VIM: {} :\nMessage Topic={}:{}:{}:\n"\
-                             "Message Key={}\nMessage Value={}"\
-                             .format(vim_uuid, message.topic, message.partition, message.offset,\
-                                     message.key, message.value))
-            message_values = json.loads(message.value)
-            #Adding vim_uuid to message
-            message_values['vim_uuid'] = vim_uuid
-            self.logger.info("Action required for: {}".format(message.topic))
-            if message.topic == 'alarm_request':
-                if message.key == "create_alarm_request":
-                    config_alarm_info = message_values
-                    alarm_uuid = self.create_alarm(config_alarm_info)
-                    self.logger.info("Alarm created with alarm uuid: {}".format(alarm_uuid))
-                    #Publish message using producer
-                    self.publish_create_alarm_status(alarm_uuid, config_alarm_info)
-                elif message.key == "update_alarm_request":
-                    update_alarm_info = message_values
-                    alarm_uuid = self.update_alarm(update_alarm_info)
-                    self.logger.info("Alarm definition updated : alarm uuid: {}".format(alarm_uuid))
-                    #Publish message using producer
-                    self.publish_update_alarm_status(alarm_uuid, update_alarm_info)
-                elif message.key == "delete_alarm_request":
-                    delete_alarm_info = message_values
-                    alarm_uuid = self.delete_alarm(delete_alarm_info)
-                    self.logger.info("Alarm definition deleted : alarm uuid: {}".format(alarm_uuid))
-                    #Publish message using producer
-                    self.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
-                elif message.key == "list_alarm_request":
-                    request_input = message_values
-                    triggered_alarm_list = self.list_alarms(request_input)
-                    #Publish message using producer
-                    self.publish_list_alarm_response(triggered_alarm_list, request_input)
-            elif message.topic == 'metric_request':
-                if message.key == "read_metric_data_request":
-                    metric_request_info = message_values
-                    access_config = None
-                    access_config = self.get_vim_access_config(metric_request_info['vim_uuid'])
-                    mon_plugin_obj = MonPlugin(access_config)
-                    metrics_data = mon_plugin_obj.get_metrics_data(metric_request_info)
-                    self.logger.info("Collected Metrics Data: {}".format(metrics_data))
-                    #Publish message using producer
-                    self.publish_metrics_data_status(metrics_data)
-                elif message.key == "create_metric_request":
-                    metric_info = message_values
-                    metric_status = self.verify_metric(metric_info)
-                    #Publish message using producer
-                    self.publish_create_metric_response(metric_info, metric_status)
-                elif message.key == "update_metric_request":
-                    metric_info = message_values
-                    metric_status = self.verify_metric(metric_info)
-                    #Publish message using producer
-                    self.publish_update_metric_response(metric_info, metric_status)
-                elif message.key == "delete_metric_request":
-                    metric_info = message_values
-                    #Deleting Metric Data is not allowed. Publish status as False
-                    self.logger.warning("Deleting Metric is not allowed by VMware vROPs plugin: {}"\
-                                        .format(metric_info['metric_name']))
-                    #Publish message using producer
-                    self.publish_delete_metric_response(metric_info)
-            elif message.topic == 'access_credentials':
-                if message.key == "vim_access_credentials":
-                    access_info = message_values
-                    access_update_status = self.update_access_credentials(access_info['access_config'])
-                    self.publish_access_update_response(access_update_status, access_info)
-
-        except:
-            self.logger.error("Exception in vROPs plugin receiver: {}".format(traceback.format_exc()))
-
+        else:
+            raise UnsupportedOperation("Unknown key, no action will be performed")
 
     def create_alarm(self, config_alarm_info):
         """Create alarm using vROPs plugin
         """
-        access_config = None
         access_config = self.get_vim_access_config(config_alarm_info['vim_uuid'])
         mon_plugin = MonPlugin(access_config)
-        plugin_uuid = mon_plugin.configure_rest_plugin()
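+        # Ensure the vROPs REST plugin (used for alarm notifications) is configured before
+        # creating the alarm definition; its UUID is not needed afterwards here.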
+        mon_plugin.configure_rest_plugin()
         alarm_uuid = mon_plugin.configure_alarm(config_alarm_info['alarm_create_request'])
         return alarm_uuid
 
@@ -172,24 +145,23 @@ class PluginReceiver():
         """
         topic = 'alarm_response'
         msg_key = 'create_alarm_response'
-        response_msg = {"schema_version":schema_version,
-                         "schema_type":"create_alarm_response",
-                         "vim_uuid":config_alarm_info["vim_uuid"],
-                         "alarm_create_response":
-                            {"correlation_id":config_alarm_info["alarm_create_request"]["correlation_id"],
-                             "alarm_uuid":alarm_uuid,
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "create_alarm_response",
+                        "vim_uuid": config_alarm_info["vim_uuid"],
+                        "alarm_create_response":
+                            {"correlation_id": config_alarm_info["alarm_create_request"]["correlation_id"],
+                             "alarm_uuid": alarm_uuid,
                              "status": True if alarm_uuid else False
-                            }
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_alarms.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+                             }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
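+        # The response dict is returned to the caller, which is responsible for publishing
+        # it on the message bus (no plugin-local Kafka producer is used here).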
+
+        return response_msg
 
     def update_alarm(self, update_alarm_info):
         """Update already created alarm
         """
-        access_config = None
         access_config = self.get_vim_access_config(update_alarm_info['vim_uuid'])
         mon_plugin = MonPlugin(access_config)
         alarm_uuid = mon_plugin.update_alarm_configuration(update_alarm_info['alarm_update_request'])
@@ -200,25 +172,24 @@ class PluginReceiver():
         """
         topic = 'alarm_response'
         msg_key = 'update_alarm_response'
-        response_msg = {"schema_version":schema_version,
-                         "schema_type":"update_alarm_response",
-                         "vim_uuid":update_alarm_info["vim_uuid"],
-                         "alarm_update_response":
-                            {"correlation_id":update_alarm_info["alarm_update_request"]["correlation_id"],
-                             "alarm_uuid":update_alarm_info["alarm_update_request"]["alarm_uuid"] \
-                             if update_alarm_info["alarm_update_request"].get('alarm_uuid') is not None else None,
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "update_alarm_response",
+                        "vim_uuid": update_alarm_info["vim_uuid"],
+                        "alarm_update_response":
+                            {"correlation_id": update_alarm_info["alarm_update_request"]["correlation_id"],
+                             "alarm_uuid": update_alarm_info["alarm_update_request"]["alarm_uuid"] \
+                                 if update_alarm_info["alarm_update_request"].get('alarm_uuid') is not None else None,
                              "status": True if alarm_uuid else False
-                            }
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_alarms.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+                             }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
 
     def delete_alarm(self, delete_alarm_info):
         """Delete alarm configuration
         """
-        access_config = None
         access_config = self.get_vim_access_config(delete_alarm_info['vim_uuid'])
         mon_plugin = MonPlugin(access_config)
         alarm_uuid = mon_plugin.delete_alarm_configuration(delete_alarm_info['alarm_delete_request'])
@@ -229,36 +200,33 @@ class PluginReceiver():
         """
         topic = 'alarm_response'
         msg_key = 'delete_alarm_response'
-        response_msg = {"schema_version":schema_version,
-                         "schema_type":"delete_alarm_response",
-                         "vim_uuid":delete_alarm_info['vim_uuid'],
-                         "alarm_deletion_response":
-                            {"correlation_id":delete_alarm_info["alarm_delete_request"]["correlation_id"],
-                             "alarm_uuid":delete_alarm_info["alarm_delete_request"]["alarm_uuid"],
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "delete_alarm_response",
+                        "vim_uuid": delete_alarm_info['vim_uuid'],
+                        "alarm_deletion_response":
+                            {"correlation_id": delete_alarm_info["alarm_delete_request"]["correlation_id"],
+                             "alarm_uuid": delete_alarm_info["alarm_delete_request"]["alarm_uuid"],
                              "status": True if alarm_uuid else False
-                            }
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_alarms.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+                             }
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
 
+        return response_msg
 
     def publish_metrics_data_status(self, metrics_data):
         """Publish the requested metric data using producer
         """
         topic = 'metric_response'
         msg_key = 'read_metric_data_response'
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, metrics_data))
-        #Core producer
-        self.producer_metrics.publish(key=msg_key, value=json.dumps(metrics_data), topic=topic)
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, metrics_data))
 
+        return metrics_data
 
     def verify_metric(self, metric_info):
         """Verify if metric is supported or not
         """
-        access_config = None
         access_config = self.get_vim_access_config(metric_info['vim_uuid'])
         mon_plugin = MonPlugin(access_config)
         if 'metric_create_request' in metric_info:
@@ -272,44 +240,44 @@ class PluginReceiver():
         """
         topic = 'metric_response'
         msg_key = 'create_metric_response'
-        response_msg = {"schema_version":schema_version,
-                         "schema_type":"create_metric_response",
-                         ##"vim_uuid":metric_info['vim_uuid'],
-                         ##"correlation_id":metric_info['correlation_id'],
-                         "metric_create_response":
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "create_metric_response",
+                        ##"vim_uuid":metric_info['vim_uuid'],
+                        ##"correlation_id":metric_info['correlation_id'],
+                        "metric_create_response":
                             {
-                             ##"metric_uuid":'0',
-                             ##"resource_uuid":metric_info['metric_create']['resource_uuid'],
-                             ##"vim_uuid":metric_info['vim_uuid'], #May be required. TODO - Confirm
-                             "correlation_id":metric_info['correlation_id'],
-                             "status":metric_status
+                                ##"metric_uuid":'0',
+                                ##"resource_uuid":metric_info['metric_create']['resource_uuid'],
+                                ##"vim_uuid":metric_info['vim_uuid'], #May be required. TODO - Confirm
+                                "correlation_id": metric_info['correlation_id'],
+                                "status": metric_status
                             }
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_metrics.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
 
     def publish_update_metric_response(self, metric_info, metric_status):
         """Publish update metric response
         """
         topic = 'metric_response'
         msg_key = 'update_metric_response'
-        response_msg = {"schema_version":schema_version,
-                        "schema_type":"metric_update_response",
-                        "vim_uuid":metric_info['vim_uuid'],
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "metric_update_response",
+                        "vim_uuid": metric_info['vim_uuid'],
                         "metric_update_response":
                             {
-                             "metric_uuid":'0',
-                             "correlation_id":metric_info['correlation_id'],
-                             "resource_uuid":metric_info['metric_create']['resource_uuid'],
-                             "status":metric_status
+                                "metric_uuid": '0',
+                                "correlation_id": metric_info['correlation_id'],
+                                "resource_uuid": metric_info['metric_create']['resource_uuid'],
+                                "status": metric_status
                             }
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_metrics.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
 
     def publish_delete_metric_response(self, metric_info):
         """Publish delete metric response
@@ -321,64 +289,60 @@ class PluginReceiver():
         else:
             tenant_uuid = None
 
-        response_msg = {"schema_version":schema_version,
-                        "schema_type":"delete_metric_response",
-                        "vim_uuid":metric_info['vim_uuid'],
-                        "correlation_id":metric_info['correlation_id'],
-                        "metric_name":metric_info['metric_name'],
-                        "metric_uuid":'0',
-                        "resource_uuid":metric_info['resource_uuid'],
-                        "tenant_uuid":tenant_uuid,
-                        "status":False
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_metrics.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "delete_metric_response",
+                        "vim_uuid": metric_info['vim_uuid'],
+                        "correlation_id": metric_info['correlation_id'],
+                        "metric_name": metric_info['metric_name'],
+                        "metric_uuid": '0',
+                        "resource_uuid": metric_info['resource_uuid'],
+                        "tenant_uuid": tenant_uuid,
+                        "status": False
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
 
     def list_alarms(self, list_alarm_input):
         """Collect list of triggered alarms based on input
         """
-        access_config = None
         access_config = self.get_vim_access_config(list_alarm_input['vim_uuid'])
         mon_plugin = MonPlugin(access_config)
         triggered_alarms = mon_plugin.get_triggered_alarms_list(list_alarm_input['alarm_list_request'])
         return triggered_alarms
 
-
     def publish_list_alarm_response(self, triggered_alarm_list, list_alarm_input):
         """Publish list of triggered alarms
         """
         topic = 'alarm_response'
         msg_key = 'list_alarm_response'
-        response_msg = {"schema_version":schema_version,
-                        "schema_type":"list_alarm_response",
-                        "vim_type":"VMware",
-                        "vim_uuid":list_alarm_input['vim_uuid'],
-                        "correlation_id":list_alarm_input['alarm_list_request']['correlation_id'],
-                        "list_alarm_response":triggered_alarm_list
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core producer
-        self.producer_alarms.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
-
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "list_alarm_response",
+                        "vim_type": "VMware",
+                        "vim_uuid": list_alarm_input['vim_uuid'],
+                        "correlation_id": list_alarm_input['alarm_list_request']['correlation_id'],
+                        "list_alarm_response": triggered_alarm_list
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"
+                    .format(topic, msg_key, response_msg))
+
+        return response_msg
 
     def update_access_credentials(self, access_info):
         """Verify if all the required access config params are provided and
            updates access config in default vrops config file
         """
         update_status = False
-        wr_status = False
-        #Check if all the required config params are passed in request
-        if not all (keys in access_info for keys in req_config_params):
-            self.logger.debug("All required Access Config Parameters not provided")
-            self.logger.debug("List of required Access Config Parameters: {}".format(req_config_params))
-            self.logger.debug("List of given Access Config Parameters: {}".format(access_info))
+        # Check if all the required config params are passed in request
+        if not all(keys in access_info for keys in req_config_params):
+            logger.debug("All required Access Config Parameters not provided")
+            logger.debug("List of required Access Config Parameters: {}".format(req_config_params))
+            logger.debug("List of given Access Config Parameters: {}".format(access_info))
             return update_status
 
         wr_status = self.write_access_config(access_info)
-        return wr_status    #True/False
+        return wr_status  # True/False
 
     def write_access_config(self, access_info):
         """Write access configuration to vROPs config file.
@@ -391,39 +355,36 @@ class PluginReceiver():
             for config in root:
                 if config.tag == 'Access_Config':
                     for param in config:
-                        for key,val in six.iteritems(access_info):
+                        for key, val in six.iteritems(access_info):
                             if param.tag == key:
-                                #print param.tag, val
+                                # print param.tag, val
                                 param.text = val
 
             tree.write(CONFIG_FILE_PATH)
             wr_status = True
         except Exception as exp:
-            self.logger.warning("Failed to update Access Config Parameters: {}".format(exp))
+            logger.warning("Failed to update Access Config Parameters: {}".format(exp))
 
         return wr_status
 
-
     def publish_access_update_response(self, access_update_status, access_info_req):
         """Publish access update response
         """
         topic = 'access_credentials'
         msg_key = 'vim_access_credentials_response'
-        response_msg = {"schema_version":schema_version,
-                        "schema_type":"vim_access_credentials_response",
-                        "correlation_id":access_info_req['access_config']['correlation_id'],
-                        "status":access_update_status
-                       }
-        self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\
-                .format(topic, msg_key, response_msg))
-        #Core Add producer
-        self.producer_access_credentials.publish(key=msg_key, value=json.dumps(response_msg), topic=topic)
-
+        response_msg = {"schema_version": schema_version,
+                        "schema_type": "vim_access_credentials_response",
+                        "correlation_id": access_info_req['access_config']['correlation_id'],
+                        "status": access_update_status
+                        }
+        logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}" \
+                    .format(topic, msg_key, response_msg))
+        # Core Add producer
+        return response_msg
 
     def get_vim_access_config(self, vim_uuid):
         """Get VIM access configuration & account details from path: VIM_ACCOUNTS_FILE_PATH
         """
-        vim_account_details = None
         vim_account = {}
         auth_manager = AuthManager()
         vim_account_details = auth_manager.get_credentials(vim_uuid)
@@ -458,9 +419,10 @@ class PluginReceiver():
                 if vim_config['orgname'] is not None:
                     vim_account['orgname'] = vim_config['orgname']
         except Exception as exp:
-            self.logger.error("VIM account details not sufficient: {}".format(exp))
+            logger.error("VIM account details not sufficient: {}".format(exp))
         return vim_account
 
+
 """
 def main():
     #log.basicConfig(filename='mon_vrops_log.log',level=log.DEBUG)
@@ -471,4 +433,3 @@ def main():
 if __name__ == "__main__":
     main()
 """
-
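The publish_* methods above now build and return the response dict instead of publishing it themselves through the removed self.producer_* instances. A minimal sketch of how a caller might push the returned message onto the bus, assuming the Producer send/flush API used in the vrops_webservice change below (the forward_response helper itself is hypothetical and not part of this change):

    import json

    from osm_mon.core.message_bus.producer import Producer

    def forward_response(response_msg, topic='alarm_response',
                         key='update_alarm_response'):
        # topic and key mirror the values set inside the publish_* methods
        producer = Producer()
        producer.send(topic=topic, key=key, value=json.dumps(response_msg))
        producer.flush()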
index 2948ec3..6d05dfc 100755 (executable)
@@ -52,14 +52,14 @@ function install_packages(){
 
 apt-get update  # To get the latest package lists
 
-[ "$_DISTRO" == "Ubuntu" ] && install_packages "python-yaml python-bottle python-jsonschema python-requests libxml2-dev libxslt-dev python-dev python-pip openssl"
-[ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && install_packages "python-jsonschema python-requests libxslt-devel libxml2-devel python-devel python-pip openssl"
+[ "$_DISTRO" == "Ubuntu" ] && install_packages "python3-yaml python3-bottle python3-jsonschema python3-requests libxml2-dev libxslt-dev python3-dev python3-pip openssl"
+[ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && install_packages "python3-jsonschema python3-requests libxslt-devel libxml2-devel python3-devel python3-pip openssl"
 #The only way to install python-bottle on Centos7 is with easy_install or pip
 [ "$_DISTRO" == "CentOS" -o "$_DISTRO" == "Red" ] && easy_install -U bottle
 
 #required for vmware connector TODO move that to separete opt in install script
-pip install pip==9.0.3
-pip install cherrypy
+pip3 install pip==9.0.3
+pip3 install cherrypy
 
 echo '
  #################################################################
@@ -76,7 +76,7 @@ echo '
  #####             Start Web Service                      #####
  #################################################################'
 
-nohup python "${WebServiceFile}" &
+nohup python3 "${WebServiceFile}" &
 
 echo '
  #################################################################
index fd51449..d7cf33c 100755 (executable)
  Webservice for vRealize Operations (vROPs) to post/notify alarms details.
 
 """
+from osm_mon.core.settings import Config
+
 __author__ = "Arpita Kate"
 __date__ = "$15-Sept-2017 16:09:29$"
 __version__ = '0.1'
 
-
-from bottle import (ServerAdapter, route, run, server_names, redirect, default_app,
-                     request, response, template, debug, TEMPLATE_PATH , static_file)
-from socket import getfqdn
-from datetime import datetime
-from xml.etree import ElementTree as ET
+import json
 import logging
 import os
-import json
 import sys
+from datetime import datetime
+from socket import getfqdn
+
 import requests
-sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..','..'))
-from osm_mon.core.message_bus.producer import KafkaProducer
-#from core.message_bus.producer import KafkaProducer
+from bottle import (ServerAdapter, route, run, server_names, default_app,
+                    request, response)
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..', '..'))
+from osm_mon.core.message_bus.producer import Producer
+
+# from core.message_bus.producer import KafkaProducer
 
 sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
 from osm_mon.core.database import DatabaseManager
@@ -55,34 +58,35 @@ except ImportError:
     from cherrypy.wsgiserver import CherryPyWSGIServer as WSGIServer
     from cherrypy.wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter
 
-#Set Constants
+# Set Constants
 BASE_DIR = os.path.dirname(os.path.dirname(__file__))
 CERT_DIR = os.path.join(BASE_DIR, "SSL_certificate")
 certificate_name = getfqdn() + ".cert"
 key_name = getfqdn() + ".key"
 CERTIFICATE = os.path.join(CERT_DIR, certificate_name)
 KEY = os.path.join(CERT_DIR, key_name)
-#CERTIFICATE = os.path.join(CERT_DIR, "www.vrops_webservice.com.cert")
-#KEY = os.path.join(CERT_DIR, "www.vrops_webservice.com.key")
+# CERTIFICATE = os.path.join(CERT_DIR, "www.vrops_webservice.com.cert")
+# KEY = os.path.join(CERT_DIR, "www.vrops_webservice.com.key")
 CONFIG_FILE = os.path.join(BASE_DIR, '../vrops_config.xml')
-#Severity Mapping from vROPs to OSM
+# Severity Mapping from vROPs to OSM
 VROPS_SEVERITY_TO_OSM_MAPPING = {
-                "ALERT_CRITICALITY_LEVEL_CRITICAL":"CRITICAL",
-                "ALERT_CRITICALITY_LEVEL_WARNING":"WARNING",
-                "ALERT_CRITICALITY_LEVEL_IMMEDIATE":"MAJOR",
-                "ALERT_CRITICALITY_LEVEL_INFO":"INDETERMINATE",
-                "ALERT_CRITICALITY_LEVEL_AUTO":"INDETERMINATE",
-                "ALERT_CRITICALITY_LEVEL_UNKNOWN":"INDETERMINATE",
-                "ALERT_CRITICALITY_LEVEL_NONE":"INDETERMINATE"
-            }
-
-#Set logger
-logger = logging.getLogger('vROPs_Webservice')
-formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-hdlr = logging.FileHandler(os.path.join(BASE_DIR,"vrops_webservice.log"))
-hdlr.setFormatter(formatter)
-logger.addHandler(hdlr)
-logger.setLevel(logging.DEBUG)
+    "ALERT_CRITICALITY_LEVEL_CRITICAL": "CRITICAL",
+    "ALERT_CRITICALITY_LEVEL_WARNING": "WARNING",
+    "ALERT_CRITICALITY_LEVEL_IMMEDIATE": "MAJOR",
+    "ALERT_CRITICALITY_LEVEL_INFO": "INDETERMINATE",
+    "ALERT_CRITICALITY_LEVEL_AUTO": "INDETERMINATE",
+    "ALERT_CRITICALITY_LEVEL_UNKNOWN": "INDETERMINATE",
+    "ALERT_CRITICALITY_LEVEL_NONE": "INDETERMINATE"
+}
+
+# Set logger
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+
+logger = logging.getLogger(__name__)
 
 
 def format_datetime(str_date):
@@ -96,14 +100,15 @@ def format_datetime(str_date):
     date_fromat = "%Y-%m-%dT%H:%M:%S"
     formated_datetime = None
     try:
-        datetime_obj = datetime.fromtimestamp(float(str_date)/1000.)
+        datetime_obj = datetime.fromtimestamp(float(str_date) / 1000.)
         formated_datetime = datetime_obj.strftime(date_fromat)
     except Exception as exp:
         logger.error('Exception: {} occured while converting date {} into format {}'.format(
-                           exp,str_date, date_fromat))
+            exp, str_date, date_fromat))
 
     return formated_datetime
 
+
 def get_alarm_config(alarm_name):
     """
         Method to get configuration parameters
@@ -114,19 +119,20 @@ def get_alarm_config(alarm_name):
     """
     vim_account = {}
     database_manager = DatabaseManager()
-    vim_account_details = database_manager.get_credentials_for_alarm_name(alarm_name,'VMware')
+    alarm = database_manager.get_alarm(alarm_name, 'VMware')
+    vim_account_details = alarm.credentials
 
     try:
         if vim_account_details is not None:
             vim_account['name'] = vim_account_details.name
             vim_account['vim_tenant_name'] = vim_account_details.tenant_name
             vim_account['vim_type'] = vim_account_details.type
-            vim_account['vim_uuid'] = vim_account_details._id
+            vim_account['vim_uuid'] = vim_account_details.uuid
             vim_account['vim_url'] = vim_account_details.url
             vim_account['org_user'] = vim_account_details.user
             vim_account['org_password'] = vim_account_details.password
 
-            vim_config = json.loads(vim_account_details.config) 
+            vim_config = json.loads(vim_account_details.config)
             vim_account['tenant_id'] = vim_config['tenant_id']
             vim_account['admin_username'] = vim_config['admin_username']
             vim_account['admin_password'] = vim_config['admin_password']
@@ -159,29 +165,29 @@ def get_alarm_definationID(alarm_instance_uuid, access_config):
                 alarm defination ID
     """
     alarm_definationID = None
-    if alarm_instance_uuid :
+    if alarm_instance_uuid:
         try:
-            #access_config = get_alarm_config()
+            # access_config = get_alarm_config()
             headers = {'Accept': 'application/json'}
-            api_url = '{}/suite-api/api/alerts/{}'\
-                        .format(access_config.get('vrops_site'), alarm_instance_uuid)
+            api_url = '{}/suite-api/api/alerts/{}' \
+                .format(access_config.get('vrops_site'), alarm_instance_uuid)
             api_response = requests.get(api_url,
-                                        auth=(access_config.get('vrops_user'),\
+                                        auth=(access_config.get('vrops_user'),
                                               access_config.get('vrops_password')),
-                                        verify = False,
-                                        headers = headers
+                                        verify=False,
+                                        headers=headers
                                         )
 
-            if  api_response.status_code == 200:
+            if api_response.status_code == 200:
                 data = api_response.json()
                 if data.get("alertDefinitionId") is not None:
                     alarm_definationID = '-'.join(data.get("alertDefinitionId").split('-')[1:])
             else:
-                logger.error("Failed to get alert definition ID for alarm {}"\
+                logger.error("Failed to get alert definition ID for alarm {}"
                              .format(alarm_instance_uuid))
         except Exception as exp:
-            logger.error("Exception occured while getting alert definition ID for alarm : {}"\
-                          .format(exp, alarm_instance_uuid))
+            logger.error("Exception occured while getting alert definition ID for alarm : {}"
+                         .format(exp, alarm_instance_uuid))
 
     return alarm_definationID
 
@@ -195,23 +201,23 @@ def notify_alarm(alarmID):
         Returns:
            response code
     """
-    logger.info("Request:{} from:{} {} {} "\
+    logger.info("Request:{} from:{} {} {} " \
                 .format(request, request.remote_addr, request.method, request.url))
     response.headers['Content-Type'] = 'application/json'
     try:
         postdata = json.loads(request.body.read())
         notify_details = {}
         vim_access_config = get_alarm_config(postdata.get('alertName'))
-        #Parse noditfy data
-        notify_details['vim_uuid'] = vim_access_config.get('vim_uuid') 
-        notify_details['alarm_uuid'] = get_alarm_definationID(postdata.get('alertId'),\
+        # Parse notify data
+        notify_details['vim_uuid'] = vim_access_config.get('vim_uuid')
+        notify_details['alarm_uuid'] = get_alarm_definationID(postdata.get('alertId'),
                                                               vim_access_config)
         notify_details['description'] = postdata.get('info')
         notify_details['alarm_instance_uuid'] = alarmID
         notify_details['resource_uuid'] = '-'.join(postdata.get('alertName').split('-')[1:])
-        notify_details['tenant_uuid'] =  vim_access_config.get('tenant_id')
+        notify_details['tenant_uuid'] = vim_access_config.get('tenant_id')
         notify_details['vim_type'] = "VMware"
-        notify_details['severity'] = VROPS_SEVERITY_TO_OSM_MAPPING.get(postdata.get('criticality'),\
+        notify_details['severity'] = VROPS_SEVERITY_TO_OSM_MAPPING.get(postdata.get('criticality'),
                                                                        'INDETERMINATE')
         notify_details['status'] = postdata.get('status')
         if postdata.get('startDate'):
@@ -224,20 +230,21 @@ def notify_alarm(alarmID):
         alarm_details = {'schema_version': 1.0,
                          'schema_type': "notify_alarm",
                          'notify_details': notify_details
-                        }
+                         }
         alarm_data = json.dumps(alarm_details)
         logger.info("Alarm details: {}".format(alarm_data))
 
-        #Publish Alarm details
-        kafkaMsgProducer = KafkaProducer("alarm_response")
-        kafkaMsgProducer.publish(topic='alarm_response', key='notify_alarm', value=alarm_data)
+        # Publish Alarm details
+        producer = Producer()
+        producer.send(topic='alarm_response', key='notify_alarm', value=alarm_data)
+        producer.flush()
 
-        #return 201 on Success
+        # return 201 on Success
         response.status = 201
 
     except Exception as exp:
         logger.error('Exception: {} occured while notifying alarm {}.'.format(exp, alarmID))
-        #return 500 on Error
+        # return 500 on Error
         response.status = 500
 
     return response
@@ -256,7 +263,7 @@ class SSLWebServer(ServerAdapter):
         server.ssl_adapter = pyOpenSSLAdapter(
             certificate=CERTIFICATE,
             private_key=KEY,
-           # certificate_chain="intermediate_cert.crt"
+            # certificate_chain="intermediate_cert.crt"
         )
 
         try:
@@ -268,8 +275,8 @@ class SSLWebServer(ServerAdapter):
 
 
 if __name__ == "__main__":
-    #Start SSL Web Service
+    # Start SSL Web Service
     logger.info("Start vROPs Web Service")
     app = default_app()
     server_names['sslwebserver'] = SSLWebServer
-    run(app=app,host=getfqdn(), port=8080, server='sslwebserver')
+    run(app=app, host=getfqdn(), port=8080, server='sslwebserver')
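For reference, a hedged sketch of the payload that notify_alarm() above publishes on the 'alarm_response' topic after mapping a vROPs POST body; the field values are illustrative placeholders only:

    import json

    alarm_details = {
        'schema_version': 1.0,
        'schema_type': 'notify_alarm',
        'notify_details': {
            'vim_uuid': '<vim-account-uuid>',
            'alarm_uuid': '<alert-definition-id>',   # via get_alarm_definationID()
            'description': 'CPU usage above threshold',
            'alarm_instance_uuid': '<vrops-alert-id>',
            'resource_uuid': '<vdu-uuid>',
            'tenant_uuid': '<tenant-id>',
            'vim_type': 'VMware',
            'severity': 'CRITICAL',                  # VROPS_SEVERITY_TO_OSM_MAPPING
            'status': 'ACTIVE'
        }
    }
    alarm_data = json.dumps(alarm_details)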
index 8165903..c2bf38c 100644 (file)
         <orgname>Org2</orgname>
         <tenant_id>Org2</tenant_id>
     </Access_Config>
-</alarmsDefaultConfig>
+</alarmsDefaultConfig>
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json b/osm_mon/test/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json
deleted file mode 100644 (file)
index 341b2bd..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "alarm_ack",
-"vim_type": "AWS",
-"ack_details":
-{
-"alarm_uuid": "CPU_Utilization_i-098da78cbd8304e17",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": ""
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json
deleted file mode 100644 (file)
index ecf403e..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json
deleted file mode 100644 (file)
index 17c423d..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold1",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json
deleted file mode 100644 (file)
index b2f5acb..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold",
-"resource_uuid": "i-09462760703837b26",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json
deleted file mode 100644 (file)
index ecf403e..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/operation_invalid.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/operation_invalid.json
deleted file mode 100644 (file)
index 31e1e0b..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold2",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "Greaterthan",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/operation_valid.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/operation_valid.json
deleted file mode 100644 (file)
index adb789b..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold2",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/statistic_invalid.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/statistic_invalid.json
deleted file mode 100644 (file)
index 8c2e68d..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold2",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAX"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_alarm/statistic_valid.json b/osm_mon/test/CloudWatch/test_schemas/create_alarm/statistic_valid.json
deleted file mode 100644 (file)
index adb789b..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold2",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json b/osm_mon/test/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json
deleted file mode 100644 (file)
index 0fe0dcb..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_metrics_request",
-"tenant_uuid": "",
-"correlation_id": "SO123",
-"vim_type": "AWS",
-"metric_create":
-{
-"metric_name": "CPU_UTILIZ",
-"metric_unit": "",
-"resource_uuid": "i-098da78cbd8304e17"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json b/osm_mon/test/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json
deleted file mode 100644 (file)
index 18cc23c..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_metrics_request",
-"tenant_uuid": "",
-"correlation_id": "SO123",
-"vim_type": "AWS",
-"metric_create":
-{
-"metric_name": "CPU_UTILIZATION",
-"metric_unit": "",
-"resource_uuid": "i-098da78cbd8304e17"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_invalid.json b/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_invalid.json
deleted file mode 100644 (file)
index e51a670..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_alarm_request",
-"vim_type": "AWS",
-"alarm_delete_request":
-{
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e16",
-"correlation_id": "SO123"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid.json b/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid.json
deleted file mode 100644 (file)
index a2cd4b5..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_alarm_request",
-"vim_type": "AWS",
-"alarm_delete_request":
-{
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
-"correlation_id": "SO123"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json b/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json
deleted file mode 100644 (file)
index f465df7..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_alarm_request",
-"vim_type": "AWS",
-"alarm_delete_request":
-{
-"alarm_uuid": "CPU_Utilization_Above_Threshold1_i-098da78cbd8304e17",
-"correlation_id": "SO123"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json b/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json
deleted file mode 100644 (file)
index 1fa6870..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_alarm_request",
-"vim_type": "AWS",
-"alarm_delete_request":
-{
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-09462760703837b26",
-"correlation_id": "SO123"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json b/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json
deleted file mode 100644 (file)
index 6c35ab2..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_alarm_request",
-"vim_type": "AWS",
-"alarm_delete_request":
-{
-"alarm_uuid": "CPU_Utilization_Above_Threshold2_i-098da78cbd8304e17",
-"correlation_id": "SO123"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json b/osm_mon/test/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json
deleted file mode 100644 (file)
index 716b039..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_alarm_request",
-"vim_type": "AWS",
-"alarm_delete_request":
-{
-"alarm_uuid": "CPU_Utilization_Above_Threshold4_i-098da78cbd8304e17",
-"correlation_id": "SO123"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json b/osm_mon/test/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json
deleted file mode 100644 (file)
index f30ab87..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_metric_data_request",
-"metric_name": "CPU_UTILIATION",
-"metric_uuid": "",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": "",
-"correlation_uuid": "S0123",
-"vim_type": "AWS"
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json b/osm_mon/test/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json
deleted file mode 100644 (file)
index ea3922b..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "delete_metric_data_request",
-"metric_name": "CPU_UTILIZATION",
-"metric_uuid": "",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": "",
-"correlation_uuid": "S0123",
-"vim_type": "AWS"
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json b/osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json
deleted file mode 100644 (file)
index a4d02a3..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "list_alarm_request",
-"vim_type": "AWS",
-"alarm_list_request":
-{
-"correlation_id": "SO123",
-"resource_uuid": "",
-"alarm_name": "",
-"severity": ""
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json b/osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json
deleted file mode 100644 (file)
index d0f31f2..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "list_alarm_request",
-"vim_type": "AWS",
-"alarm_list_request":
-{
-"correlation_id": "SO123",
-"resource_uuid": "i-098da78cbd8304e17",
-"alarm_name": "",
-"severity": ""
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json b/osm_mon/test/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json
deleted file mode 100644 (file)
index bf46579..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "list_alarm_request",
-"vim_type": "AWS",
-"alarm_list_request":
-{
-"correlation_id": "SO123",
-"resource_uuid": "i-098da78cbd8304e17",
-"alarm_name": "",
-"severity": "Critical"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json b/osm_mon/test/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json
deleted file mode 100644 (file)
index 6108e77..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "list_metrics_request",
-"vim_type": "AWS",
-"metrics_list_request":
-{
-"metric_name": "CPU_UTILZATION",
-"correlation_id": "SO123",
-"resource_uuid": "i-098da78cbd8304e17"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json b/osm_mon/test/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json
deleted file mode 100644 (file)
index b1bd9de..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "list_metrics_request",
-"vim_type": "AWS",
-"metrics_list_request":
-{
-"metric_name": "CPU_UTILIZATION",
-"correlation_id": "SO123",
-"resource_uuid": "i-098da78cbd8304e17"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json b/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json
deleted file mode 100644 (file)
index 815edf9..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "read_metric_data_request",
-"metric_name": "CPU_UTILIZATION",
-"metric_uuid": "0",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": "",
-"correlation_uuid": "SO123",
-"vim_type":"AWS",
-"collection_period":"3500" ,
-"collection_unit": ""
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json b/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json
deleted file mode 100644 (file)
index dad9a24..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "read_metric_data_request",
-"metric_name": "CPU_UTILIZATION",
-"metric_uuid": "0",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": "",
-"correlation_uuid": "SO123",
-"vim_type":"AWS",
-"collection_period":"3600" ,
-"collection_unit": ""
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json b/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json
deleted file mode 100644 (file)
index 0ff4f0e..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "read_metric_data_request",
-"metric_name": "CPU_UTLIZATION",
-"metric_uuid": "0",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": "",
-"correlation_uuid": "SO123",
-"vim_type":"AWS",
-"collection_period":"3600" ,
-"collection_unit": ""
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json b/osm_mon/test/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json
deleted file mode 100644 (file)
index dad9a24..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "read_metric_data_request",
-"metric_name": "CPU_UTILIZATION",
-"metric_uuid": "0",
-"resource_uuid": "i-098da78cbd8304e17",
-"tenant_uuid": "",
-"correlation_uuid": "SO123",
-"vim_type":"AWS",
-"collection_period":"3600" ,
-"collection_unit": ""
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/name_invalid.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/name_invalid.json
deleted file mode 100644 (file)
index fe171e4..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "update_alarm_request",
-"vim_type": "AWS",
-"alarm_update_request":
-{
-"correlation_id": "SO123",
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e13",
-"description": "",
-"severity": "Critical",
-"operation": "LE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/name_valid.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/name_valid.json
deleted file mode 100644 (file)
index 7070dff..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "update_alarm_request",
-"vim_type": "AWS",
-"alarm_update_request":
-{
-"correlation_id": "SO123",
-"alarm_uuid": "CPU_Utilization_Above_Threshold4_i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "LE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/operation_invalid.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/operation_invalid.json
deleted file mode 100644 (file)
index 0116228..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "update_alarm_request",
-"vim_type": "AWS",
-"alarm_update_request":
-{
-"correlation_id": "SO123",
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "Less",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/operation_valid.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/operation_valid.json
deleted file mode 100644 (file)
index 5fb8eb6..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "update_alarm_request",
-"vim_type": "AWS",
-"alarm_update_request":
-{
-"correlation_id": "SO123",
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "LE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/statistic_invalid.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/statistic_invalid.json
deleted file mode 100644 (file)
index 991d844..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "update_alarm_request",
-"vim_type": "AWS",
-"alarm_update_request":
-{
-"correlation_id": "SO123",
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "LE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAX"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/statistic_valid.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/statistic_valid.json
deleted file mode 100644 (file)
index 5fb8eb6..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "update_alarm_request",
-"vim_type": "AWS",
-"alarm_update_request":
-{
-"correlation_id": "SO123",
-"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "LE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json b/osm_mon/test/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json
deleted file mode 100644 (file)
index 581fb55..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_alarm_request",
-"vim_type": "AWS",
-"alarm_create_request":
-{
-"correlation_id": "SO123",
-"alarm_name": "CPU_Utilization_Above_Threshold4",
-"resource_uuid": "i-098da78cbd8304e17",
-"description": "",
-"severity": "Critical",
-"operation": "GE",
-"threshold_value": 1.5,
-"unit": "",
-"metric_name": "CPU_UTILIZATION",
-"statistic": "MAXIMUM"
-}
-}
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json b/osm_mon/test/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json
deleted file mode 100644 (file)
index 0fe0dcb..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_metrics_request",
-"tenant_uuid": "",
-"correlation_id": "SO123",
-"vim_type": "AWS",
-"metric_create":
-{
-"metric_name": "CPU_UTILIZ",
-"metric_unit": "",
-"resource_uuid": "i-098da78cbd8304e17"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json b/osm_mon/test/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json
deleted file mode 100644 (file)
index 18cc23c..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-"schema_version": "1.0",
-"schema_type": "create_metrics_request",
-"tenant_uuid": "",
-"correlation_id": "SO123",
-"vim_type": "AWS",
-"metric_create":
-{
-"metric_name": "CPU_UTILIZATION",
-"metric_unit": "",
-"resource_uuid": "i-098da78cbd8304e17"
-}
-}
\ No newline at end of file
diff --git a/osm_mon/test/CloudWatch/unit_tests_alarms.py b/osm_mon/test/CloudWatch/unit_tests_alarms.py
deleted file mode 100644 (file)
index ae036cf..0000000
+++ /dev/null
@@ -1,408 +0,0 @@
-from connection import Connection
-import unittest
-import sys
-import jsmin
-import json
-import os
-import time
-from jsmin import jsmin
-sys.path.append("../../test/core/")
-from test_producer import KafkaProducer
-from kafka import KafkaConsumer
-try:
-    import boto
-    import boto.ec2
-    import boto.vpc
-    import boto.ec2.cloudwatch
-    import boto.ec2.connection
-except:
-    exit("Boto not avialable. Try activating your virtualenv OR `pip install boto`")
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-
-# Test Producer object to generate request
-
-producer = KafkaProducer('create_alarm_request')
-obj = Connection() 
-connections = obj.setEnvironment()
-connections_res = obj.connection_instance()
-cloudwatch_conn = connections_res['cloudwatch_connection'] 
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-
-'''Test E2E Flow : Test cases has been tested one at a time.
-1) Commom Request is generated using request function in test_producer.py(/test/core)
-2) The request is then consumed by the comsumer (plugin)
-3) The response is sent back on the message bus in plugin_alarm.py using
-   response functions in producer.py(/core/message-bus)
-4) The response is then again consumed by the unit_tests_alarms.py
-   and the test cases has been applied on the response.
-'''
-
-class config_alarm_name_test(unittest.TestCase):
-   
-
-    def setUp(self):
-        pass
-    #To generate a request of testing new alarm name and new instance id in create alarm request
-    def test_differentName_differentInstance(self):
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info)
-                time.sleep(1)
-                self.assertTrue(info['alarm_create_response']['status'])
-                return        
-
-    #To generate a request of testing new alarm name and existing instance id in create alarm request
-    def test_differentName_sameInstance(self):
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/create_alarm_differentName_sameInstance.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid_delete1.json",'delete_alarm_request','','alarm_request')
-                self.assertTrue(info['alarm_create_response']['status'])   
-                return
-
-    #To generate a request of testing existing alarm name and new instance id in create alarm request    
-    def test_sameName_differentInstance(self): 
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/create_alarm_sameName_differentInstance.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid_delete2.json",'delete_alarm_request', '','alarm_request')
-                self.assertTrue(info['alarm_create_response']['status']) 
-                return    
-
-    #To generate a request of testing existing alarm name and existing instance id in create alarm request
-    def test_sameName_sameInstance(self):  
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/create_alarm_sameName_sameInstance.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info, "---")
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
-                self.assertEqual(info, None)  
-                return        
-
-    #To generate a request of testing valid statistics in create alarm request
-    def test_statisticValid(self):       
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/statistic_valid.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
-                self.assertTrue(info['alarm_create_response']['status']) 
-                return
-
-    #To generate a request of testing Invalid statistics in create alarm request    
-    def test_statisticValidNot(self):       
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/statistic_invalid.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info, "---")
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
-                self.assertEqual(info, None)
-                return  
-
-    #To generate a request of testing valid operation in create alarm request
-    def test_operationValid(self):       
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/operation_valid.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
-                self.assertTrue(info['alarm_create_response']['status']) 
-                return
-
-    # Test a create alarm request with an invalid operation
-    def test_operationValidNot(self):       
-        time.sleep(2)
-        producer.request("test_schemas/create_alarm/operation_invalid.json",'create_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "create_alarm_response": 
-                info = json.loads(json.loads(message.value))
-                print(info)
-                time.sleep(1)
-                self.assertEqual(info,None) 
-                return                 
-                 
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-class update_alarm_name_test(unittest.TestCase):
-
-    # Test an update alarm request with a valid alarm_id
-    def test_nameValid(self):
-        producer.request("test_schemas/update_alarm/update_alarm_new_alarm.json",'create_alarm_request', '','alarm_request')  
-        time.sleep(2)
-        producer.request("test_schemas/update_alarm/name_valid.json",'update_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "update_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid_delete4.json",'delete_alarm_request', '','alarm_request')
-                self.assertTrue(info['alarm_update_response']['status'])
-                return 
-    
-    # Test an update alarm request with an invalid alarm_id
-    def test_nameInvalid(self):
-        time.sleep(2)
-        producer.request("test_schemas/update_alarm/name_invalid.json",'update_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "update_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)
-                self.assertEqual(info,None)
-                return
-
-    # Test an update alarm request with valid statistics
-    def test_statisticValid(self):
-        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
-        time.sleep(2)
-        producer.request("test_schemas/update_alarm/statistic_valid.json",'update_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "update_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
-                self.assertTrue(info['alarm_update_response']['status'])
-                return
-
-    # Test an update alarm request with invalid statistics
-    def test_statisticInvalid(self):
-        time.sleep(2)
-        producer.request("test_schemas/update_alarm/statistic_invalid.json",'update_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "update_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)
-                self.assertEqual(info,None)
-                return            
-
-    # Test an update alarm request with a valid operation
-    def test_operationValid(self):
-        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
-        time.sleep(2)
-        producer.request("test_schemas/update_alarm/operation_valid.json",'update_alarm_request', '','alarm_request')  
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "update_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)
-                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
-                self.assertTrue(info['alarm_update_response']['status'])
-                return
-              
-#--------------------------------------------------------------------------------------------------------------------------------------
-class delete_alarm_test(unittest.TestCase):
-
-    # Test a delete alarm request with a valid alarm_id
-    def test_nameValid(self):             
-        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
-        time.sleep(2)
-        producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request') 
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "delete_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)                
-                self.assertTrue(info['alarm_deletion_response']['status'])
-                return
-
-    # Test a delete alarm request with an invalid alarm_id
-    def test_nameInvalid(self):              
-        time.sleep(2)
-        producer.request("test_schemas/delete_alarm/name_invalid.json",'delete_alarm_request', '','alarm_request') 
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "delete_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)                
-                self.assertEqual(info,None)
-                return             
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-class list_alarm_test(unittest.TestCase): 
-
-    # Test an alarm list request with valid input fields and no arguments
-    def test_valid_no_arguments(self):
-        time.sleep(2)
-        producer.request("test_schemas/list_alarm/list_alarm_valid_no_arguments.json",'alarm_list_request', '','alarm_request') 
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "list_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)                
-                self.assertEqual(type(info),dict)
-                return
-
-    # Test an alarm list request with valid input fields and one argument
-    def test_valid_one_arguments(self):
-        time.sleep(2)
-        producer.request("test_schemas/list_alarm/list_alarm_valid_one_arguments.json",'alarm_list_request', '','alarm_request') 
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "list_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)                
-                self.assertEqual(type(info),dict)
-                return
-
-    # Test an alarm list request with valid input fields and two arguments
-    def test_valid_two_arguments(self):
-        time.sleep(2)
-        producer.request("test_schemas/list_alarm/list_alarm_valid_two_arguments.json",'alarm_list_request', '','alarm_request') 
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "list_alarm_response": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)                
-                self.assertEqual(type(info),dict)
-                return
-
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-class alarm_details_test(unittest.TestCase):
-
-    # Test an acknowledge alarm request with valid input fields
-    def test_Valid(self):
-        time.sleep(2)
-        producer.request("test_schemas/alarm_details/acknowledge_alarm.json",'acknowledge_alarm', '','alarm_request') 
-        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
-
-        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
-        _consumer.subscribe(['alarm_response'])
-
-        for message in _consumer:
-            if message.key == "notify_alarm": 
-                info = json.loads(json.loads(json.loads(message.value)))
-                print(info)
-                time.sleep(1)                
-                self.assertEqual(type(info),dict)
-                return                
-
-if __name__ == '__main__':
-
-    # Saving test results in a log file
-
-    log_file = 'log_file.txt'
-    f = open(log_file, "w")
-    runner = unittest.TextTestRunner(f)
-    unittest.main(testRunner=runner)
-    f.close()
-
-    # For printing results on Console
-    # unittest.main()
diff --git a/osm_mon/test/CloudWatch/unit_tests_metrics.py b/osm_mon/test/CloudWatch/unit_tests_metrics.py
deleted file mode 100644 (file)
index 625e872..0000000
+++ /dev/null
@@ -1,208 +0,0 @@
-from connection import Connection
-import unittest
-import sys
-import jsmin
-import json
-import os
-import time
-from jsmin import jsmin
-sys.path.append("../../test/core/")
-from test_producer import KafkaProducer
-from kafka import KafkaConsumer
-try:
-    import boto
-    import boto.ec2
-    import boto.vpc
-    import boto.ec2.cloudwatch
-    import boto.ec2.connection
-except ImportError:
-    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-
-# Test Producer object to generate request
-
-producer = KafkaProducer('')
-obj = Connection() 
-connections = obj.setEnvironment()
-connections_res = obj.connection_instance()
-cloudwatch_conn = connections_res['cloudwatch_connection'] 
-
-# Consumer Object to consume response from message bus
-server = {'server': 'localhost:9092', 'topic': 'metric_request'}
-_consumer = KafkaConsumer(bootstrap_servers=server['server'])
-_consumer.subscribe(['metric_response'])
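# NOTE: these end-to-end test cases assume a Kafka broker reachable at
# localhost:9092 and a working boto/AWS CloudWatch connection, since the
# producer, the CloudWatch connection and the response consumer above are all
# created at module import time.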
-
-#--------------------------------------------------------------------------------------------------------------------------------------
-
-'''Test E2E Flow: the test cases are run one at a time.
-1) A common request is generated using the request function in test_producer.py (/core/message-bus)
-2) The request is then consumed by the consumer (plugin)
-3) The response is sent back on the message bus in plugin_metrics.py using
-   the response functions in producer.py (/core/message-bus)
-4) The response is then consumed again by unit_tests_metrics.py
-   and the test cases are applied to the response.
-'''
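# A minimal, self-contained sketch of the produce/consume round trip described
# above. It uses the plain kafka-python client directly and assumes a broker at
# localhost:9092; the test cases below go through the project's test_producer
# helper instead.
import json
from kafka import KafkaConsumer, KafkaProducer

sketch_producer = KafkaProducer(bootstrap_servers='localhost:9092',
                                key_serializer=str.encode,
                                value_serializer=str.encode)
sketch_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
                                key_deserializer=bytes.decode,
                                value_deserializer=bytes.decode,
                                consumer_timeout_ms=60000)
sketch_consumer.subscribe(['metric_response'])

# 1) publish a keyed request on the request topic
sketch_producer.send('metric_request', key='list_metric_request',
                     value=json.dumps({'metrics_list_request': {'correlation_id': 123}}))
sketch_producer.flush()

# 2) wait for the matching keyed response and inspect its payload
for sketch_message in sketch_consumer:
    if sketch_message.key == 'list_metrics_response':
        resp = json.loads(sketch_message.value)
        break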
-class test_create_metrics(unittest.TestCase):
-
-    def test_status_positive(self):
-        time.sleep(2)
-        # Test a create metric request with a valid metric_name
-        producer.request("create_metrics/create_metric_req_valid.json",'create_metric_request', '','metric_request')  
-
-        for message in _consumer:
-            if message.key == "create_metric_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertTrue(resp['metric_create_response']['status'])
-                self.assertEqual(resp['metric_create_response']['metric_uuid'],0)
-                return 
-
-    def test_status_negative(self):
-        time.sleep(2)
-        # Test a create metric request with an invalid metric_name
-        producer.request("create_metrics/create_metric_req_invalid.json",'create_metric_request', '','metric_request')  
-
-        for message in _consumer:
-            if message.key == "create_metric_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp['metric_create_response']['status'])
-                self.assertEqual(resp['metric_create_response']['metric_uuid'],None)
-                return 
-
-class test_metrics_data(unittest.TestCase):
-
-    def test_met_name_positive(self):
-        time.sleep(2)
-        # Test a read_metric_data_request with a valid metric_name
-        producer.request("read_metrics_data/read_metric_name_req_valid.json",'read_metric_data_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "read_metric_data_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertEqual(type(resp['metrics_data']),dict)
-                return 
-
-    def test_met_name_negative(self):
-        time.sleep(2)
-        # Test a read_metric_data_request with an invalid metric_name
-        producer.request("read_metrics_data/read_metric_name_req_invalid.json",'read_metric_data_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "read_metric_data_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp['metrics_data'])
-                return 
-
-    def test_coll_period_positive(self):
-        # Test a read_metric_data_request with a valid collection_period
-        # (for AWS metric data statistics the collection period must be a multiple of 60)
-        time.sleep(2)
-        producer.request("read_metrics_data/read_coll_period_req_valid.json",'read_metric_data_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "read_metric_data_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertEqual(type(resp),dict)
-                return
-
-    def test_coll_period_negative(self):
-        time.sleep(2)
-        # Test a read_metric_data_request with an invalid collection_period
-        producer.request("read_metrics_data/read_coll_period_req_invalid.json",'read_metric_data_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "read_metric_data_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp['metrics_data'])
-                return
-
-class test_update_metrics(unittest.TestCase):
-
-    def test_upd_status_positive(self):
-        time.sleep(2)
-        # Test an update metric request with a valid metric_name
-        producer.request("update_metrics/update_metric_req_valid.json",'update_metric_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "update_metric_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertTrue(resp['metric_update_response']['status'])
-                self.assertEqual(resp['metric_update_response']['metric_uuid'],0)
-                return
-
-    def test_upd_status_negative(self):
-        time.sleep(2)
-        # Test an update metric request with an invalid metric_name
-        producer.request("update_metrics/update_metric_req_invalid.json",'update_metric_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "update_metric_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp['metric_update_response']['status'])
-                self.assertEqual(resp['metric_update_response']['metric_uuid'],None)
-                return
-
-class test_delete_metrics(unittest.TestCase):
-
-    def test_del_met_name_positive(self):
-        time.sleep(2)
-        # Test a delete metric request with a valid metric_name
-        producer.request("delete_metrics/delete_metric_req_valid.json",'delete_metric_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "delete_metric_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp['status'])
-                return
-
-    def test_del_met_name_negative(self):
-        time.sleep(2)
-        # Test a delete metric request with an invalid metric_name
-        producer.request("delete_metrics/delete_metric_req_invalid.json",'delete_metric_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "delete_metric_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp)
-                return
-
-class test_list_metrics(unittest.TestCase):
-
-    def test_list_met_name_positive(self):
-        time.sleep(2)
-        # Test a list metrics request with a valid metric_name
-        producer.request("list_metrics/list_metric_req_valid.json",'list_metric_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "list_metrics_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertEqual(type(resp['metrics_list']),list)
-                return
-
-    def test_list_met_name_negative(self):
-        time.sleep(2)
-        # Test a list metrics request with an invalid metric_name
-        producer.request("list_metrics/list_metric_req_invalid.json",'list_metric_request', '','metric_request')  
-        for message in _consumer:
-            if message.key == "list_metrics_response": 
-                resp = json.loads(json.loads(json.loads(message.value)))
-                time.sleep(1)
-                self.assertFalse(resp['metrics_list'])
-                return
-
-
-if __name__ == '__main__':
-
-    # Saving test results in a log file
-
-    log_file = 'log_file.txt'
-    f = open(log_file, "w")
-    runner = unittest.TextTestRunner(f)
-    unittest.main(testRunner=runner)
-    f.close()
-
-    # For printing results on Console
-    # unittest.main()
-
diff --git a/osm_mon/test/OpenStack/__init__.py b/osm_mon/test/OpenStack/__init__.py
deleted file mode 100644 (file)
index 32eb94e..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
diff --git a/osm_mon/test/OpenStack/integration/__init__.py b/osm_mon/test/OpenStack/integration/__init__.py
deleted file mode 100644 (file)
index d81308a..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2018 Whitestack, LLC
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Whitestack, LLC
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: bdiaz@whitestack.com or glavado@whitestack.com
-##
diff --git a/osm_mon/test/OpenStack/integration/test_alarm_integration.py b/osm_mon/test/OpenStack/integration/test_alarm_integration.py
deleted file mode 100644 (file)
index bdd2033..0000000
+++ /dev/null
@@ -1,223 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-
-# __author__ = "Helena McGough"
-"""Test an end to end Openstack alarm requests."""
-
-import json
-import logging
-import unittest
-
-import mock
-from kafka import KafkaConsumer
-from kafka import KafkaProducer
-from kafka.errors import KafkaError
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import DatabaseManager, VimCredentials
-from osm_mon.core.message_bus.producer import KafkaProducer as Producer
-from osm_mon.plugins.OpenStack import response
-from osm_mon.plugins.OpenStack.Aodh import alarming
-from osm_mon.plugins.OpenStack.common import Common
-
-log = logging.getLogger(__name__)
-
-mock_creds = VimCredentials()
-mock_creds.config = '{}'
-
-
-@mock.patch.object(Producer, "publish_alarm_request", mock.Mock())
-@mock.patch.object(DatabaseManager, "save_alarm", mock.Mock())
-@mock.patch.object(Common, "get_auth_token", mock.Mock())
-@mock.patch.object(Common, "get_endpoint", mock.Mock())
-class AlarmIntegrationTest(unittest.TestCase):
-    def setUp(self):
-        try:
-            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
-                                          key_serializer=str.encode,
-                                          value_serializer=str.encode
-                                          )
-            self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
-                                              key_deserializer=bytes.decode,
-                                              value_deserializer=bytes.decode,
-                                              auto_offset_reset='earliest',
-                                              consumer_timeout_ms=60000)
-            self.req_consumer.subscribe(['alarm_request'])
-        except KafkaError:
-            self.skipTest('Kafka server not present.')
-        # Set up common and alarming class instances
-        self.alarms = alarming.Alarming()
-        self.openstack_auth = Common()
-
-    def tearDown(self):
-        self.producer.close()
-        self.req_consumer.close()
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarming.Alarming, "update_alarm")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_update_alarm_req(self, resp, update_alarm, get_creds, perf_req):
-        """Test Aodh update alarm request message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"alarm_update_request": {"correlation_id": 123,
-                                            "alarm_uuid": "alarm_id",
-                                            "metric_uuid": "metric_id"}}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-
-        self.producer.send('alarm_request', key="update_alarm_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            if message.key == "update_alarm_request":
-                # Mock a valid alarm update
-                update_alarm.return_value = "alarm_id"
-                self.alarms.alarming(message, 'test_id')
-
-                # A response message is generated and sent via MON's producer
-                resp.assert_called_with(
-                    'update_alarm_response', alarm_id="alarm_id", cor_id=123,
-                    status=True)
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarming.Alarming, "configure_alarm")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_create_alarm_req(self, resp, config_alarm, get_creds, perf_req):
-        """Test Aodh create alarm request message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"alarm_create_request": {"correlation_id": 123,
-                                            "alarm_name": "my_alarm",
-                                            "metric_name": "cpu_utilization",
-                                            "resource_uuid": "my_resource",
-                                            "severity": "WARNING",
-                                            "threshold_value": 60,
-                                            "operation": "GT",
-                                            "vdu_name": "vdu",
-                                            "vnf_member_index": "1",
-                                            "ns_id": "1"}}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-        self.producer.send('alarm_request', key="create_alarm_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            if message.key == "create_alarm_request":
-                # Mock a valid alarm creation
-                config_alarm.return_value = "alarm_id"
-                self.alarms.alarming(message, 'test_id')
-
-                # A response message is generated and sent via MON's producer
-                resp.assert_called_with(
-                    'create_alarm_response', status=True, alarm_id="alarm_id",
-                    cor_id=123)
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarming.Alarming, "list_alarms")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_list_alarm_req(self, resp, list_alarm, get_creds, perf_req):
-        """Test Aodh list alarm request message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"alarm_list_request": {"correlation_id": 123,
-                                          "resource_uuid": "resource_id", }}
-
-        self.producer.send('alarm_request', key="list_alarm_request",
-                           value=json.dumps(payload))
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps([])})
-        resp.return_value = ''
-
-        for message in self.req_consumer:
-            if message.key == "list_alarm_request":
-                # Mock an empty list generated by the request
-                list_alarm.return_value = []
-                self.alarms.alarming(message, 'test_id')
-
-                # Response message is generated
-                resp.assert_called_with(
-                    'list_alarm_response', alarm_list=[],
-                    cor_id=123)
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarming.Alarming, "delete_alarm")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_delete_alarm_req(self, resp, del_alarm, get_creds, perf_req):
-        """Test Aodh delete alarm request message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"alarm_delete_request": {"correlation_id": 123,
-                                            "alarm_uuid": "alarm_id", }}
-
-        self.producer.send('alarm_request', key="delete_alarm_request",
-                           value=json.dumps(payload))
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps([])})
-        resp.return_value = ''
-
-        for message in self.req_consumer:
-            if message.key == "delete_alarm_request":
-                self.alarms.alarming(message, 'test_id')
-
-                # Response message is generated and sent by MON's producer
-                resp.assert_called_with(
-                    'delete_alarm_response', alarm_id="alarm_id",
-                    status=True, cor_id=123)
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarming.Alarming, "update_alarm_state")
-    def test_ack_alarm_req(self, ack_alarm, get_creds):
-        """Test Aodh acknowledge alarm request message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"ack_details": {"alarm_uuid": "alarm_id", }}
-
-        self.producer.send('alarm_request', key="acknowledge_alarm",
-                           value=json.dumps(payload))
-
-        get_creds.return_value = mock_creds
-        ack_alarm.return_value = True
-
-        for message in self.req_consumer:
-            if message.key == "acknowledge_alarm":
-                self.alarms.alarming(message, 'test_id')
-                ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'alarm_id')
-                return
-
-        self.fail("No message received in consumer")
diff --git a/osm_mon/test/OpenStack/integration/test_metric_integration.py b/osm_mon/test/OpenStack/integration/test_metric_integration.py
deleted file mode 100644 (file)
index eb672da..0000000
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-
-# __author__ = "Helena McGough"
-"""Test an end to end Openstack metric requests."""
-
-import json
-
-import logging
-import unittest
-
-from kafka.errors import KafkaError
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import VimCredentials
-from osm_mon.core.message_bus.producer import KafkaProducer as Producer
-
-from kafka import KafkaConsumer
-from kafka import KafkaProducer
-
-import mock
-
-from osm_mon.plugins.OpenStack import response
-
-from osm_mon.plugins.OpenStack.Gnocchi import metrics
-
-from osm_mon.plugins.OpenStack.common import Common
-
-log = logging.getLogger(__name__)
-
-mock_creds = VimCredentials()
-mock_creds.config = '{}'
-
-
-@mock.patch.object(Producer, "publish_alarm_request", mock.Mock())
-@mock.patch.object(Common, "get_auth_token", mock.Mock())
-@mock.patch.object(Common, "get_endpoint", mock.Mock())
-class MetricIntegrationTest(unittest.TestCase):
-    def setUp(self):
-        # Set up common and alarming class instances
-        self.metric_req = metrics.Metrics()
-        self.openstack_auth = Common()
-
-        try:
-            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
-                                          key_serializer=str.encode,
-                                          value_serializer=str.encode
-                                          )
-            self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
-                                              key_deserializer=bytes.decode,
-                                              value_deserializer=bytes.decode,
-                                              auto_offset_reset='earliest',
-                                              consumer_timeout_ms=60000)
-            self.req_consumer.subscribe(['metric_request'])
-        except KafkaError:
-            self.skipTest('Kafka server not present.')
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(metrics.Metrics, "configure_metric")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_create_metric_req(self, resp, config_metric, get_creds, perf_req):
-        """Test Gnocchi create metric request message from producer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"metric_create_request": {"correlation_id": 123,
-                                             "metric_name": "cpu_utilization",
-                                             "resource_uuid": "resource_id"}}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-
-        self.producer.send('metric_request', key="create_metric_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            if message.key == "create_metric_request":
-                # A valid metric is created
-                config_metric.return_value = "metric_id", "resource_id", True
-                self.metric_req.metric_calls(message, 'test_id')
-
-                # A response message is generated and sent by MON's producer
-                resp.assert_called_with(
-                    'create_metric_response', status=True, cor_id=123,
-                    metric_id="metric_id", r_id="resource_id")
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(metrics.Metrics, "delete_metric")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_delete_metric_req(self, resp, del_metric, get_creds, perf_req):
-        """Test Gnocchi delete metric request message from producer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"vim_type": "OpenSTACK",
-                   "vim_uuid": "1",
-                   "correlation_id": 123,
-                   "metric_name": "cpu_utilization",
-                   "resource_uuid": "resource_id"}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-
-        self.producer.send('metric_request', key="delete_metric_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            if message.key == "delete_metric_request":
-                # Metric has been deleted
-                del_metric.return_value = True
-                self.metric_req.metric_calls(message, 'test_id')
-
-                # A response message is generated and sent by MON's producer
-                resp.assert_called_with(
-                    'delete_metric_response', m_id='1',
-                    m_name="cpu_utilization", status=True, r_id="resource_id",
-                    cor_id=123)
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(metrics.Metrics, "read_metric_data")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_read_metric_data_req(self, resp, read_data, get_creds, perf_req):
-        """Test Gnocchi read metric data request message from producer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"vim_type": "OpenSTACK",
-                   "vim_uuid": "test_id",
-                   "correlation_id": 123,
-                   "metric_name": "cpu_utilization",
-                   "resource_uuid": "resource_id"}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-
-        self.producer.send('metric_request', key="read_metric_data_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            # Check which VIM the message is addressed to
-            if message.key == "read_metric_data_request":
-                # Mock empty lists generated by the request message
-                read_data.return_value = [], []
-                self.metric_req.metric_calls(message, 'test_id')
-
-                # A response message is generated and sent by MON's producer
-                resp.assert_called_with(
-                    'read_metric_data_response', m_id='1',
-                    m_name="cpu_utilization", r_id="resource_id", cor_id=123, times=[],
-                    metrics=[])
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(metrics.Metrics, "list_metrics")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_list_metrics_req(self, resp, list_metrics, get_creds, perf_req):
-        """Test Gnocchi list metrics request message from producer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"vim_type": "OpenSTACK",
-                   "vim_uuid": "1",
-                   "metrics_list_request":
-                       {"correlation_id": 123, }}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-
-        self.producer.send('metric_request', key="list_metric_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            # Check which VIM the message is addressed to
-            if message.key == "list_metric_request":
-                # Mock an empty list generated by the request
-                list_metrics.return_value = []
-                self.metric_req.metric_calls(message, 'test_id')
-
-                # A response message is generated and sent by MON's producer
-                resp.assert_called_with(
-                    'list_metric_response', m_list=[], cor_id=123)
-
-                return
-        self.fail("No message received in consumer")
-
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(metrics.Metrics, "get_metric_id")
-    @mock.patch.object(response.OpenStack_Response, "generate_response")
-    def test_update_metrics_req(self, resp, get_id, get_creds, perf_req):
-        """Test Gnocchi update metric request message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        payload = {"metric_create_request": {"metric_name": "my_metric",
-                                             "correlation_id": 123,
-                                             "resource_uuid": "resource_id", }}
-
-        get_creds.return_value = mock_creds
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
-        resp.return_value = ''
-
-        self.producer.send('metric_request', key="update_metric_request",
-                           value=json.dumps(payload))
-
-        for message in self.req_consumer:
-            # Check which VIM the message is addressed to
-            if message.key == "update_metric_request":
-                # Gnocchi doesn't support metric updates
-                get_id.return_value = "metric_id"
-                self.metric_req.metric_calls(message, 'test_id')
-
-                # Response message is generated and sent via MON's producer
-                # No metric update has taken place
-                resp.assert_called_with(
-                    'update_metric_response', status=False, cor_id=123,
-                    r_id="resource_id", m_id="metric_id")
-
-                return
-        self.fail("No message received in consumer")
diff --git a/osm_mon/test/OpenStack/integration/test_notify_alarm.py b/osm_mon/test/OpenStack/integration/test_notify_alarm.py
deleted file mode 100644 (file)
index 0841446..0000000
+++ /dev/null
@@ -1,191 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for all common OpenStack methods."""
-
-
-from __future__ import unicode_literals
-import json
-import logging
-import socket
-import unittest
-from threading import Thread
-
-import mock
-import requests
-from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
-from six.moves.BaseHTTPServer import HTTPServer
-
-from osm_mon.core.message_bus.producer import KafkaProducer
-from osm_mon.core.settings import Config
-from osm_mon.plugins.OpenStack.Aodh.alarming import Alarming
-from osm_mon.plugins.OpenStack.common import Common
-from osm_mon.plugins.OpenStack.response import OpenStack_Response
-
-log = logging.getLogger(__name__)
-
-# Create an instance of the common openstack class, producer and consumer
-openstack_auth = Common()
-
-# Mock a valid get_response for alarm details
-valid_get_resp = '{"gnocchi_resources_threshold_rule":\
-                  {"resource_id": "my_resource_id"}}'
-
-
-class MockResponse(object):
-    """Mock a response class for generating responses."""
-
-    def __init__(self, text):
-        """Initialise a mock response with a text attribute."""
-        self.text = text
-
-
-class MockNotifierHandler(BaseHTTPRequestHandler):
-    """Mock the NotifierHandler class for testing purposes."""
-
-    def _set_headers(self):
-        """Set the headers for a request."""
-        self.send_response(200)
-        self.send_header('Content-type', 'text/html')
-        self.end_headers()
-
-    def do_GET(self):
-        """Mock functionality for GET request."""
-        self._set_headers()
-
-    def do_POST(self):
-        """Mock functionality for a POST request."""
-        self._set_headers()
-        content_length = int(self.headers['Content-Length'])
-        post_data = self.rfile.read(content_length)
-        try:
-            post_data = post_data.decode()
-        except AttributeError:
-            pass
-        self.notify_alarm(json.loads(post_data))
-
-    def notify_alarm(self, values):
-        """Mock the notify_alarm functionality to generate a valid response."""
-        config = Config.instance()
-        config.read_environ()
-        self._alarming = Alarming()
-        self._common = Common()
-        self._response = OpenStack_Response()
-        self._producer = KafkaProducer('alarm_response')
-        alarm_id = values['alarm_id']
-
-        auth_token = Common.get_auth_token('test_id')
-        endpoint = Common.get_endpoint('alarming', 'test_id')
-
-        # If authenticated generate and send response message
-        if auth_token is not None and endpoint is not None:
-            url = "{}/v2/alarms/%s".format(endpoint) % alarm_id
-
-            # Get the resource_id of the triggered alarm and the date
-            result = Common.perform_request(
-                url, auth_token, req_type="get")
-            alarm_details = json.loads(result.text)
-            gnocchi_rule = alarm_details['gnocchi_resources_threshold_rule']
-            resource_id = gnocchi_rule['resource_id']
-            # Mock a date for testing purposes
-            a_date = "dd-mm-yyyy 00:00"
-
-            # Process an alarm notification if resource_id is valid
-            if resource_id is not None:
-                # Try generate and send response
-                try:
-                    resp_message = self._response.generate_response(
-                        'notify_alarm', a_id=alarm_id,
-                        r_id=resource_id,
-                        sev=values['severity'], date=a_date,
-                        state=values['current'], vim_type="OpenStack")
-                    self._producer.publish_alarm_response(
-                        'notify_alarm', resp_message)
-                except Exception:
-                    pass
-
-
-def get_free_port():
-    """Function to get a free port to run the test webserver on."""
-    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
-    s.bind(('localhost', 0))
-    address, port = s.getsockname()
-    s.close()
-    return port
-
-
-# Create the webserver, port and run it on its own thread
-mock_server_port = get_free_port()
-mock_server = HTTPServer(('localhost', mock_server_port), MockNotifierHandler)
-mock_server_thread = Thread(target=mock_server.serve_forever)
-mock_server_thread.setDaemon(True)
-mock_server_thread.start()
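# The HTTPServer above stands in for the Aodh notifier webservice: it serves
# MockNotifierHandler on a free localhost port in a daemon thread so that the
# tests below can POST alarm notifications without a real OpenStack deployment.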
-
-
-def test_do_get():
-    """Integration test for get request on notifier webserver."""
-    url = 'http://localhost:{port}/users'.format(port=mock_server_port)
-
-    # Send a request to the mock API server and store the response.
-    response = requests.get(url)
-
-    # Confirm that the request-response cycle completed successfully.
-    assert response.ok
-
-
-class AlarmNotificationTest(unittest.TestCase):
-    @mock.patch.object(KafkaProducer, "publish_alarm_response")
-    @mock.patch.object(OpenStack_Response, "generate_response")
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(Common, "get_endpoint")
-    @mock.patch.object(Common, "get_auth_token")
-    def test_post_notify_alarm(self, auth, endpoint, perf_req, resp, notify):
-        """Integration test for notify_alarm."""
-        url = 'http://localhost:{port}/users'.format(port=mock_server_port)
-        payload = {"severity": "critical",
-                   "alarm_name": "my_alarm",
-                   "current": "current_state",
-                   "alarm_id": "my_alarm_id",
-                   "reason": "Threshold has been broken",
-                   "reason_data": {"count": 1,
-                                   "most_recent": "null",
-                                   "type": "threshold",
-                                   "disposition": "unknown"},
-                   "previous": "previous_state"}
-
-        # Mock authenticate and request response for testing
-        auth.return_value = "my_auth_token"
-        endpoint.return_value = "my_endpoint"
-        perf_req.return_value = MockResponse(valid_get_resp)
-
-        # Generate a post request for testing
-        response = requests.post(url, json.dumps(payload))
-        self.assertEqual(response.status_code, 200)
-        # A response message is generated with the following details
-        resp.assert_called_with(
-            "notify_alarm", a_id="my_alarm_id", r_id="my_resource_id",
-            sev="critical", date='dd-mm-yyyy 00:00', state="current_state",
-            vim_type="OpenStack")
-
-        # Response message is sent back to the SO via MON's producer
-        notify.assert_called_with("notify_alarm", mock.ANY)
diff --git a/osm_mon/test/OpenStack/integration/test_vim_account.py b/osm_mon/test/OpenStack/integration/test_vim_account.py
deleted file mode 100644 (file)
index da34bb2..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2018 Whitestack, LLC
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Whitestack, LLC
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: bdiaz@whitestack.com or glavado@whitestack.com
-##
-
-"""Test an end to end Openstack vim_account requests."""
-
-import json
-import logging
-import unittest
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import DatabaseManager
-
-log = logging.getLogger(__name__)
-
-
-class VimAccountTest(unittest.TestCase):
-    def setUp(self):
-        self.auth_manager = AuthManager()
-        self.database_manager = DatabaseManager()
-        self.database_manager.create_tables()
-
-    def test_create_edit_delete_vim_account(self):
-        """Test vim_account creation message from KafkaProducer."""
-        # Set-up message, producer and consumer for tests
-        create_payload = {
-            "_id": "test_id",
-            "name": "test_name",
-            "vim_type": "openstack",
-            "vim_url": "auth_url",
-            "vim_user": "user",
-            "vim_password": "password",
-            "vim_tenant_name": "tenant",
-            "config":
-                {
-                    "foo": "bar"
-                }
-        }
-        self.auth_manager.store_auth_credentials(create_payload)
-
-        creds = self.auth_manager.get_credentials('test_id')
-
-        self.assertIsNotNone(creds)
-        self.assertEqual(creds.name, create_payload['name'])
-        self.assertEqual(json.loads(creds.config), create_payload['config'])
-
-        # Set-up message, producer and consumer for tests
-        edit_payload = {
-            "_id": "test_id",
-            "name": "test_name_edited",
-            "vim_type": "openstack",
-            "vim_url": "auth_url",
-            "vim_user": "user",
-            "vim_password": "password",
-            "vim_tenant_name": "tenant",
-            "config":
-                {
-                    "foo_edited": "bar_edited"
-                }
-        }
-
-        self.auth_manager.store_auth_credentials(edit_payload)
-
-        creds = self.auth_manager.get_credentials('test_id')
-
-        self.assertEqual(creds.name, edit_payload['name'])
-        self.assertEqual(json.loads(creds.config), edit_payload['config'])
-
-        delete_payload = {
-            "_id": "test_id"
-        }
-
-        self.auth_manager.delete_auth_credentials(delete_payload)
-
-        creds = self.auth_manager.get_credentials('test_id')
-        self.assertIsNone(creds)
diff --git a/osm_mon/test/OpenStack/unit/__init__.py b/osm_mon/test/OpenStack/unit/__init__.py
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/osm_mon/test/OpenStack/unit/test_alarm_req.py b/osm_mon/test/OpenStack/unit/test_alarm_req.py
deleted file mode 100644 (file)
index e5f0d86..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# **************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the 'License'); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for all alarm request message keys."""
-
-import json
-
-import logging
-
-import unittest
-
-import mock
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import VimCredentials, DatabaseManager
-from osm_mon.core.message_bus.producer import KafkaProducer
-from osm_mon.plugins.OpenStack.Aodh import alarming as alarm_req
-from osm_mon.plugins.OpenStack.common import Common
-
-log = logging.getLogger(__name__)
-
-mock_creds = VimCredentials()
-mock_creds.config = '{}'
-
-
-class Message(object):
-    """A class to mock a message object value for alarm requests."""
-
-    def __init__(self):
-        """Initialize a mocked message instance."""
-        self.topic = 'alarm_request'
-        self.key = None
-        self.value = json.dumps({'mock_value': 'mock_details'})
-
-
-@mock.patch.object(KafkaProducer, 'publish', mock.Mock())
-class TestAlarmKeys(unittest.TestCase):
-    """Integration test for alarm request keys."""
-
-    def setUp(self):
-        """Setup the tests for alarm request keys."""
-        super(TestAlarmKeys, self).setUp()
-        self.alarming = alarm_req.Alarming()
-        self.alarming.common = Common()
-
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(Common, 'get_endpoint')
-    @mock.patch.object(Common, 'get_auth_token')
-    def test_alarming_authentication(self, get_token, get_endpoint, get_creds):
-        """Test getting an auth_token and endpoint for alarm requests."""
-        # if auth_token is None environment variables are used to authenticate
-        message = Message()
-
-        get_creds.return_value = mock_creds
-
-        self.alarming.alarming(message, 'test_id')
-
-        get_token.assert_called_with('test_id')
-        get_endpoint.assert_any_call('alarming', 'test_id')
-
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarm_req.Alarming, 'delete_alarm')
-    def test_delete_alarm_key(self, del_alarm, get_creds):
-        """Test the functionality for a delete alarm request."""
-        # Mock a message value and key
-        message = Message()
-        message.key = 'delete_alarm_request'
-        message.value = json.dumps({'alarm_delete_request': {
-            'correlation_id': 1,
-            'alarm_uuid': 'my_alarm_id'
-        }})
-
-        get_creds.return_value = mock_creds
-        del_alarm.return_value = {}
-
-        # Call the alarming functionality and check delete request
-        self.alarming.alarming(message, 'test_id')
-        del_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id')
-
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarm_req.Alarming, 'list_alarms')
-    def test_list_alarm_key(self, list_alarm, get_creds):
-        """Test the functionality for a list alarm request."""
-        # Mock a message with list alarm key and value
-        message = Message()
-        message.key = 'list_alarm_request'
-        message.value = json.dumps({'alarm_list_request': {'correlation_id': 1}})
-
-        get_creds.return_value = mock_creds
-
-        list_alarm.return_value = []
-
-        # Call the alarming functionality and check list functionality
-        self.alarming.alarming(message, 'test_id')
-        list_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1})
-
-    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarm_req.Alarming, 'update_alarm_state')
-    def test_ack_alarm_key(self, ack_alarm, get_creds):
-        """Test the functionality for an acknowledge alarm request."""
-        # Mock a message with acknowledge alarm key and value
-        message = Message()
-        message.key = 'acknowledge_alarm'
-        message.value = json.dumps({'ack_details':
-                                        {'alarm_uuid': 'my_alarm_id'}})
-
-        get_creds.return_value = mock_creds
-
-        # Call alarming functionality and check acknowledge functionality
-        self.alarming.alarming(message, 'test_id')
-        ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id')
-
-    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(DatabaseManager, 'save_alarm', mock.Mock())
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, 'get_credentials')
-    @mock.patch.object(alarm_req.Alarming, 'configure_alarm')
-    def test_config_alarm_key(self, config_alarm, get_creds, perf_req):
-        """Test the functionality for a create alarm request."""
-        # Mock a message with config alarm key and value
-        message = Message()
-        message.key = 'create_alarm_request'
-        message.value = json.dumps({'alarm_create_request': {'correlation_id': 1, 'threshold_value': 50,
-                                                             'operation': 'GT', 'metric_name': 'cpu_utilization',
-                                                             'vdu_name': 'vdu',
-                                                             'vnf_member_index': '1',
-                                                             'ns_id': '1',
-                                                             'resource_uuid': '123'}})
-        mock_perf_req_return_value = {"metrics": {"cpu_util": 123}}
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps(mock_perf_req_return_value, sort_keys=True)})
-        get_creds.return_value = mock_creds
-
-        # Call alarming functionality and check config alarm call
-        config_alarm.return_value = 'my_alarm_id'
-        self.alarming.alarming(message, 'test_id')
-        config_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1, 'threshold_value': 50,
-                                                             'operation': 'GT',
-                                                             'metric_name': 'cpu_utilization',
-                                                             'vdu_name': 'vdu',
-                                                             'vnf_member_index': '1', 'ns_id': '1',
-                                                             'resource_uuid': '123'}, {})
diff --git a/osm_mon/test/OpenStack/unit/test_alarming.py b/osm_mon/test/OpenStack/unit/test_alarming.py
deleted file mode 100644 (file)
index 19a9826..0000000
+++ /dev/null
@@ -1,294 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# **************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for the OpenStack Aodh alarming class functions."""
-
-import json
-import logging
-import unittest
-
-import mock
-
-from osm_mon.core.settings import Config
-from osm_mon.plugins.OpenStack.Aodh import alarming as alarm_req
-from osm_mon.plugins.OpenStack.common import Common
-
-log = logging.getLogger(__name__)
-
-auth_token = mock.ANY
-alarm_endpoint = "alarm_endpoint"
-metric_endpoint = "metric_endpoint"
-
-
-class Response(object):
-    """Mock a response message class."""
-
-    def __init__(self, result):
-        """Initialise the response text and status code."""
-        self.text = json.dumps(result)
-        self.status_code = "MOCK_STATUS_CODE"
-
-
-class TestAlarming(unittest.TestCase):
-    """Tests for alarming class functions."""
-
-    maxDiff = None
-
-    def setUp(self):
-        """Setup for tests."""
-        super(TestAlarming, self).setUp()
-        self.alarming = alarm_req.Alarming()
-
-    @mock.patch.object(Common, "perform_request")
-    def test_config_invalid_alarm_req(self, perf_req):
-        """Test configure an invalid alarm request."""
-        # Configuring with invalid metric name results in failure
-        values = {"alarm_name": "my_alarm",
-                  "metric_name": "my_metric",
-                  "resource_uuid": "my_r_id"}
-        with self.assertRaises(KeyError):
-            self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {})
-        perf_req.assert_not_called()
-        perf_req.reset_mock()
-
-        # Configuring with missing metric name results in failure
-        values = {"alarm_name": "disk_write_ops",
-                  "resource_uuid": "my_r_id"}
-
-        with self.assertRaises(KeyError):
-            self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {})
-        perf_req.assert_not_called()
-
-    @mock.patch.object(Common, "perform_request")
-    def test_config_valid_alarm_req(self, perf_req):
-        """Test config a valid alarm."""
-        values = {"alarm_name": "disk_write_ops",
-                  "metric_name": "disk_write_ops",
-                  "resource_uuid": "my_r_id",
-                  "statistic": "AVERAGE",
-                  "threshold_value": 60,
-                  "operation": "GT"}
-
-        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
-
-        self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {})
-        payload = {"name": "disk_write_ops",
-                   "gnocchi_resources_threshold_rule": {"resource_type": "generic", "comparison_operator": "gt",
-                                                        "granularity": "300", "metric": "disk.write.requests",
-                                                        "aggregation_method": "mean", "threshold": 60,
-                                                        "resource_id": "my_r_id"},
-                   "alarm_actions": ["http://localhost:8662"], "state": "ok", "type": "gnocchi_resources_threshold",
-                   "severity": "critical"}
-        perf_req.assert_called_with(
-            "alarm_endpoint/v2/alarms/", auth_token,
-            req_type="post", payload=json.dumps(payload, sort_keys=True))
-
-    @mock.patch.object(Common, "perform_request")
-    def test_delete_alarm_req(self, perf_req):
-        """Test delete alarm request."""
-        self.alarming.delete_alarm(alarm_endpoint, auth_token, "my_alarm_id")
-
-        perf_req.assert_called_with(
-            "alarm_endpoint/v2/alarms/my_alarm_id", auth_token, req_type="delete")
-
-    @mock.patch.object(Common, "perform_request")
-    def test_invalid_list_alarm_req(self, perf_req):
-        """Test invalid list alarm_req."""
-        # Request will not be performed without a resource_id
-        list_details = {"mock_details": "invalid_details"}
-        with self.assertRaises(KeyError):
-            self.alarming.list_alarms(alarm_endpoint, auth_token, list_details)
-        perf_req.assert_not_called()
-
-    @mock.patch.object(Common, "perform_request")
-    def test_valid_list_alarm_req(self, perf_req):
-        """Test valid list alarm request."""
-        # Minimum requirement for an alarm list is resource_id
-        list_details = {"resource_uuid": "mock_r_id", "alarm_name": "mock_alarm", "severity": "critical"}
-
-        mock_perf_req_return_value = [
-            {"alarm_id": "1", "name": "mock_alarm", "severity": "critical",
-             "gnocchi_resources_threshold_rule": {"resource_id": "mock_r_id"}}]
-        perf_req.return_value = type('obj', (object,),
-                                     {'text': json.dumps(mock_perf_req_return_value)})
-
-        alarm_list = self.alarming.list_alarms(alarm_endpoint, auth_token, list_details)
-
-        self.assertDictEqual(alarm_list[0], mock_perf_req_return_value[0])
-
-        perf_req.assert_called_with(
-            "alarm_endpoint/v2/alarms/", auth_token, req_type="get")
-        perf_req.reset_mock()
-
-        # Check list with alarm_name defined
-        list_details = {"resource_uuid": "mock_r_id",
-                        "alarm_name": "mock_alarm",
-                        "severity": "critical"}
-        alarm_list = self.alarming.list_alarms(alarm_endpoint, auth_token, list_details)
-
-        self.assertDictEqual(alarm_list[0], mock_perf_req_return_value[0])
-
-        perf_req.assert_called_with(
-            "alarm_endpoint/v2/alarms/", auth_token, req_type="get")
-
-    @mock.patch.object(Common, "perform_request")
-    def test_ack_alarm_req(self, perf_req):
-        """Test update alarm state for acknowledge alarm request."""
-        self.alarming.update_alarm_state(alarm_endpoint, auth_token, "my_alarm_id")
-
-        perf_req.assert_called_with(
-            "alarm_endpoint/v2/alarms/my_alarm_id/state", auth_token, req_type="put",
-            payload=json.dumps("ok"))
-
-    @mock.patch.object(Common, "perform_request")
-    def test_update_alarm_invalid(self, perf_req):
-        """Test update alarm with invalid get response."""
-        values = {"alarm_uuid": "my_alarm_id"}
-
-        perf_req.return_value = type('obj', (object,), {'invalid_prop': 'Invalid response'})
-
-        with self.assertRaises(Exception):
-            self.alarming.update_alarm(alarm_endpoint, auth_token, values, {})
-        perf_req.assert_called_with(mock.ANY, auth_token, req_type="get")
-
-    @mock.patch.object(Common, "perform_request")
-    def test_update_alarm_invalid_payload(self, perf_req):
-        """Test update alarm with invalid payload."""
-        resp = Response({"name": "my_alarm",
-                         "state": "alarm",
-                         "gnocchi_resources_threshold_rule":
-                             {"resource_id": "my_resource_id",
-                              "metric": "my_metric"}})
-        perf_req.return_value = resp
-        values = {"alarm_uuid": "my_alarm_id"}
-
-        with self.assertRaises(Exception):
-            self.alarming.update_alarm(alarm_endpoint, auth_token, values, {})
-        perf_req.assert_called_with(mock.ANY, auth_token, req_type="get")
-        self.assertEqual(perf_req.call_count, 1)
-
-    @mock.patch.object(alarm_req.Alarming, "check_payload")
-    @mock.patch.object(Common, "perform_request")
-    def test_update_alarm_valid(self, perf_req, check_pay):
-        """Test valid update alarm request."""
-        resp = Response({"alarm_id": "1",
-                         "name": "my_alarm",
-                         "state": "alarm",
-                         "gnocchi_resources_threshold_rule":
-                             {"resource_id": "my_resource_id",
-                              "metric": "disk.write.requests"}})
-        perf_req.return_value = resp
-        values = {"alarm_uuid": "my_alarm_id"}
-
-        self.alarming.update_alarm(alarm_endpoint, auth_token, values, {})
-
-        check_pay.assert_called_with(values, "disk_write_ops", "my_resource_id",
-                                     "my_alarm", alarm_state="alarm")
-
-        self.assertEqual(perf_req.call_count, 2)
-        # Second call is the update request
-        perf_req.assert_called_with(
-            'alarm_endpoint/v2/alarms/my_alarm_id', auth_token,
-            req_type="put", payload=check_pay.return_value)
-
-    @mock.patch.object(Config, "instance")
-    def test_check_valid_payload(self, cfg):
-        """Test the check payload function for a valid payload."""
-        values = {"severity": "warning",
-                  "statistic": "COUNT",
-                  "threshold_value": 12,
-                  "operation": "GT",
-                  "granularity": 300,
-                  "resource_type": "generic"}
-        cfg.return_value.OS_NOTIFIER_URI = "http://localhost:8662"
-        payload = self.alarming.check_payload(
-            values, "disk_write_ops", "r_id", "alarm_name")
-
-        self.assertDictEqual(
-            json.loads(payload), {"name": "alarm_name",
-                                  "gnocchi_resources_threshold_rule":
-                                      {"resource_id": "r_id",
-                                       "metric": "disk.write.requests",
-                                       "comparison_operator": "gt",
-                                       "aggregation_method": "count",
-                                       "threshold": 12,
-                                       "granularity": 300,
-                                       "resource_type": "generic"},
-                                  "severity": "low",
-                                  "state": "ok",
-                                  "type": "gnocchi_resources_threshold",
-                                  "alarm_actions": ["http://localhost:8662"]})
-
-    @mock.patch.object(Config, "instance")
-    @mock.patch.object(Common, "perform_request")
-    def test_check_valid_state_payload(self, perform_req, cfg):
-        """Test the check payload function for a valid payload with state."""
-        values = {"severity": "warning",
-                  "statistic": "COUNT",
-                  "threshold_value": 12,
-                  "operation": "GT",
-                  "granularity": 300,
-                  "resource_type": "generic"}
-        cfg.return_value.OS_NOTIFIER_URI = "http://localhost:8662"
-        payload = self.alarming.check_payload(
-            values, "disk_write_ops", "r_id", "alarm_name", alarm_state="alarm")
-
-        self.assertEqual(
-            json.loads(payload), {"name": "alarm_name",
-                                  "gnocchi_resources_threshold_rule":
-                                      {"resource_id": "r_id",
-                                       "metric": "disk.write.requests",
-                                       "comparison_operator": "gt",
-                                       "aggregation_method": "count",
-                                       "threshold": 12,
-                                       "granularity": 300,
-                                       "resource_type": "generic"},
-                                  "severity": "low",
-                                  "state": "alarm",
-                                  "type": "gnocchi_resources_threshold",
-                                  "alarm_actions": ["http://localhost:8662"]})
-
-    def test_check_invalid_payload(self):
-        """Test the check payload function for an invalid payload."""
-        values = {"alarm_values": "mock_invalid_details"}
-        with self.assertRaises(Exception):
-            self.alarming.check_payload(values, "my_metric", "r_id", "alarm_name")
-
-    @mock.patch.object(Common, "perform_request")
-    def test_get_alarm_state(self, perf_req):
-        """Test the get alarm state function."""
-        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
-
-        self.alarming.get_alarm_state(alarm_endpoint, auth_token, "alarm_id")
-
-        perf_req.assert_called_with(
-            "alarm_endpoint/v2/alarms/alarm_id/state", auth_token, req_type="get")
-
-    @mock.patch.object(Common, "perform_request")
-    def test_check_for_metric(self, perf_req):
-        """Test the check for metric function."""
-        mock_perf_req_return_value = {"metrics": {"cpu_util": 123}}
-        perf_req.return_value = type('obj', (object,), {'text': json.dumps(mock_perf_req_return_value)})
-
-        self.alarming.check_for_metric(auth_token, metric_endpoint, "cpu_utilization", "r_id")
-
-        perf_req.assert_called_with(
-            "metric_endpoint/v1/resource/generic/r_id", auth_token, req_type="get")
diff --git a/osm_mon/test/OpenStack/unit/test_common.py b/osm_mon/test/OpenStack/unit/test_common.py
deleted file mode 100644 (file)
index e6c52fb..0000000
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for all common OpenStack methods."""
-
-import json
-import logging
-import unittest
-
-import mock
-import requests
-from keystoneclient.v3 import client
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.database import VimCredentials
-from osm_mon.plugins.OpenStack.common import Common
-
-__author__ = "Helena McGough"
-
-log = logging.getLogger(__name__)
-
-
-class Message(object):
-    """Mock a message for an access credentials request."""
-
-    def __init__(self):
-        """Initialise the topic and value of access_cred message."""
-        self.topic = "access_credentials"
-        self.value = json.dumps({"mock_value": "mock_details",
-                                 "vim_type": "OPENSTACK",
-                                 "access_config":
-                                     {"openstack_site": "my_site",
-                                      "user": "my_user",
-                                      "password": "my_password",
-                                      "vim_tenant_name": "my_tenant"}})
-
-
-class TestCommon(unittest.TestCase):
-    """Test the common class for OpenStack plugins."""
-
-    def setUp(self):
-        """Test Setup."""
-        super(TestCommon, self).setUp()
-        self.common = Common()
-        self.creds = VimCredentials()
-        self.creds.id = 'test_id'
-        self.creds.user = 'user'
-        self.creds.url = 'url'
-        self.creds.password = 'password'
-        self.creds.tenant_name = 'tenant_name'
-
-    @mock.patch.object(AuthManager, "get_credentials")
-    @mock.patch.object(client.Client, "get_raw_token_from_identity_service")
-    def test_get_auth_token(self, get_token, get_creds):
-        """Test generating a new authentication token."""
-        get_creds.return_value = self.creds
-        Common.get_auth_token('test_id')
-        get_creds.assert_called_with('test_id')
-        get_token.assert_called_with(auth_url='url', password='password', project_name='tenant_name', username='user',
-                                     project_domain_id='default', user_domain_id='default')
-
-    @mock.patch.object(requests, 'post')
-    def test_post_req(self, post):
-        """Testing a post request."""
-        Common.perform_request("url", "auth_token", req_type="post",
-                               payload="payload")
-
-        post.assert_called_with("url", data="payload", headers=mock.ANY,
-                                timeout=mock.ANY, verify=True)
-
-    @mock.patch.object(requests, 'get')
-    def test_get_req(self, get):
-        """Testing a get request."""
-        # Run the default get request without any parameters
-        Common.perform_request("url", "auth_token", req_type="get")
-
-        get.assert_called_with("url", params=None, headers=mock.ANY,
-                               timeout=mock.ANY, verify=True)
-
-        # Test with some parameters specified
-        get.reset_mock()
-        Common.perform_request("url", "auth_token", req_type="get",
-                               params="some parameters")
-
-        get.assert_called_with("url", params="some parameters",
-                               headers=mock.ANY, timeout=mock.ANY, verify=True)
-
-    @mock.patch.object(requests, 'put')
-    def test_put_req(self, put):
-        """Testing a put request."""
-        Common.perform_request("url", "auth_token", req_type="put",
-                               payload="payload")
-        put.assert_called_with("url", data="payload", headers=mock.ANY,
-                               timeout=mock.ANY, verify=True)
-
-    @mock.patch.object(requests, 'delete')
-    def test_delete_req(self, delete):
-        """Testing a delete request."""
-        Common.perform_request("url", "auth_token", req_type="delete")
-
-        delete.assert_called_with("url", headers=mock.ANY, timeout=mock.ANY, verify=True)
diff --git a/osm_mon/test/OpenStack/unit/test_metric_calls.py b/osm_mon/test/OpenStack/unit/test_metric_calls.py
deleted file mode 100644 (file)
index 3f89a91..0000000
+++ /dev/null
@@ -1,313 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for all metric request message keys."""
-
-import json
-
-import logging
-
-import unittest
-
-import mock
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.plugins.OpenStack.Gnocchi import metrics as metric_req
-
-from osm_mon.plugins.OpenStack.common import Common
-
-log = logging.getLogger(__name__)
-
-# Mock auth_token and endpoint
-endpoint = mock.ANY
-auth_token = mock.ANY
-
-# Mock a valid metric list for some tests, and a resultant list
-metric_list = [{"name": "disk.write.requests",
-                "id": "metric_id",
-                "unit": "units",
-                "resource_id": "r_id"}]
-result_list = ["metric_id", "r_id", "units", "disk_write_ops"]
-
-
-class Response(object):
-    """Mock a response object for requests."""
-
-    def __init__(self):
-        """Initialise test and status code values."""
-        self.text = json.dumps([{"id": "test_id"}])
-        self.status_code = "STATUS_CODE"
-
-
-def perform_request_side_effect(*args, **kwargs):
-    resp = Response()
-    if 'marker' in args[0]:
-        resp.text = json.dumps([])
-    if 'resource/generic' in args[0]:
-        resp.text = json.dumps({'metrics': {'cpu_util': 'test_id'}})
-    return resp
-
-
-class TestMetricCalls(unittest.TestCase):
-    """Integration test for metric request keys."""
-
-    def setUp(self):
-        """Setup the tests for metric request keys."""
-        super(TestMetricCalls, self).setUp()
-        self.metrics = metric_req.Metrics()
-        self.metrics._common = Common()
-
-    @mock.patch.object(metric_req.Metrics, "get_metric_id")
-    @mock.patch.object(Common, "perform_request")
-    def test_invalid_config_metric_req(
-            self, perf_req, get_metric):
-        """Test the configure metric function, for an invalid metric."""
-        # Test invalid configuration for creating a metric
-        values = {"metric_details": "invalid_metric"}
-
-        m_id, r_id, status = self.metrics.configure_metric(
-            endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_not_called()
-        self.assertEqual(status, False)
-
-        # Test with an invalid metric name, will not perform request
-        values = {"resource_uuid": "r_id"}
-
-        m_id, r_id, status = self.metrics.configure_metric(
-            endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_not_called()
-        self.assertEqual(status, False)
-
-        # If metric exists, it won't be recreated
-        get_metric.return_value = "metric_id"
-
-        m_id, r_id, status = self.metrics.configure_metric(
-            endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_not_called()
-        self.assertEqual(status, False)
-
-    @mock.patch.object(metric_req.Metrics, "get_metric_id")
-    @mock.patch.object(Common, "perform_request")
-    @mock.patch.object(AuthManager, "get_credentials")
-    def test_valid_config_metric_req(
-            self, get_creds, perf_req, get_metric):
-        """Test the configure metric function, for a valid metric."""
-        # Test valid configuration and payload for creating a metric
-        get_creds.return_value = type('obj', (object,), {'config': '{"insecure":true}'})
-        values = {"resource_uuid": "r_id",
-                  "metric_unit": "units",
-                  "metric_name": "cpu_util"}
-        get_metric.return_value = None
-        payload = {"id": "r_id",
-                   "metrics": {"cpu_util":
-                                   {"archive_policy_name": "high",
-                                    "name": "cpu_util",
-                                    "unit": "units"}}}
-
-        perf_req.return_value = type('obj', (object,), {'text': '{"metrics":{"cpu_util":1}, "id":1}'})
-
-        self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_called_with(
-            "<ANY>/v1/resource/generic", auth_token, req_type="post", verify_ssl=False,
-            payload=json.dumps(payload, sort_keys=True))
-
-    @mock.patch.object(Common, "perform_request")
-    def test_delete_metric_req(self, perf_req):
-        """Test the delete metric function."""
-        self.metrics.delete_metric(endpoint, auth_token, "metric_id", verify_ssl=False)
-
-        perf_req.assert_called_with(
-            "<ANY>/v1/metric/metric_id", auth_token, req_type="delete", verify_ssl=False)
-
-    @mock.patch.object(Common, "perform_request")
-    def test_delete_metric_invalid_status(self, perf_req):
-        """Test invalid response for delete request."""
-        perf_req.return_value = type('obj', (object,), {"status_code": "404"})
-
-        status = self.metrics.delete_metric(endpoint, auth_token, "metric_id", verify_ssl=False)
-
-        self.assertEqual(status, False)
-
-    @mock.patch.object(metric_req.Metrics, "response_list")
-    @mock.patch.object(Common, "perform_request")
-    def test_complete_list_metric_req(self, perf_req, resp_list):
-        """Test the complete list metric function."""
-        # Test listing metrics without any configuration options
-        values = {}
-        perf_req.side_effect = perform_request_side_effect
-        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_any_call(
-            "<ANY>/v1/metric?sort=name:asc", auth_token, req_type="get", verify_ssl=False)
-        resp_list.assert_called_with([{u'id': u'test_id'}])
-
-    @mock.patch.object(metric_req.Metrics, "response_list")
-    @mock.patch.object(Common, "perform_request")
-    def test_resource_list_metric_req(self, perf_req, resp_list):
-        """Test the resource list metric function."""
-        # Test listing metrics with a resource id specified
-        values = {"resource_uuid": "resource_id"}
-        perf_req.side_effect = perform_request_side_effect
-        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_any_call(
-            "<ANY>/v1/metric/test_id", auth_token, req_type="get", verify_ssl=False)
-
-    @mock.patch.object(metric_req.Metrics, "response_list")
-    @mock.patch.object(Common, "perform_request")
-    def test_name_list_metric_req(self, perf_req, resp_list):
-        """Test the metric_name list metric function."""
-        # Test listing metrics with a metric_name specified
-        values = {"metric_name": "disk_write_bytes"}
-        perf_req.side_effect = perform_request_side_effect
-        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_any_call(
-            "<ANY>/v1/metric?sort=name:asc", auth_token, req_type="get", verify_ssl=False)
-        resp_list.assert_called_with(
-            [{u'id': u'test_id'}], metric_name="disk_write_bytes")
-
-    @mock.patch.object(metric_req.Metrics, "response_list")
-    @mock.patch.object(Common, "perform_request")
-    def test_combined_list_metric_req(self, perf_req, resp_list):
-        """Test the combined resource and metric list metric function."""
-        # Test listing metrics with a resource id and metric name specified
-
-        values = {"resource_uuid": "resource_id",
-                  "metric_name": "cpu_utilization"}
-        perf_req.side_effect = perform_request_side_effect
-        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_any_call(
-            "<ANY>/v1/metric/test_id", auth_token, req_type="get", verify_ssl=False)
-
-    @mock.patch.object(Common, "perform_request")
-    def test_get_metric_id(self, perf_req):
-        """Test get_metric_id function."""
-        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
-        self.metrics.get_metric_id(endpoint, auth_token, "my_metric", "r_id", verify_ssl=False)
-
-        perf_req.assert_called_with(
-            "<ANY>/v1/resource/generic/r_id", auth_token, req_type="get", verify_ssl=False)
-
-    @mock.patch.object(metric_req.Metrics, "get_metric_id")
-    @mock.patch.object(Common, "perform_request")
-    def test_valid_read_data_req(self, perf_req, get_metric):
-        """Test the read metric data function, for a valid call."""
-        values = {"metric_name": "disk_write_ops",
-                  "resource_uuid": "resource_id",
-                  "collection_unit": "DAY",
-                  "collection_period": 1}
-
-        perf_req.return_value = type('obj', (object,), {'text': '{"metric_data":"[]"}'})
-
-        get_metric.return_value = "metric_id"
-        self.metrics.read_metric_data(endpoint, auth_token, values, verify_ssl=False)
-
-        perf_req.assert_called_once()
-
-    @mock.patch.object(Common, "perform_request")
-    def test_invalid_read_data_req(self, perf_req):
-        """Test the read metric data function, for an invalid call."""
-        # Two empty lists will be returned because the values are invalid
-        values = {}
-
-        times, data = self.metrics.read_metric_data(
-            endpoint, auth_token, values, verify_ssl=False)
-
-        self.assertEqual(times, [])
-        self.assertEqual(data, [])
-
-    def test_complete_response_list(self):
-        """Test the response list function for formatting metric lists."""
-        # Mock a list for testing purposes, with valid OSM metric
-        resp_list = self.metrics.response_list(metric_list)
-
-        # Check for the expected values in the resulting list
-        for l in result_list:
-            self.assertIn(l, resp_list[0].values())
-
-    def test_name_response_list(self):
-        """Test the response list with metric name configured."""
-        # Mock the metric name to test a metric name list
-        # Test with a name that is not in the list
-        invalid_name = "my_metric"
-        resp_list = self.metrics.response_list(
-            metric_list, metric_name=invalid_name)
-
-        self.assertEqual(resp_list, [])
-
-        # Test with a name on the list
-        valid_name = "disk_write_ops"
-        resp_list = self.metrics.response_list(
-            metric_list, metric_name=valid_name)
-
-        # Check for the expected values in the resulting list
-        for l in result_list:
-            self.assertIn(l, resp_list[0].values())
-
-    def test_resource_response_list(self):
-        """Test the response list with resource_id configured."""
-        # Mock a resource_id to test a resource list
-        # Test with resource not on the list
-        invalid_id = "mock_resource"
-        resp_list = self.metrics.response_list(metric_list, resource=invalid_id)
-
-        self.assertEqual(resp_list, [])
-
-        # Test with a resource on the list
-        valid_id = "r_id"
-        resp_list = self.metrics.response_list(metric_list, resource=valid_id)
-
-        # Check for the expected values in the resulting list
-        for l in result_list:
-            self.assertIn(l, resp_list[0].values())
-
-    def test_combined_response_list(self):
-        """Test the response list function with resource_id and metric_name."""
-        # Test for a combined resource and name list
-        # resource and name are on the list
-        valid_name = "disk_write_ops"
-        valid_id = "r_id"
-        resp_list = self.metrics.response_list(
-            metric_list, metric_name=valid_name, resource=valid_id)
-
-        # Check for the expected values in the resulting list
-        for l in result_list:
-            self.assertIn(l, resp_list[0].values())
-
-        # resource not on list
-        invalid_id = "mock_resource"
-        resp_list = self.metrics.response_list(
-            metric_list, metric_name=valid_name, resource=invalid_id)
-
-        self.assertEqual(resp_list, [])
-
-        # metric name not on list
-        invalid_name = "mock_metric"
-        resp_list = self.metrics.response_list(
-            metric_list, metric_name=invalid_name, resource=valid_id)
-
-        self.assertEqual(resp_list, [])
diff --git a/osm_mon/test/OpenStack/unit/test_metric_req.py b/osm_mon/test/OpenStack/unit/test_metric_req.py
deleted file mode 100644 (file)
index 7bb81c9..0000000
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for all metric request message keys."""
-
-import json
-import logging
-import unittest
-
-import mock
-
-from osm_mon.core.auth import AuthManager
-from osm_mon.core.message_bus.producer import KafkaProducer
-from osm_mon.plugins.OpenStack.Gnocchi import metrics as metric_req
-from osm_mon.plugins.OpenStack.common import Common
-
-log = logging.getLogger(__name__)
-
-
-class Message(object):
-    """A class to mock a message object value for metric requests."""
-
-    def __init__(self):
-        """Initialize a mocked message instance."""
-        self.topic = "metric_request"
-        self.key = None
-        self.value = json.dumps({"mock_message": "message_details"})
-
-
-@mock.patch.object(KafkaProducer, 'publish', mock.Mock())
-class TestMetricReq(unittest.TestCase):
-    """Integration test for metric request keys."""
-
-    def setUp(self):
-        """Setup the tests for metric request keys."""
-        super(TestMetricReq, self).setUp()
-        self.metrics = metric_req.Metrics()
-
-    @mock.patch.object(Common, "get_auth_token", mock.Mock())
-    @mock.patch.object(Common, "get_endpoint", mock.Mock())
-    @mock.patch.object(metric_req.Metrics, "delete_metric")
-    @mock.patch.object(metric_req.Metrics, "get_metric_id")
-    @mock.patch.object(AuthManager, "get_credentials")
-    def test_delete_metric_key(self, get_creds, get_metric_id, del_metric):
-        """Test the functionality for a delete metric request."""
-        # Mock a message value and key
-        message = Message()
-        message.key = "delete_metric_request"
-        message.value = json.dumps({"metric_name": "disk_write_ops", "resource_uuid": "my_r_id", "correlation_id": 1})
-
-        get_creds.return_value = type('obj', (object,), {
-            'config': '{"insecure":true}'
-        })
-        del_metric.return_value = True
-
-        # Call the metric functionality and check delete request
-        get_metric_id.return_value = "my_metric_id"
-        self.metrics.metric_calls(message, 'test_id')
-        del_metric.assert_called_with(mock.ANY, mock.ANY, "my_metric_id", False)
-
-    @mock.patch.object(Common, "get_auth_token", mock.Mock())
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(metric_req.Metrics, "list_metrics")
-    @mock.patch.object(AuthManager, "get_credentials")
-    def test_list_metric_key(self, get_creds, list_metrics):
-        """Test the functionality for a list metric request."""
-        # Mock a message with list metric key and value
-        message = Message()
-        message.key = "list_metric_request"
-        message.value = json.dumps({"metrics_list_request": {"correlation_id": 1}})
-
-        get_creds.return_value = type('obj', (object,), {
-            'config': '{"insecure":true}'
-        })
-
-        list_metrics.return_value = []
-
-        # Call the metric functionality and check list functionality
-        self.metrics.metric_calls(message, 'test_id')
-        list_metrics.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 1}, False)
-
-    @mock.patch.object(Common, "get_auth_token", mock.Mock())
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(metric_req.Metrics, "read_metric_data")
-    @mock.patch.object(metric_req.Metrics, "list_metrics")
-    @mock.patch.object(metric_req.Metrics, "delete_metric")
-    @mock.patch.object(metric_req.Metrics, "configure_metric")
-    @mock.patch.object(AuthManager, "get_credentials")
-    @mock.patch.object(Common, "perform_request")
-    def test_update_metric_key(self, perf_req, get_creds, config_metric, delete_metric, list_metrics,
-                               read_data):
-        """Test the functionality for an update metric request."""
-        # Mock a message with update metric key and value
-        message = Message()
-        message.key = "update_metric_request"
-        message.value = json.dumps({"metric_create_request":
-                                        {"correlation_id": 1,
-                                         "metric_name": "my_metric",
-                                         "resource_uuid": "my_r_id"}})
-
-        get_creds.return_value = type('obj', (object,), {
-            'config': '{"insecure":true}'
-        })
-
-        perf_req.return_value = type('obj', (object,), {'text': '{"metric_id":"1"}'})
-
-        # Call metric functionality and confirm no function is called
-        # Gnocchi does not support updating a metric configuration
-        self.metrics.metric_calls(message, 'test_id')
-        config_metric.assert_not_called()
-        list_metrics.assert_not_called()
-        delete_metric.assert_not_called()
-        read_data.assert_not_called()
-
-    @mock.patch.object(Common, "get_auth_token", mock.Mock())
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(metric_req.Metrics, "configure_metric")
-    @mock.patch.object(AuthManager, "get_credentials")
-    def test_config_metric_key(self, get_credentials, config_metric):
-        """Test the functionality for a create metric request."""
-        # Mock a message with create metric key and value
-        message = Message()
-        message.key = "create_metric_request"
-        message.value = json.dumps({"metric_create_request": {"correlation_id": 123}})
-        get_credentials.return_value = type('obj', (object,), {'config': '{"insecure":true}'})
-        # Call metric functionality and check config metric
-        config_metric.return_value = "metric_id", "resource_id", True
-        self.metrics.metric_calls(message, 'test_id')
-        config_metric.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 123}, False)
-
-    @mock.patch.object(Common, "get_auth_token", mock.Mock())
-    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
-    @mock.patch.object(metric_req.Metrics, "read_metric_data")
-    @mock.patch.object(AuthManager, "get_credentials")
-    def test_read_data_key(self, get_creds, read_data):
-        """Test the functionality for a read metric data request."""
-        # Mock a message with a read data key and value
-        message = Message()
-        message.key = "read_metric_data_request"
-        message.value = json.dumps({"alarm_uuid": "alarm_id"})
-
-        get_creds.return_value = type('obj', (object,), {
-            'config': '{"insecure":true}'
-        })
-
-        # Call metric functionality and check read data metrics
-        read_data.return_value = "time_stamps", "data_values"
-        self.metrics.metric_calls(message, 'test_id')
-        read_data.assert_called_with(
-            mock.ANY, mock.ANY, json.loads(message.value), False)
diff --git a/osm_mon/test/OpenStack/unit/test_notifier.py b/osm_mon/test/OpenStack/unit/test_notifier.py
deleted file mode 100644 (file)
index e2695d4..0000000
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for the OpenStack Aodh notifier."""
-
-import json
-import unittest
-
-import mock
-
-from osm_mon.core.database import DatabaseManager, Alarm
-from osm_mon.core.message_bus.producer import KafkaProducer
-from osm_mon.plugins.OpenStack.Aodh.notifier import NotifierHandler
-
-post_data = {"severity": "critical",
-             "alarm_name": "my_alarm",
-             "current": "current_state",
-             "alarm_id": "my_alarm_id",
-             "reason": "Threshold has been broken",
-             "reason_data": {"count": 1,
-                             "most_recent": "null",
-                             "type": "threshold",
-                             "disposition": "unknown"},
-             "previous": "previous_state"}
-
-
-class Response(object):
-    """Mock a response class for generating responses."""
-
-    def __init__(self, text):
-        """Initialise a mock response with a text attribute."""
-        self.text = text
-
-
-class RFile():
-    def read(self, content_length):
-        return json.dumps(post_data, sort_keys=True)
-
-
-class MockNotifierHandler(NotifierHandler):
-    """Mock the NotifierHandler class for testing purposes."""
-
-    def __init__(self):
-        """Initialise mock NotifierHandler."""
-        self.headers = {'Content-Length': '20'}
-        self.rfile = RFile()
-
-    def setup(self):
-        """Mock setup function."""
-        pass
-
-    def handle(self):
-        """Mock handle function."""
-        pass
-
-    def finish(self):
-        """Mock finish function."""
-        pass
-
-
-class TestNotifier(unittest.TestCase):
-    """Test the NotifierHandler class for requests from aodh."""
-
-    def setUp(self):
-        """Setup tests."""
-        super(TestNotifier, self).setUp()
-        self.handler = MockNotifierHandler()
-
-    @mock.patch.object(NotifierHandler, "_set_headers")
-    def test_do_GET(self, set_head):
-        """Tests do_GET. Validates _set_headers has been called."""
-        self.handler.do_GET()
-
-        set_head.assert_called_once()
-
-    @mock.patch.object(NotifierHandler, "notify_alarm")
-    @mock.patch.object(NotifierHandler, "_set_headers")
-    def test_do_POST(self, set_head, notify):
-        """Tests do_POST. Validates notify_alarm has been called."""
-        self.handler.do_POST()
-
-        set_head.assert_called_once()
-        notify.assert_called_with(post_data)
-
-    @mock.patch.object(KafkaProducer, "publish_alarm_response")
-    @mock.patch.object(DatabaseManager, "get_alarm")
-    def test_notify_alarm_valid_alarm(
-            self, get_alarm, notify):
-        """
-        Tests notify_alarm when request from OpenStack references an existing alarm in the DB.
-        Validates KafkaProducer.notify_alarm has been called.
-        """
-        # Generate return values for valid notify_alarm operation
-        mock_alarm = Alarm()
-        get_alarm.return_value = mock_alarm
-
-        self.handler.notify_alarm(post_data)
-
-        notify.assert_called_with("notify_alarm", mock.ANY)
-
-    @mock.patch.object(KafkaProducer, "publish_alarm_response")
-    @mock.patch.object(DatabaseManager, "get_alarm")
-    def test_notify_alarm_invalid_alarm(
-            self, get_alarm, notify):
-        """
-        Tests notify_alarm when request from OpenStack references a non-existent alarm in the DB.
-        Validates Exception is thrown and KafkaProducer.notify_alarm has not been called.
-        """
-        # Generate return values for valid notify_alarm operation
-        get_alarm.return_value = None
-
-        with self.assertRaises(Exception):
-            self.handler.notify_alarm(post_data)
-        notify.assert_not_called()
diff --git a/osm_mon/test/OpenStack/unit/test_responses.py b/osm_mon/test/OpenStack/unit/test_responses.py
deleted file mode 100644 (file)
index 6cf4e3f..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Test that the correct responses are generated for each message."""
-
-import logging
-
-import unittest
-
-import mock
-
-from osm_mon.plugins.OpenStack import response as resp
-
-log = logging.getLogger(__name__)
-
-
-class TestOpenStackResponse(unittest.TestCase):
-    """Tests for responses generated by the OpenStack plugins."""
-
-    def setUp(self):
-        """Setup for testing OpenStack plugin responses."""
-        super(TestOpenStackResponse, self).setUp()
-        self.plugin_resp = resp.OpenStack_Response()
-
-    def test_invalid_key(self):
-        """Test if an invalid key is entered for a response."""
-        message = self.plugin_resp.generate_response("mock_invalid_key")
-        self.assertEqual(message, None)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "alarm_list_response")
-    def test_list_alarm_resp(self, alarm_list_resp):
-        """Test out a function call for a list alarm response."""
-        message = self.plugin_resp.generate_response("list_alarm_response")
-        self.assertEqual(alarm_list_resp.return_value, message)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "list_metric_response")
-    def test_list_metric_resp(self, metric_list_resp):
-        """Test list metric response function call."""
-        message = self.plugin_resp.generate_response("list_metric_response")
-        self.assertEqual(message, metric_list_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "delete_alarm_response")
-    def test_delete_alarm_resp(self, del_alarm_resp):
-        """Test delete alarm response function call."""
-        message = self.plugin_resp.generate_response("delete_alarm_response")
-        self.assertEqual(message, del_alarm_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "delete_metric_response")
-    def test_delete_metric_resp(self, del_metric_resp):
-        """Test the response functionality of delete metric response."""
-        message = self.plugin_resp.generate_response("delete_metric_response")
-        self.assertEqual(message, del_metric_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "create_alarm_response")
-    def test_create_alarm_resp(self, config_alarm_resp):
-        """Test create alarm response function call."""
-        message = self.plugin_resp.generate_response("create_alarm_response")
-        self.assertEqual(message, config_alarm_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "metric_create_response")
-    def test_create_metric_resp(self, config_metric_resp):
-        """Test create metric response function call."""
-        message = self.plugin_resp.generate_response("create_metric_response")
-        self.assertEqual(message, config_metric_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "update_alarm_response")
-    def test_update_alarm_resp(self, up_alarm_resp):
-        """Test update alarm response function call."""
-        message = self.plugin_resp.generate_response("update_alarm_response")
-        self.assertEqual(message, up_alarm_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "update_metric_response")
-    def test_update_metric_resp(self, up_metric_resp):
-        """Test update metric response function call."""
-        message = self.plugin_resp.generate_response("update_metric_response")
-        self.assertEqual(message, up_metric_resp.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "notify_alarm")
-    def test_notify_alarm(self, notify_alarm):
-        """Test notify alarm response function call."""
-        message = self.plugin_resp.generate_response("notify_alarm")
-        self.assertEqual(message, notify_alarm.return_value)
-
-    @mock.patch.object(
-        resp.OpenStack_Response, "read_metric_data_response")
-    def test_read_metric_data_resp(self, read_data_resp):
-        """Test read metric data response function call."""
-        message = self.plugin_resp.generate_response(
-            "read_metric_data_response")
-        self.assertEqual(message, read_data_resp.return_value)
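
The deleted tests above all exercise one pattern: patch a single response-builder method on OpenStack_Response with mock.patch.object and check that generate_response() dispatches to it by key, returning None for an unknown key. A minimal, self-contained sketch of that dispatch-and-test pattern follows; the Response class here is a hypothetical stand-in, not the osm_mon implementation, and unittest.mock is used in place of the external mock package.

import unittest
from unittest import mock


class Response(object):
    """Hypothetical stand-in for resp.OpenStack_Response."""

    def list_alarm_response(self, **kwargs):
        return {"schema_type": "list_alarm_response"}

    def generate_response(self, key, **kwargs):
        # Dispatch by key; unknown keys yield None.
        handlers = {"list_alarm_response": self.list_alarm_response}
        handler = handlers.get(key)
        return handler(**kwargs) if handler else None


class TestResponseDispatch(unittest.TestCase):

    def test_invalid_key(self):
        self.assertIsNone(Response().generate_response("mock_invalid_key"))

    @mock.patch.object(Response, "list_alarm_response")
    def test_list_alarm_resp(self, m_list):
        # generate_response() should return whatever the patched handler returns.
        message = Response().generate_response("list_alarm_response")
        self.assertEqual(message, m_list.return_value)


if __name__ == "__main__":
    unittest.main()
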
diff --git a/osm_mon/test/OpenStack/unit/test_settings.py b/osm_mon/test/OpenStack/unit/test_settings.py
deleted file mode 100644 (file)
index 42619f8..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
-##
-"""Tests for settings for OpenStack plugins configurations."""
-
-import logging
-import os
-import unittest
-
-from osm_mon.core.settings import Config
-
-log = logging.getLogger(__name__)
-
-
-class TestSettings(unittest.TestCase):
-    """Test the settings class for OpenStack plugin configuration."""
-
-    def setUp(self):
-        """Test Setup."""
-        super(TestSettings, self).setUp()
-        self.cfg = Config.instance()
-
-    def test_set_os_username(self):
-        """Test reading the environment for OpenStack plugin configuration."""
-        os.environ["OS_NOTIFIER_URI"] = "test"
-        self.cfg.read_environ()
-
-        self.assertEqual(self.cfg.OS_NOTIFIER_URI, "test")
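
The single test deleted here covers the environment-driven configuration pattern: Config.instance() returns a shared object and read_environ() copies selected OS_* variables from os.environ onto its attributes. A minimal sketch of that pattern, assuming a simple singleton (the real osm_mon Config reads many more settings):

import os


class Config(object):
    """Hypothetical, simplified singleton-style configuration holder."""

    _instance = None

    @classmethod
    def instance(cls):
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def read_environ(self):
        # Copy selected variables from the environment; only one key shown here.
        self.OS_NOTIFIER_URI = os.environ.get("OS_NOTIFIER_URI", "")


if __name__ == "__main__":
    os.environ["OS_NOTIFIER_URI"] = "test"
    cfg = Config.instance()
    cfg.read_environ()
    assert cfg.OS_NOTIFIER_URI == "test"
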
diff --git a/osm_mon/test/VMware/__init__.py b/osm_mon/test/VMware/__init__.py
deleted file mode 100644 (file)
index e2d83ed..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-##
-# Copyright 2017-2018 VMware Inc.
-# This file is part of ETSI OSM
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact:  osslegalrouting@vmware.com
-##
-
-"""VMware MON plugin tests."""
-
-import logging
-
-# Initialise a logger for tests
-logging.basicConfig(filename='vmware_mon_tests.log',
-                    format='%(asctime)s %(message)s',
-                    datefmt='%m/%d/%Y %I:%M:%S %p', filemode='a',
-                    level=logging.INFO)
-log = logging.getLogger(__name__)
-
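
The vROPs plugin tests deleted below rely throughout on the same technique: patch requests.post (or requests.get) at module level, set status_code and content on its return value, and assert on the id parsed from the JSON body, expecting None for non-2xx responses. A short, self-contained sketch of that technique, where create_symptom() is a hypothetical wrapper written only to make the pattern runnable, not the plugin's actual method:

import json
import unittest
from unittest import mock

import requests


def create_symptom(params):
    """Hypothetical wrapper: POST a symptom definition and return its id, or None."""
    response = requests.post("https://vrops.example/suite-api/api/symptomdefinitions",
                             json=params, verify=False)
    if response.status_code != 201:
        return None
    return json.loads(response.content)["id"]


class TestCreateSymptom(unittest.TestCase):

    @mock.patch.object(requests, "post")
    def test_valid_response(self, m_post):
        # Simulate a successful vROPs reply.
        m_post.return_value.status_code = 201
        m_post.return_value.content = '{"id": "SymptomDefinition-1234"}'
        self.assertEqual(create_symptom({"metric_key": "cpu|usage_average"}),
                         "SymptomDefinition-1234")
        m_post.assert_called()

    @mock.patch.object(requests, "post")
    def test_invalid_response(self, m_post):
        # Non-201 responses should map to None.
        m_post.return_value.status_code = 404
        m_post.return_value.content = "404 Not Found"
        self.assertIsNone(create_symptom({}))


if __name__ == "__main__":
    unittest.main()
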
diff --git a/osm_mon/test/VMware/test_mon_plugin_vrops.py b/osm_mon/test/VMware/test_mon_plugin_vrops.py
deleted file mode 100644 (file)
index 30e9a3d..0000000
+++ /dev/null
@@ -1,3168 +0,0 @@
-# -*- coding: utf-8 -*-
-
-##
-# Copyright 2017-2018 VMware Inc.
-# This file is part of ETSI OSM
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact:  osslegalrouting@vmware.com
-##
-
-""" Mock tests for VMware vROPs Mon plugin """
-
-import sys
-
-import json
-
-import logging
-
-import unittest
-
-import mock
-
-import requests
-
-import os
-
-sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),"..","..",".."))
-
-from osm_mon.plugins.vRealiseOps import mon_plugin_vrops as monPlugin
-
-from pyvcloud.vcd.client import Client,BasicLoginCredentials
-
-
-class TestMonPlugin(unittest.TestCase):
-    """Test class for vROPs Mon Plugin class methods"""
-
-    def setUp(self):
-        """Setup the tests for Mon Plugin class methods"""
-        super(TestMonPlugin, self).setUp()
-
-        self.m_vim_access_config = {'vrops_site':'abc',
-            'vrops_user':'user',
-            'vrops_password':'passwd',
-            'vim_url':'vcd_url',
-            'admin_username':'admin',
-            'admin_password':'admin_passwd',
-            'vim_uuid':'1',
-            'tenant_id':'org_vdc_1'}
-        self.mon_plugin = monPlugin.MonPlugin(self.m_vim_access_config)
-        # create client object
-        self.vca = Client('test', verify_ssl_certs=False)
-        # create session
-        self.session = requests.Session()
-
-
-    def test_get_default_Params_valid_metric_alarm_name(self):
-        """Test get default params method"""
-
-        # Mock valid metric_alarm_name and response
-        metric_alarm_name = "Average_Memory_Usage_Above_Threshold"
-        expected_return = {'impact': 'risk', 'cancel_cycles': 2, 'adapter_kind': 'VMWARE',
-                           'repeat': False,'cancel_period': 300, 'alarm_type': 16,
-                           'vrops_alarm': 'Avg_Mem_Usage_Above_Thr','enabled': True, 'period': 300,
-                           'resource_kind': 'VirtualMachine', 'alarm_subType': 19,
-                           'action': 'acknowledge', 'evaluation': 2, 'unit': '%'}
-
-        # call get default param function under test
-        actual_return = self.mon_plugin.get_default_Params(metric_alarm_name)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    def test_get_default_Params_invalid_metric_alarm_name(self):
-        """Test get default params method invalid metric alarm"""
-
-        # Mock valid metric_alarm_name and response
-        metric_alarm_name = "Invalid_Alarm"
-        expected_return = {}
-
-        # call get default param function under test
-        actual_return = self.mon_plugin.get_default_Params(metric_alarm_name)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    def test_create_symptom_valid_req_response(self, m_post):
-        """Test create symptom method-valid request"""
-
-        # Mock valid symptom params and mock responses
-        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
-                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
-                         'symptom_name':\
-                         'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                         'operation': 'GT', 'wait_cycles': 1, 'metric_key': 'cpu|usage_average'}
-
-        m_post.return_value.status_code = 201
-        m_post.return_value.content = \
-                         '{"id":"SymptomDefinition-351c23b4-bc3c-4c7b-b4af-1ad90a673c5d",\
-                         "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-                         "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine",\
-                         "waitCycles":1,"cancelCycles":1,\
-                         "state":{"severity":"CRITICAL","condition":{"type":"CONDITION_HT",\
-                         "key":"cpu|usage_average","operator":"GT","value":"0.0",\
-                         "valueType":"NUMERIC",\
-                         "instanced":false,"thresholdType":"STATIC"}}}'
-
-        expected_return = "SymptomDefinition-351c23b4-bc3c-4c7b-b4af-1ad90a673c5d"
-
-        # call create symptom method under test
-        actual_return = self.mon_plugin.create_symptom(symptom_param)
-
-        # verify that mocked method is called
-        m_post.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    def test_create_symptom_invalid_req_response(self, m_post):
-        """Test create symptom method-invalid response"""
-
-        # Mock valid symptom params and invalid  mock responses
-        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
-                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
-                         'symptom_name':\
-                         'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                         'operation': 'GT', 'wait_cycles': 1, 'metric_key': 'cpu|usage_average'}
-
-        m_post.return_value.status_code = 404
-        m_post.return_value.content = '404 Not Found'
-
-        expected_return = None
-
-        # call create symptom method under test
-        actual_return = self.mon_plugin.create_symptom(symptom_param)
-
-        # verify that mocked method is called
-        m_post.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    def test_create_symptom_incorrect_data(self, m_post):
-        """Test create symptom method-incorrect data"""
-
-        # Mock symptom params with missing data (wait_cycles is absent)
-        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
-                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
-                         'symptom_name':\
-                         'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                         'operation': 'GT', 'metric_key': 'cpu|usage_average'}
-
-        expected_return = None
-
-        # call create symptom method under test
-        actual_return = self.mon_plugin.create_symptom(symptom_param)
-
-        # verify that mocked method is not called
-        m_post.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    def test_create_alarm_definition_valid_req_response(self, m_post):
-        """Test create alarm definition method-valid response"""
-
-        # Mock valid alarm params and mock responses
-        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
-                       'subType': 19, 'waitCycles': 1,
-                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                       'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'resourceKindKey': 'VirtualMachine', 'type': 16,
-                       'symptomDefinitionId':\
-                       'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
-
-        m_post.return_value.status_code = 201
-        m_post.return_value.content = \
-                       '{"id":"AlertDefinition-d4f21e4b-770a-45d6-b298-022eaf489115",\
-                       "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-                       "description":"CPU_Utilization_Above_Threshold","adapterKindKey":"VMWARE",\
-                       "resourceKindKey":"VirtualMachine","waitCycles":1,"cancelCycles":1,\
-                       "type":16,"subType":19,\
-                       "states":[{"severity":"CRITICAL","base-symptom-set":{"type":"SYMPTOM_SET",\
-                       "relation":"SELF","aggregation":"ALL","symptomSetOperator":"AND",\
-                       "symptomDefinitionIds":\
-                       ["SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df"]},\
-                       "impact":{"impactType":"BADGE","detail":"risk"}}]}'
-
-        expected_return = "AlertDefinition-d4f21e4b-770a-45d6-b298-022eaf489115"
-
-        # call create alarm definition method under test
-        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
-
-        # verify that mocked method is called
-        m_post.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    def test_create_alarm_definition_invalid_req_response(self, m_post):
-        """Test create alarm definition method-invalid response"""
-
-        # Mock valid alarm params and mock responses
-        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
-                       'subType': 19, 'waitCycles': 1,
-                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                       'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'resourceKindKey': 'VirtualMachine', 'type': 16,
-                       'symptomDefinitionId':\
-                       'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
-
-        m_post.return_value.status_code = 404
-        m_post.return_value.content = '404 Not Found'
-
-        expected_return = None
-
-        # call create alarm definition method under test
-        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
-
-        # verify that mocked method is called
-        m_post.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    def test_create_alarm_definition_incorrect_data(self, m_post):
-        """Test create alarm definition method-incorrect data"""
-
-        # Mock incorrect alarm param
-        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
-                       'subType': 19, 'waitCycles': 1, 'type': 16,
-                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                       'symptomDefinitionId':\
-                       'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
-        expected_return = None
-
-        # call create symptom method under test
-        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
-
-        # verify that mocked method is not called
-        m_post.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_valid_req(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-
-        """Test configure alarm valid request creating alarm"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'cpu_utilization',
-                       'vdu_name':'vdu1','vnf_member_index':'index1','ns_id':'nsd1',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        #symptom parameters to be passed for symptom creation
-        symptom_params = {'threshold_value': 0,
-                          'cancel_cycles': 1,
-                          'adapter_kind_key': 'VMWARE',
-                          'resource_kind_key': 'VirtualMachine',
-                          'severity': 'CRITICAL',
-                          'symptom_name':\
-                          'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                          'operation': 'GT',
-                          'wait_cycles': 1,
-                          'metric_key': 'cpu|usage_average'}
-
-        #alarm parameters to  be passed for alarm creation
-        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
-                        'cancelCycles': 1, 'subType': 19,
-                        'waitCycles': 1, 'severity': 'CRITICAL',
-                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                        'resourceKindKey': 'VirtualMachine',
-                        'symptomDefinitionId':\
-                        'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
-                        'type': 16}
-
-        vm_moref_id = 'vm-6626'
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_def = 'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-        resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        alarm_def_uuid = '0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-
-        #set mocked function return values
-        m_get_alarm_defination_by_name.return_value = []
-        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
-        m_create_alarm_definition.return_value =\
-        'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-        m_get_vm_moref_id.return_value = vm_moref_id
-        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_create_alarm_notification_rule.return_value = 'f37900e7-dd01-4383-b84c-08f519530d71'
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_called_with(symptom_params)
-        m_create_alarm_definition.assert_called_with(alarm_params)
-        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-        m_create_alarm_notification_rule.assert_called_with(vrops_alarm_name,\
-                                                            alarm_def,\
-                                                            resource_id)
-        m_save_alarm.assert_called_with(alarm_def_uuid,'1',
-                                        config_dict['threshold_value'],
-                                        config_dict['operation'],
-                                        config_dict['metric_name'],
-                                        config_dict['vdu_name'],
-                                        config_dict['vnf_member_index'],
-                                        config_dict['ns_id'])
-
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_invalid_alarm_name_req(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm invalid test: for invalid alarm name"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        alarm_def_uuid = None
-
-        #Mock default Parameters return value to None
-        m_get_default_Params.return_value = {}
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        m_get_default_Params.assert_called_with(config_dict['alarm_name'])
-        m_get_alarm_defination_by_name.assert_not_called()
-        m_create_symptom.assert_not_called()
-        m_create_alarm_definition.assert_not_called()
-        m_get_vm_moref_id.assert_not_called()
-        m_get_vm_resource_id.assert_not_called()
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value i.e. None
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_invalid_metric_name_req(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm invalid test: for invalid metric name"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        alarm_def_uuid = None
-
-        #Mock default Parameters return values for metrics to None
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {}
-                                            ]
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_not_called()
-        m_create_symptom.assert_not_called()
-        m_create_alarm_definition.assert_not_called()
-        m_get_vm_moref_id.assert_not_called()
-        m_get_vm_resource_id.assert_not_called()
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value i.e. None
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_invalid_already_exists(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm invalid test: for alarm that already exists"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_def_uuid = None
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-        #set mocked function return value
-        m_get_alarm_defination_by_name.return_value = ['mocked_alarm_CPU_Utilization_Above_Thr']
-
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_not_called()
-        m_create_alarm_definition.assert_not_called()
-        m_get_vm_moref_id.assert_not_called()
-        m_get_vm_resource_id.assert_not_called()
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_failed_symptom_creation(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm: failed to create symptom"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        #symptom parameters to be passed for symptom creation
-        symptom_params = {'threshold_value': 0,
-                          'cancel_cycles': 1,
-                          'adapter_kind_key': 'VMWARE',
-                          'resource_kind_key': 'VirtualMachine',
-                          'severity': 'CRITICAL',
-                          'symptom_name':\
-                          'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                          'operation': 'GT',
-                          'wait_cycles': 1,
-                          'metric_key': 'cpu|usage_average'}
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_def_uuid = None
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-        #set mocked function return values
-        m_get_alarm_defination_by_name.return_value = []
-        m_create_symptom.return_value = None
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_called_with(symptom_params)
-        m_create_alarm_definition.assert_not_called()
-        m_get_vm_moref_id.assert_not_called()
-        m_get_vm_resource_id.assert_not_called()
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_failed_alert_creation(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm: failed to create alert in vROPs"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        #symptom parameters to be passed for symptom creation
-        symptom_params = {'threshold_value': 0,
-                          'cancel_cycles': 1,
-                          'adapter_kind_key': 'VMWARE',
-                          'resource_kind_key': 'VirtualMachine',
-                          'severity': 'CRITICAL',
-                          'symptom_name':\
-                          'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                          'operation': 'GT',
-                          'wait_cycles': 1,
-                          'metric_key': 'cpu|usage_average'}
-
-        #alarm parameters to  be passed for alarm creation
-        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
-                        'cancelCycles': 1, 'subType': 19,
-                        'waitCycles': 1, 'severity': 'CRITICAL',
-                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                        'resourceKindKey': 'VirtualMachine',
-                        'symptomDefinitionId':\
-                        'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
-                        'type': 16}
-
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_def_uuid = None
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-        #set mocked function return values
-        m_get_alarm_defination_by_name.return_value = []
-        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
-        m_create_alarm_definition.return_value = None
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_called_with(symptom_params)
-        m_create_alarm_definition.assert_called_with(alarm_params)
-        m_get_vm_moref_id.assert_not_called()
-        m_get_vm_resource_id.assert_not_called()
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_failed_to_get_vm_moref_id(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm: failed to get vm_moref_id"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        #symptom parameters to be passed for symptom creation
-        symptom_params = {'threshold_value': 0,
-                          'cancel_cycles': 1,
-                          'adapter_kind_key': 'VMWARE',
-                          'resource_kind_key': 'VirtualMachine',
-                          'severity': 'CRITICAL',
-                          'symptom_name':\
-                          'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                          'operation': 'GT',
-                          'wait_cycles': 1,
-                          'metric_key': 'cpu|usage_average'}
-
-        #alarm parameters to  be passed for alarm creation
-        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
-                        'cancelCycles': 1, 'subType': 19,
-                        'waitCycles': 1, 'severity': 'CRITICAL',
-                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                        'resourceKindKey': 'VirtualMachine',
-                        'symptomDefinitionId':\
-                        'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
-                        'type': 16}
-
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_def_uuid = None
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-        #set mocked function return values
-        m_get_alarm_defination_by_name.return_value = []
-        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
-        m_create_alarm_definition.return_value =\
-        'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-        m_get_vm_moref_id.return_value = None
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_called_with(symptom_params)
-        m_create_alarm_definition.assert_called_with(alarm_params)
-        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
-        m_get_vm_resource_id.assert_not_called()
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_failed_to_get_vm_resource_id(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm: failed to get vm resource_id"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        #symptom parameters to be passed for symptom creation
-        symptom_params = {'threshold_value': 0,
-                          'cancel_cycles': 1,
-                          'adapter_kind_key': 'VMWARE',
-                          'resource_kind_key': 'VirtualMachine',
-                          'severity': 'CRITICAL',
-                          'symptom_name':\
-                          'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                          'operation': 'GT',
-                          'wait_cycles': 1,
-                          'metric_key': 'cpu|usage_average'}
-
-        #alarm parameters to  be passed for alarm creation
-        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
-                        'cancelCycles': 1, 'subType': 19,
-                        'waitCycles': 1, 'severity': 'CRITICAL',
-                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                        'resourceKindKey': 'VirtualMachine',
-                        'symptomDefinitionId':\
-                        'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
-                        'type': 16}
-
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vm_moref_id = 'vm-6626'
-        alarm_def_uuid = None
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-        #set mocked function return values
-        m_get_alarm_defination_by_name.return_value = []
-        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
-        m_create_alarm_definition.return_value =\
-        'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-        m_get_vm_moref_id.return_value = vm_moref_id
-        m_get_vm_resource_id.return_value = None
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_called_with(symptom_params)
-        m_create_alarm_definition.assert_called_with(alarm_params)
-        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-        m_create_alarm_notification_rule.assert_not_called()
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_configure_alarm_failed_to_create_alarm_notification_rule(self, m_get_default_Params,\
-                                       m_get_alarm_defination_by_name,\
-                                       m_create_symptom,\
-                                       m_create_alarm_definition,\
-                                       m_get_vm_moref_id,\
-                                       m_get_vm_resource_id,\
-                                       m_create_alarm_notification_rule,\
-                                       m_save_alarm):
-        """Test configure alarm: failed to create alarm notification rule"""
-
-        #Mock input configuration dictionary
-        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
-                       'alarm_name': 'CPU_Utilization_Above_Threshold',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
-                       'operation': 'GT', 'unit': '%',
-                       'description': 'CPU_Utilization_Above_Threshold'}
-
-        #symptom parameters to be passed for symptom creation
-        symptom_params = {'threshold_value': 0,
-                          'cancel_cycles': 1,
-                          'adapter_kind_key': 'VMWARE',
-                          'resource_kind_key': 'VirtualMachine',
-                          'severity': 'CRITICAL',
-                          'symptom_name':\
-                          'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                          'operation': 'GT',
-                          'wait_cycles': 1,
-                          'metric_key': 'cpu|usage_average'}
-
-        #alarm parameters to  be passed for alarm creation
-        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
-                        'cancelCycles': 1, 'subType': 19,
-                        'waitCycles': 1, 'severity': 'CRITICAL',
-                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
-                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                        'resourceKindKey': 'VirtualMachine',
-                        'symptomDefinitionId':\
-                        'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
-                        'type': 16}
-
-        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vm_moref_id = 'vm-6626'
-        alarm_def = 'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-        resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        alarm_def_uuid = None
-
-        #Mock default Parameters for alarm & metric configuration
-        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
-                                             'adapter_kind': 'VMWARE', 'repeat': False,
-                                             'cancel_period': 300, 'alarm_type': 16,
-                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
-                                             'enabled': True, 'period': 300,
-                                             'resource_kind': 'VirtualMachine',
-                                             'alarm_subType': 19, 'action': 'acknowledge',
-                                             'evaluation': 1, 'unit': 'msec'},
-                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
-                                            ]
-        #set mocked function return values
-        m_get_alarm_defination_by_name.return_value = []
-        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
-        m_create_alarm_definition.return_value =\
-        'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
-        m_get_vm_moref_id.return_value = vm_moref_id
-        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_create_alarm_notification_rule.return_value = None
-
-        #Call configure_alarm method under test
-        return_value = self.mon_plugin.configure_alarm(config_dict)
-
-        #Verify that mocked methods are called with correct parameters
-        self.assertEqual(m_get_default_Params.call_count, 2)
-        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
-        m_create_symptom.assert_called_with(symptom_params)
-        m_create_alarm_definition.assert_called_with(alarm_params)
-        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-        m_create_alarm_notification_rule.assert_called_with(vrops_alarm_name, alarm_def, resource_id)
-        m_save_alarm.assert_not_called()
-
-        #Verify return value with expected value of alarm_def_uuid
-        self.assertEqual(return_value, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_alarm_defination_details_valid_rest_req_response(self, m_get):
-        """Test get_alarm_defination_details: For a valid REST request response"""
-
-        alarm_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{"id":"AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
-                            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-                            "description":"CPU_Utilization_Above_Threshold",\
-                            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine",\
-                            "waitCycles":1,"cancelCycles":1,"type":16,"subType":19,\
-                            "states":[{"severity":"CRITICAL","base-symptom-set":\
-                            {"type":"SYMPTOM_SET","relation":"SELF",\
-                            "aggregation":"ALL","symptomSetOperator":"AND","symptomDefinitionIds":\
-                            ["SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]},\
-                            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
-
-        expected_alarm_details = {'adapter_kind': 'VMWARE','symptom_definition_id':\
-                                  'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278',
-                                  'alarm_name':\
-                                  'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                                  'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
-                                  'resource_kind': 'VirtualMachine', 'type': 16, 'sub_type': 19}
-
-        expected_alarm_details_json = {'states':
-                                       [{'impact':
-                                         {'impactType':'BADGE', 'detail':'risk'},'severity':'CRITICAL',
-                                         'base-symptom-set': {'symptomDefinitionIds':\
-                                         ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
-                                         'relation': 'SELF', 'type': 'SYMPTOM_SET',
-                                         'aggregation':'ALL', 'symptomSetOperator': 'AND'}}],
-                                       'adapterKindKey': 'VMWARE',
-                                       'description': 'CPU_Utilization_Above_Threshold',
-                                       'type': 16, 'cancelCycles': 1, 
-                                       'resourceKindKey': 'VirtualMachine',
-                                       'subType': 19, 'waitCycles': 1,
-                                       'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
-                                       'name':\
-                                       'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
-
-        #Call get_alarm_defination_details method under test
-        alarm_details_json, alarm_details = self.mon_plugin.get_alarm_defination_details(alarm_uuid)
-
-        #Verify that mocked method is called
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_alarm_details, alarm_details)
-        self.assertEqual(expected_alarm_details_json, alarm_details_json)
-
-
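The REST-level tests here all follow the same mocking recipe: patch requests.get and shape return_value so the code under test can read status_code and JSON-decode content. A short hedged sketch of that recipe, using a hypothetical helper (fetch_alarm_name is not part of the plugin):

    import json
    from unittest import mock

    import requests

    def fetch_alarm_name(url):
        # Hypothetical caller (not the plugin code): returns the alarm name on
        # HTTP 200, otherwise None.
        resp = requests.get(url)
        if resp.status_code != 200:
            return None
        return json.loads(resp.content)['name']

    with mock.patch.object(requests, 'get') as m_get:
        m_get.return_value.status_code = 200
        m_get.return_value.content = '{"name": "CPU_Utilization_Above_Threshold"}'
        name = fetch_alarm_name('https://vrops.example/suite-api/api/alertdefinitions/x')
        assert name == 'CPU_Utilization_Above_Threshold'
        m_get.assert_called()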
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_alarm_defination_details_invalid_rest_req_response(self, m_get):
-        """Test get_alarm_defination_details: For an invalid REST request response"""
-
-        alarm_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 404
-        m_get.return_value.content = '{"message": "No such AlertDefinition - \
-                                        AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444.",\
-                                        "httpStatusCode": 404,"apiErrorCode": 404}'
-
-        expected_alarm_details = None
-        expected_alarm_details_json = None
-
-        #Call get_alarm_defination_details method under test
-        alarm_details_json, alarm_details = self.mon_plugin.get_alarm_defination_details(alarm_uuid)
-
-        #verify that mocked method is called
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_alarm_details, alarm_details)
-        self.assertEqual(expected_alarm_details_json, alarm_details_json)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_alarm_defination_by_name_valid_rest_req_response(self, m_get):
-        """Test get_alarm_defination_by_name: For a valid REST request response"""
-
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{"pageInfo": {"totalCount": 1,"page": 0,"pageSize": 1000},\
-                                    "links": [\
-                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
-                                        "rel": "SELF","name": "current"},\
-                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
-                                         "rel": "RELATED","name": "first"},\
-                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
-                                         "rel": "RELATED","name": "last"}],\
-                                    "alertDefinitions": [{\
-                                        "id": "AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
-                                        "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-                                        "description": "CPU_Utilization_Above_Threshold",\
-                                        "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine",\
-                                        "waitCycles": 1,"cancelCycles": 1,"type": 16,"subType": 19,\
-                                        "states": [{"impact": {"impactType": "BADGE","detail": "risk"},\
-                                            "severity": "CRITICAL",\
-                                            "base-symptom-set": {"type": "SYMPTOM_SET",\
-                                            "relation": "SELF","aggregation": "ALL",\
-                                            "symptomSetOperator": "AND",\
-                                            "symptomDefinitionIds": [\
-                                            "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]}}]\
-                                        }]}'
-
-        #Expected return match list
-        Exp_alert_match_list = [{'states':
-                                   [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
-                                     'severity': 'CRITICAL',
-                                     'base-symptom-set': {
-                                         'symptomDefinitionIds': \
-                                         ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
-                                         'relation': 'SELF',
-                                         'type': 'SYMPTOM_SET',
-                                         'aggregation': 'ALL',
-                                         'symptomSetOperator': 'AND'}
-                                     }],
-                                   'adapterKindKey': 'VMWARE',
-                                   'description': 'CPU_Utilization_Above_Threshold',
-                                   'type': 16,
-                                   'cancelCycles': 1,
-                                   'resourceKindKey': 'VirtualMachine',
-                                   'subType': 19, 'waitCycles': 1,
-                                   'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
-                                   'name': \
-                                   'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-                                   }]
-
-        #Call get_alarm_defination_by_name method under test
-        alert_match_list = self.mon_plugin.get_alarm_defination_by_name(alarm_name)
-
-        #Verify that mocked method is called
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(Exp_alert_match_list, alert_match_list)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_alarm_defination_by_name_no_valid_alarm_found(self, m_get):
-        """Test get_alarm_defination_by_name: With no valid alarm found in returned list"""
-
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda5'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{"pageInfo": {"totalCount": 1,"page": 0,"pageSize": 1000},\
-                                    "links": [\
-                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
-                                        "rel": "SELF","name": "current"},\
-                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
-                                         "rel": "RELATED","name": "first"},\
-                                        {"href": "/suite-api/api/alertdefinitions?page=0&amp;pageSize=1000",\
-                                         "rel": "RELATED","name": "last"}],\
-                                    "alertDefinitions": [{\
-                                        "id": "AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
-                                        "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-                                        "description": "CPU_Utilization_Above_Threshold",\
-                                        "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine",\
-                                        "waitCycles": 1,"cancelCycles": 1,"type": 16,"subType": 19,\
-                                        "states": [{"impact": {"impactType": "BADGE","detail": "risk"},\
-                                            "severity": "CRITICAL",\
-                                            "base-symptom-set": {"type": "SYMPTOM_SET",\
-                                            "relation": "SELF","aggregation": "ALL",\
-                                            "symptomSetOperator": "AND",\
-                                            "symptomDefinitionIds": [\
-                                            "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]}}]\
-                                        }]}'
-
-        #Expected return match list
-        Exp_alert_match_list = []
-
-        #Call get_alarm_defination_by_name method under test
-        alert_match_list = self.mon_plugin.get_alarm_defination_by_name(alarm_name)
-
-        #Verify that mocked method is called
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(Exp_alert_match_list, alert_match_list)
-
-
-    @mock.patch.object(monPlugin.requests, 'put')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
-    def test_update_symptom_defination_valid_symptom_req_response(self,\
-                                                                  m_get_symptom_defination_details,\
-                                                                  m_put):
-        """Test update_symptom_defination: With valid REST response, update symptom"""
-
-        #Expected symptom to be updated
-        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
-        new_alarm_config = {'severity':"CRITICAL",
-                            'operation': 'GT',
-                            'threshold_value':5,
-                            'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'
-                            }
-
-        #Set mocked function's return values
-        m_get_symptom_defination_details.return_value = {
-                            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
-                            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
-                            "adapterKindKey": "VMWARE",
-                            "resourceKindKey": "VirtualMachine",
-                            "waitCycles": 1,
-                            "cancelCycles": 1,
-                            "state": {"severity": "CRITICAL",
-                                      "condition": {
-                                          "type": "CONDITION_HT",
-                                          "key": "cpu|usage_average","operator": "GT","value": "0.0",
-                                          "valueType": "NUMERIC","instanced": False,
-                                          "thresholdType": "STATIC"}
-                                      }
-                           }
-
-        m_put.return_value.status_code = 200
-        m_put.return_value.content = '{\
-            "id":"SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
-            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine","waitCycles":1,\
-            "cancelCycles":1,\
-            "state":{\
-                "severity":"CRITICAL",\
-                "condition":{\
-                    "type":"CONDITION_HT","key":"cpu|usage_average","operator":"GT","value":"5.0",\
-                    "valueType":"NUMERIC","instanced":false,"thresholdType":"STATIC"}}}'
-
-        #Call update_symptom_defination method under test
-        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,\
-                                                                 new_alarm_config)
-
-        #Verify that mocked method is called with required parameters
-        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
-        m_put.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(symptom_defination_id, symptom_uuid)
-
-
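When the body sent in the PUT matters, call_args can be inspected instead of matching the whole call with assert_called_with. A short sketch under assumed names (the call site below is hypothetical, not the plugin code):

    import json
    from unittest import mock

    import requests

    with mock.patch.object(requests, 'put') as m_put:
        m_put.return_value.status_code = 200
        # Hypothetical call site: the code under test would PUT the updated definition.
        requests.put('https://vrops.example/suite-api/api/symptomdefinitions',
                     data=json.dumps({'state': {'condition': {'value': '5.0'}}}))

        # call_args exposes the positional and keyword arguments of the last call,
        # so single fields of a bulky payload can be asserted in isolation.
        args, kwargs = m_put.call_args
        assert json.loads(kwargs['data'])['state']['condition']['value'] == '5.0'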
-    @mock.patch.object(monPlugin.requests, 'put')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
-    def test_update_symptom_defination_invalid_symptom_req_response(self,\
-                                                                  m_get_symptom_defination_details,\
-                                                                  m_put):
-        """Test update_symptom_defination: If invalid REST response received, return None"""
-
-        #Expected symptom to be updated
-        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
-        new_alarm_config = {'severity':"CRITICAL",
-                            'operation': 'GT',
-                            'threshold_value':5,
-                            'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'
-                            }
-
-        #Set mocked function's return values
-        m_get_symptom_defination_details.return_value = {
-                            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
-                            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
-                            "adapterKindKey": "VMWARE",
-                            "resourceKindKey": "VirtualMachine",
-                            "waitCycles": 1,
-                            "cancelCycles": 1,
-                            "state": {"severity": "CRITICAL",
-                                      "condition": {
-                                          "type": "CONDITION_HT",
-                                          "key": "cpu|usage_average","operator": "GT","value": "0.0",
-                                          "valueType": "NUMERIC","instanced": False,
-                                          "thresholdType": "STATIC"}
-                                      }
-                           }
-
-        m_put.return_value.status_code = 500
-        m_put.return_value.content = '{\
-            "message": "Internal Server error, cause unknown.",\
-            "moreInformation": [\
-                {"name": "errorMessage",\
-                 "value": "Symptom Definition CPU_Utilization_Above_Thr-e14b203c-\
-                 6bf2-4e2f-a91c-8c19d240eda4 does not exist and hence cannot be updated."},\
-                {"name": "localizedMessage",\
-                 "value": "Symptom Definition CPU_Utilization_Above_Thr-e14b203c-\
-                 6bf2-4e2f-a91c-8c19d240eda4 does not exist and hence cannot be updated.;"}],\
-            "httpStatusCode": 500,"apiErrorCode": 500}'
-
-        #Call update_symptom_defination method under test
-        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,\
-                                                                 new_alarm_config)
-
-        #Verify that mocked method is called with required parameters
-        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
-        m_put.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(symptom_uuid, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'put')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
-    def test_update_symptom_defination_failed_to_get_symptom_defination(self,\
-                                                                  m_get_symptom_defination_details,\
-                                                                  m_put):
-        """Test update_symptom_defination: if fails to get symptom_defination returns None"""
-
-        #Expected symptom to be updated
-        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
-        new_alarm_config = {'severity':"CRITICAL",
-                            'operation': 'GT',
-                            'threshold_value':5,
-                            'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'
-                            }
-
-        #Set mocked function's return values
-        m_get_symptom_defination_details.return_value = None
-
-        #Call update_symptom_defination method under test
-        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,\
-                                                                 new_alarm_config)
-
-        #Verify that mocked method is called with required parameters
-        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
-        m_put.assert_not_called()
-
-        #Verify return value with expected value
-        self.assertEqual(symptom_uuid, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_symptom_defination_details_valid_req_response(self,m_get):
-        """Test update_symptom_defination: With valid REST response symptom is created"""
-
-        #Symptom definition to be retrieved
-        symptom_uuid = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{\
-            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
-            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-            "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine","waitCycles": 1,\
-            "cancelCycles": 1,"state": {"severity": "CRITICAL","condition": {"type": "CONDITION_HT",\
-            "key": "cpu|usage_average","operator": "GT","value": "6.0","valueType": "NUMERIC",\
-            "instanced": false,"thresholdType": "STATIC"}}}'
-        expected_symptom_details = {\
-            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
-            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-            "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine","waitCycles": 1,\
-            "cancelCycles": 1,"state": {"severity": "CRITICAL","condition": {"type": "CONDITION_HT",\
-            "key": "cpu|usage_average","operator": "GT","value": "6.0","valueType": "NUMERIC",\
-            "instanced": False,"thresholdType": "STATIC"}}}
-
-        #Call get_symptom_defination_details method under test
-        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
-
-        #Verify that mocked method is called with required parameters
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_symptom_details, symptom_details)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_symptom_defination_details_invalid_req_response(self,m_get):
-        """Test update_symptom_defination: if invalid REST response received return None"""
-
-        #Symptom definition to be retrieved
-        symptom_uuid = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 404
-        m_get.return_value.content = '{"message": "No such SymptomDefinition\
-        - SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278.",\
-        "httpStatusCode": 404,"apiErrorCode": 404}'
-
-        expected_symptom_details = None
-
-        #Call get_symptom_defination_details method under test
-        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
-
-        #Verify that mocked method is called with required parameters
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_symptom_details, symptom_details)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_symptom_defination_details_symptom_uuid_not_provided(self,m_get):
-        """Test update_symptom_defination: if required symptom uuid is not provided"""
-
-        #Symptom uuid is not provided
-        symptom_uuid = None
-        expected_symptom_details = None
-
-        #Call get_symptom_defination_details method under test
-        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
-
-        #Verify that mocked method is called with required parameters
-        m_get.assert_not_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_symptom_details, symptom_details)
-
-
-    @mock.patch.object(monPlugin.requests, 'put')
-    def test_reconfigure_alarm_valid_req_response(self, m_put):
-        """Test reconfigure_alarm: for valid REST response"""
-
-        #Set input parameters to reconfigure_alarm
-        alarm_details_json = {
-            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
-            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-            'description': 'CPU_Utilization_Above_Threshold', 'adapterKindKey': 'VMWARE',
-            'states':[{'impact':{'impactType':'BADGE', 'detail':'risk'}, 'severity':'CRITICAL',
-                       'base-symptom-set':{
-                           'symptomDefinitionIds':['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
-                           'relation': 'SELF','type': 'SYMPTOM_SET', 'aggregation':'ALL',
-                           'symptomSetOperator': 'AND'}}],
-            'type': 16, 'cancelCycles': 1, 'resourceKindKey': 'VirtualMachine','subType': 19,
-            'waitCycles': 1}
-
-        new_alarm_config = {'severity':'WARNING',
-                            'description': 'CPU_Utilization_Above_Threshold_Warning'}
-
-        #Set mocked function's return values
-        m_put.return_value.status_code = 200
-        m_put.return_value.content = '{"id":"AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
-            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-            "description":"CPU_Utilization_Above_Threshold_Warning","adapterKindKey":"VMWARE",\
-            "resourceKindKey":"VirtualMachine","waitCycles":1,"cancelCycles":1,"type":16,\
-            "subType":19,"states":[{"severity":"WARNING","base-symptom-set":{"type":"SYMPTOM_SET",\
-            "relation":"SELF","aggregation":"ALL","symptomSetOperator":"AND",\
-            "symptomDefinitionIds":["SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]},\
-            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
-
-        #Expected alarm_def_uuid to be returned
-        expected_alarm_def_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
-
-        #Call reconfigure_alarm method under test
-        alarm_def_uuid = self.mon_plugin.reconfigure_alarm(alarm_details_json, new_alarm_config)
-
-        #Verify that mocked method is called with required parameters
-        m_put.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_alarm_def_uuid, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.requests, 'put')
-    def test_reconfigure_alarm_invalid_req_response(self, m_put):
-        """Test reconfigure_alarm: for invalid REST response, return None"""
-
-        #Set input parameters to reconfigure_alarm
-        alarm_details_json = {
-            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
-            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-            'description': 'CPU_Utilization_Above_Threshold', 'adapterKindKey': 'VMWARE',
-            'states':[{'impact':{'impactType':'BADGE', 'detail':'risk'}, 'severity':'CRITICAL',
-                       'base-symptom-set':{
-                           'symptomDefinitionIds':['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
-                           'relation': 'SELF','type': 'SYMPTOM_SET', 'aggregation':'ALL',
-                           'symptomSetOperator': 'AND'}}],
-            'type': 16, 'cancelCycles': 1, 'resourceKindKey': 'VirtualMachine','subType': 19,
-            'waitCycles': 1}
-
-        new_alarm_config = {'severity':'WARNING',
-                            'description': 'CPU_Utilization_Above_Threshold_Warning'}
-
-        #Set mocked function's return values
-        m_put.return_value.status_code = 500
-        m_put.return_value.content = '{"message": "Internal Server error, cause unknown.",\
-            "moreInformation": [{"name": "errorMessage",\
-            "value": "Cannot update Alert Definition CPU_Utilization_Above_Thr-\
-            e14b203c-6bf2-4e2f-a91c-8c19d240eda4 since it does not exist"},\
-            {"name": "localizedMessage",\
-            "value": "Cannot update Alert Definition CPU_Utilization_Above_Thr-\
-            e14b203c-6bf2-4e2f-a91c-8c19d240eda4 since it does not exist;"}],\
-            "httpStatusCode": 500,"apiErrorCode": 500}'
-
-        #Expected alarm_def_uuid to be returned
-        expected_alarm_def_uuid = None
-
-        #Call reconfigure_alarm method under test
-        alarm_def_uuid = self.mon_plugin.reconfigure_alarm(alarm_details_json, new_alarm_config)
-
-        #Verify that mocked method is called with required parameters
-        m_put.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(expected_alarm_def_uuid, alarm_def_uuid)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_delete_alarm_configuration_successful_alarm_deletion(self,\
-                                                                  m_get_alarm_defination_details,\
-                                                                  m_delete_notification_rule,\
-                                                                  m_delete_alarm_defination,\
-                                                                  m_delete_symptom_definition):
-        """Test delete_alarm_configuration: for successful alarm deletion, return alarm uuid"""
-
-        #Set input parameters to delete_alarm_configuration
-        delete_alarm_req_dict = {'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'}
-
-        #Set mocked function's return values
-        alarm_details_json = {
-            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'symptomDefinitionIds':['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
-        alarm_details = {
-            'alarm_name':'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'alarm_id':'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'symptom_definition_id':'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
-
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
-        m_delete_alarm_defination.return_value = alarm_details['alarm_id']
-        m_delete_symptom_definition.return_value = alarm_details['symptom_definition_id']
-
-        #Call delete_alarm_configuration method under test
-        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
-
-        #Verify that mocked method is called with required parameters
-        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
-        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
-        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
-        m_delete_symptom_definition.assert_called_with(alarm_details['symptom_definition_id'])
-
-        #Verify return value with expected value
-        self.assertEqual(alarm_uuid, delete_alarm_req_dict['alarm_uuid'])
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_delete_alarm_configuration_failed_to_get_alarm_defination(self,\
-                                                                  m_get_alarm_defination_details,\
-                                                                  m_delete_notification_rule,\
-                                                                  m_delete_alarm_defination,\
-                                                                  m_delete_symptom_definition):
-        """Test delete_alarm_configuration: if failed to get alarm definition, return None"""
-
-        #Set input parameters to delete_alarm_configuration
-        delete_alarm_req_dict = {'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'}
-
-        #Set mocked function's return values
-        alarm_details_json = None
-        alarm_details = None
-
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-
-        #Call delete_alarm_configuration method under test
-        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
-
-        #Verify that mocked method is called with required parameters
-        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
-        m_delete_notification_rule.assert_not_called()
-        m_delete_alarm_defination.assert_not_called()
-        m_delete_symptom_definition.assert_not_called()
-
-        #Verify return value with expected value
-        self.assertEqual(alarm_uuid, None)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_delete_alarm_configuration_failed_to_delete_notification_rule(self,\
-                                                                  m_get_alarm_defination_details,\
-                                                                  m_delete_notification_rule,\
-                                                                  m_delete_alarm_defination,\
-                                                                  m_delete_symptom_definition):
-        """Test delete_alarm_configuration: if failed to delete notification rule, return None"""
-
-        #Set input parameters to delete_alarm_configuration
-        delete_alarm_req_dict = {'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'}
-
-        #Set mocked function's return values
-        alarm_details_json = {
-            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'symptomDefinitionIds':['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
-        alarm_details = {
-            'alarm_name':'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'alarm_id':'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'symptom_definition_id':'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
-
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        m_delete_notification_rule.return_value = None
-
-        #Call delete_alarm_configuration method under test
-        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
-
-        #Verify that mocked method is called with required parameters
-        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
-        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
-        m_delete_alarm_defination.assert_not_called()
-        m_delete_symptom_definition.assert_not_called()
-
-        #Verify return value with expected value
-        self.assertEqual(alarm_uuid, None)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_delete_alarm_configuration_failed_to_delete_alarm_defination(self,\
-                                                                  m_get_alarm_defination_details,\
-                                                                  m_delete_notification_rule,\
-                                                                  m_delete_alarm_defination,\
-                                                                  m_delete_symptom_definition):
-        """Test delete_alarm_configuration: if failed to delete alarm definition, return None"""
-
-        #Set input parameters to delete_alarm_configuration
-        delete_alarm_req_dict = {'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'}
-
-        #Set mocked function's return values
-        alarm_details_json = {
-            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'symptomDefinitionIds':['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
-        alarm_details = {
-            'alarm_name':'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'alarm_id':'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'symptom_definition_id':'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
-
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
-        m_delete_alarm_defination.return_value = None
-
-        #Call delete_alarm_configuration method under test
-        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
-
-        #Verify that mocked method is called with required parameters
-        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
-        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
-        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
-        m_delete_symptom_definition.assert_not_called()
-
-        #Verify return value with expected value
-        self.assertEqual(alarm_uuid, None)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_delete_alarm_configuration_failed_to_delete_symptom_definition(self,\
-                                                                  m_get_alarm_defination_details,\
-                                                                  m_delete_notification_rule,\
-                                                                  m_delete_alarm_defination,\
-                                                                  m_delete_symptom_definition):
-        """Test delete_alarm_configuration: if failed to delete symptom definition, return None"""
-
-        #Set input parameters to delete_alarm_configuration
-        delete_alarm_req_dict = {'alarm_uuid':'9a6d8a14-9f25-4d81-bf91-4d773497444d'}
-
-        #Set mocked function's return values
-        alarm_details_json = {
-            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'symptomDefinitionIds':['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
-        alarm_details = {
-            'alarm_name':'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
-            'alarm_id':'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
-            'symptom_definition_id':'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
-
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
-        m_delete_alarm_defination.return_value = alarm_details['alarm_id']
-        m_delete_symptom_definition.return_value = None
-
-        #Call delete_alarm_configuration method under test
-        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
-
-        #Verify that mocked method is called with required parameters
-        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
-        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
-        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
-        m_delete_symptom_definition.assert_called_with(alarm_details['symptom_definition_id'])
-
-        #Verify return value with expected value
-        self.assertEqual(alarm_uuid, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'delete')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
-    def test_delete_notification_rule_successful_deletion_req_response(self,\
-                                                            m_get_notification_rule_id_by_alarm_name,\
-                                                            m_delete):
-        """Test delete_notification_rule: Valid notification rule is deleted & returns rule_id"""
-
-        #Set input parameters to delete_notification_rule
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
-
-        #Set mocked function's return values
-        m_get_notification_rule_id_by_alarm_name.return_value = '8db86441-71d8-4830-9e1a-a90be3776d12'
-        m_delete.return_value.status_code = 204
-
-        #Call delete_notification_rule method under test
-        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
-
-        #Verify that mocked method is called with required parameters
-        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
-        m_delete.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(rule_id, '8db86441-71d8-4830-9e1a-a90be3776d12')
-
-
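The delete_* tests patch methods directly on monPlugin.MonPlugin with mock.patch.object, so the method is replaced for every instance and the recorded call arguments do not include self. A compact sketch under assumed names (the Plugin class below is a stand-in, not the real MonPlugin):

    from unittest import mock

    class Plugin:
        # Stand-in for MonPlugin: the real method would call the vROPs REST API.
        def find_rule(self, name):
            raise RuntimeError('network call')

        def delete_rule(self, name):
            rule_id = self.find_rule(name)
            return rule_id if rule_id else None

    with mock.patch.object(Plugin, 'find_rule', return_value='8db86441') as m_find:
        assert Plugin().delete_rule('CPU_Utilization_Above_Thr') == '8db86441'
        # self is not recorded because the class attribute is replaced by a plain mock.
        m_find.assert_called_with('CPU_Utilization_Above_Thr')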
-    @mock.patch.object(monPlugin.requests, 'delete')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
-    def test_delete_notification_rule_failed_to_get_notification_rule_id(self,\
-                                                            m_get_notification_rule_id_by_alarm_name,\
-                                                            m_delete):
-        """Test delete_notification_rule: if notification rule is not found, returns None"""
-
-        #Set input parameters to delete_notification_rule
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
-
-        #Set mocked function's return values
-        m_get_notification_rule_id_by_alarm_name.return_value = None
-
-        #Call delete_notification_rule method under test
-        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
-
-        #Verify that mocked method is called with required parameters
-        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
-        m_delete.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(rule_id, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'delete')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
-    def test_delete_notification_rule_invalid_deletion_req_response(self,\
-                                                            m_get_notification_rule_id_by_alarm_name,\
-                                                            m_delete):
-        """Test delete_notification_rule: If an invalid response is received, returns None"""
-
-        #Set input parameters to delete_notification_rule
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
-
-        #Set mocked function's return values
-        m_get_notification_rule_id_by_alarm_name.return_value = '8db86441-71d8-4830-9e1a-a90be3776d12'
-        m_delete.return_value.status_code = 404
-
-        #Call delete_notification_rule method under test
-        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
-
-        #Verify that mocked method is called with required parameters
-        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
-        m_delete.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(rule_id, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_notification_rule_id_by_alarm_name_valid_req_response(self,m_get):
-        """Test get_notification_rule_id_by_alarm_name: A valid request response received,
-            returns notification_id
-        """
-
-        #Set input parameters to get_notification_rule_id_by_alarm_name
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{\
-        "pageInfo": {"totalCount": 0,"page": 0,"pageSize": 1000},\
-        "links": [\
-            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
-            "rel": "SELF","name": "current"},\
-            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
-            "rel": "RELATED","name": "first"},\
-            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
-            "rel": "RELATED","name": "last"}],\
-        "notification-rule": [{\
-        "id": "2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
-        "name": "notify_CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-        "pluginId": "03053f51-f829-438d-993d-cc33a435d76a",\
-        "links": [{"href": "/suite-api/api/notifications/rules/2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
-        "rel": "SELF","name": "linkToSelf"}]}]}'
-
-        #Call get_notification_rule_id_by_alarm_name method under test
-        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
-
-        #Verify that mocked method is called with required parameters
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(notification_id, '2b86fa23-0c15-445c-a2b1-7bd725c46f59')
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_notification_rule_id_by_alarm_name_invalid_req_response(self,m_get):
-        """Test get_notification_rule_id_by_alarm_name: If an invalid response received,\
-            returns None
-        """
-
-        #Set input parameters to get_notification_rule_id_by_alarm_name
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 404
-
-        #Call get_notification_rule_id_by_alarm_name method under test
-        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
-
-        #Verify that mocked method is called with required parameters
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(notification_id, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_notification_rule_id_by_alarm_name_rule_not_found(self,m_get):
-        """Test get_notification_rule_id_by_alarm_name: If a notification rule is not found,
-            returns None
-        """
-
-        #Set input parameters to get_notification_rule_id_by_alarm_name
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda'
-
-        #Set mocked function's return values
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{\
-        "pageInfo": {"totalCount": 0,"page": 0,"pageSize": 1000},\
-        "links": [\
-            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
-            "rel": "SELF","name": "current"},\
-            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
-            "rel": "RELATED","name": "first"},\
-            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
-            "rel": "RELATED","name": "last"}],\
-        "notification-rule": [{\
-        "id": "2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
-        "name": "notify_CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
-        "pluginId": "03053f51-f829-438d-993d-cc33a435d76a",\
-        "links": [{"href": "/suite-api/api/notifications/rules/2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
-        "rel": "SELF","name": "linkToSelf"}]}]}'
-
-        #Call get_notification_rule_id_by_alarm_name method under test
-        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
-
-        #Verify that mocked method is called with required parameters
-        m_get.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(notification_id, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'delete')
-    def test_delete_alarm_defination_valid_req_response(self,m_delete):
-        """Test delete_alarm_defination: A valid request response received,
-            returns alarm_definition_id
-        """
-
-        #Set input parameters to delete_alarm_definition
-        alarm_definition_id = 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d'
-
-        #Set mocked function's return values
-        m_delete.return_value.status_code = 204
-
-        #Call delete_alarm_defination method under test
-        actual_alarm_id = self.mon_plugin.delete_alarm_defination(alarm_definition_id)
-
-        #Verify that mocked method is called with required parameters
-        m_delete.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(actual_alarm_id, alarm_definition_id)
-
-
-    @mock.patch.object(monPlugin.requests, 'delete')
-    def test_delete_alarm_defination_invalid_req_response(self,m_delete):
-        """Test delete_alarm_defination: If an invalid request response received,
-            returns None
-        """
-
-        #Set input parameters to delete_alarm_definition
-        alarm_definition_id = 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d'
-
-        #Set mocked function's return values
-        m_delete.return_value.status_code = 404
-
-        #Call delete_alarm_defination method under test
-        actual_alarm_id = self.mon_plugin.delete_alarm_defination(alarm_definition_id)
-
-        #Verify that mocked method is called with required parameters
-        m_delete.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(actual_alarm_id, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'delete')
-    def test_delete_symptom_definition_valid_req_response(self,m_delete):
-        """Test delete_symptom_definition: A valid request response received,
-            returns symptom_id
-        """
-
-        #Set input parameters to delete_symptom_definition
-        symptom_definition_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'
-
-        #Set mocked function's return values
-        m_delete.return_value.status_code = 204
-
-        #Call delete_symptom_definition method under test
-        actual_symptom_id = self.mon_plugin.delete_symptom_definition(symptom_definition_id)
-
-        #Verify that mocked method is called with required parameters
-        m_delete.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(actual_symptom_id, symptom_definition_id)
-
-
-    @mock.patch.object(monPlugin.requests, 'delete')
-    def test_delete_symptom_definition_invalid_req_response(self,m_delete):
-        """Test delete_symptom_definition: If an invalid request response received,
-            returns None
-        """
-
-        #Set input parameters to delete_symptom_definition
-        symptom_definition_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'
-
-        #Set mocked function's return values
-        m_delete.return_value.status_code = 404
-
-        #Call delete_symptom_definition method under test
-        actual_symptom_id = self.mon_plugin.delete_symptom_definition(symptom_definition_id)
-
-        #Verify that mocked method is called with required parameters
-        m_delete.assert_called()
-
-        #Verify return value with expected value
-        self.assertEqual(actual_symptom_id, None)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
-    def test_configure_rest_plugin_valid_plugin_id(self, m_check_if_plugin_configured, m_post):
-        """Test configure rest plugin method-valid plugin id"""
-
-        # mock return values
-        expected_return = m_check_if_plugin_configured.return_value = "mock_pluginid"
-
-        # call configure rest plugin method under test
-        actual_return = self.mon_plugin.configure_rest_plugin()
-
-        # verify that mocked method is called
-        m_check_if_plugin_configured.assert_called()
-        m_post.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
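This test exercises an early-return path: when the lookup already yields a plugin id, configure_rest_plugin should not POST a new instance, which assert_not_called verifies. A tiny sketch of the pattern with hypothetical names (configure below is not the plugin's code):

    from unittest import mock

    def configure(lookup, create):
        # Hypothetical flow mirroring the early return: reuse an existing id if present.
        plugin_id = lookup()
        return plugin_id if plugin_id else create()

    lookup = mock.Mock(return_value='mock_pluginid')
    create = mock.Mock(return_value='new-plugin-id')
    assert configure(lookup, create) == 'mock_pluginid'
    create.assert_not_called()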
-    @mock.patch.object(monPlugin.MonPlugin,'enable_rest_plugin')
-    @mock.patch.object(monPlugin.requests, 'post')
-    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
-    def test_configure_rest_plugin_invalid_plugin_id(self, m_check_if_plugin_configured, m_post, m_enable_rest_plugin):
-        """Test configure rest plugin method-invalid plugin id"""
-
-        # mock return values
-        m_check_if_plugin_configured.return_value = None # not configured
-        m_post.return_value.status_code = 201 #success
-        m_post.return_value.content = '{"pluginTypeId":"RestPlugin","pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
-                                        "name":"MON_module_REST_Plugin","version":1518693747871,"enabled":false,\
-                                        "configValues":[{"name":"Url","value":"https://MON.lxd:8080/notify/"},\
-                                        {"name":"Content-type","value":"application/json"},{"name":"Certificate",\
-                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0:C3:E8:26:50"},\
-                                        {"name":"ConnectionCount","value":"20"}]}'
-
-        m_enable_rest_plugin.return_value = True #success
-        expected_return = '1ef15663-9739-49fe-8c41-022bcc9f690c'
-
-        # call configure rest plugin method under test
-        actual_return = self.mon_plugin.configure_rest_plugin()
-
-        # verify that mocked method is called
-        m_check_if_plugin_configured.assert_called()
-        m_post.assert_called()
-        m_enable_rest_plugin.assert_called_with('1ef15663-9739-49fe-8c41-022bcc9f690c','MON_module_REST_Plugin')
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin,'enable_rest_plugin')
-    @mock.patch.object(monPlugin.requests, 'post')
-    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
-    def test_configure_rest_plugin_failed_to_enable_plugin(self, m_check_if_plugin_configured, m_post, m_enable_rest_plugin):
-        """Test configure rest plugin method-failed to enable plugin case"""
-
-        # mock return values
-        m_check_if_plugin_configured.return_value = None # not configured
-        m_post.return_value.status_code = 201 #success
-        m_post.return_value.content = '{"pluginTypeId":"RestPlugin","pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
-                                        "name":"MON_module_REST_Plugin","version":1518693747871,"enabled":false,\
-                                        "configValues":[{"name":"Url","value":"https://MON.lxd:8080/notify/"},\
-                                        {"name":"Content-type","value":"application/json"},{"name":"Certificate",\
-                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0:C3:E8:26:50"},\
-                                        {"name":"ConnectionCount","value":"20"}]}'
-
-        m_enable_rest_plugin.return_value = False #return failure
-        expected_return = None
-
-        # call configure rest plugin method under test
-        actual_return = self.mon_plugin.configure_rest_plugin()
-
-        # verify that mocked method is called
-        m_check_if_plugin_configured.assert_called()
-        m_post.assert_called()
-        m_enable_rest_plugin.assert_called_with('1ef15663-9739-49fe-8c41-022bcc9f690c','MON_module_REST_Plugin')
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_check_if_plugin_configured_valid_req_response(self, m_get):
-        """Test check if plugin configured method-valid request response"""
-
-        plugin_name = 'MON_module_REST_Plugin'
-        # mock return values
-        m_get.return_value.status_code = 200
-        expected_return = '1ef15663-9739-49fe-8c41-022bcc9f690c'
-        m_get.return_value.content = '{"notificationPluginInstances":\
-                                       [{"pluginTypeId":"RestPlugin",\
-                                        "pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
-                                        "name":"MON_module_REST_Plugin","version":1518694966987,\
-                                        "enabled":true,"configValues":[{"name":"Url",\
-                                        "value":"https://MON.lxd:8080/notify/"},\
-                                        {"name":"Content-type","value":"application/json"},\
-                                        {"name":"Certificate",\
-                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0"},\
-                                        {"name":"ConnectionCount","value":"20"}]}]}'
-
-        # call check if plugin configured method under test
-        actual_return = self.mon_plugin.check_if_plugin_configured(plugin_name)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_check_if_plugin_configured_invalid_req_response(self, m_get):
-        """Test check if plugin configured method-invalid request response"""
-
-        plugin_name = 'MON_module_REST_Plugin'
-        # mock return values
-        m_get.return_value.status_code = 201
-        expected_return = None
-        m_get.return_value.content = '{"notificationPluginInstances":\
-                                       [{"pluginTypeId":"RestPlugin",\
-                                        "pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
-                                        "name":"MON_module_REST_Plugin","version":1518694966987,\
-                                        "enabled":true,"configValues":[{"name":"Url",\
-                                        "value":"https://MON.lxd:8080/notify/"},\
-                                        {"name":"Content-type","value":"application/json"},\
-                                        {"name":"Certificate",\
-                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0"},\
-                                        {"name":"ConnectionCount","value":"20"}]}]}'
-
-        # call check if plugin configured method under test
-        actual_return = self.mon_plugin.check_if_plugin_configured(plugin_name)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'put')
-    def test_enable_rest_plugin_valid_req_response(self, m_put):
-        """Test enable rest plugin method-valid request response"""
-
-        plugin_name = 'MON_module_REST_Plugin'
-        plugin_id = '1ef15663-9739-49fe-8c41-022bcc9f690c'
-        # mock return values
-        m_put.return_value.status_code = 204
-        expected_return = True
-        m_put.return_value.content = ''
-
-        # call enable rest plugin method under test
-        actual_return = self.mon_plugin.enable_rest_plugin(plugin_id, plugin_name)
-
-        # verify that mocked method is called
-        m_put.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'put')
-    def test_enable_rest_plugin_invalid_req_response(self, m_put):
-        """Test enable rest plugin method-invalid request response"""
-
-        plugin_name = 'MON_module_REST_Plugin'
-        plugin_id = '08018c0f-8879-4ca1-9b92-00e22d2ff81b' #invalid plugin id
-        # mock return values
-        m_put.return_value.status_code = 404 # api Error code
-        expected_return = False
-        m_put.return_value.content = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?><ops:\
-                                      error xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
-                                      xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:ops=\
-                                      "http://webservice.vmware.com/vRealizeOpsMgr/1.0/" \
-                                      httpStatusCode="404" apiErrorCode="404"><ops:message>\
-                                      No such Notification Plugin - 08018c0f-8879-4ca1-9b92-\
-                                      00e22d2ff81b.</ops:message></ops:error>'
-
-        # call enable rest plugin method under test
-        actual_return = self.mon_plugin.enable_rest_plugin(plugin_id, plugin_name)
-
-        # verify that mocked method is called
-        m_put.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
-    def test_create_alarm_notification_rule_valid_req(self, m_check_if_plugin_configured, m_post):
-        """Test create alarm notification rule method valid request response"""
-
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
-        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        expected_return = "8db86441-71d8-4830-9e1a-a90be3776d12"
-
-        # mock return values
-        m_check_if_plugin_configured.return_value = '03053f51-f829-438d-993d-cc33a435d76a'
-        m_post.return_value.status_code = 201
-        m_post.return_value.content = '{"id":"8db86441-71d8-4830-9e1a-a90be3776d12",\
-                                      "name":"notify_CPU_Utilization_Above_Thr-e14b203c",\
-                                      "pluginId":"03053f51-f829-438d-993d-cc33a435d76a",\
-                                      "alertControlStates":[],"alertStatuses":[],\
-                                      "resourceFilter":{"matchResourceIdOnly":true,\
-                                      "childrenResourceKindFilters":[],\
-                                      "resourceId":"ac87622f-b761-40a0-b151-00872a2a456e"},\
-                                      "alertTypeFilters":[],"alertDefinitionIdFilters":{"values":[\
-                                      "AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14"]}}'
-
-        # call create alarm notification rule method under test
-        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
-
-        # verify that mocked method is called
-        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
-        m_post.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
-    def test_create_alarm_notification_rule_invalid_req(self, m_check_if_plugin_configured, m_post):
-        """Test create alarm notification rule method invalid request response"""
-
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
-        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        expected_return = None  # invalid request should return None
-
-        # mock return values
-        m_check_if_plugin_configured.return_value = '03053f51-f829-438d-993d-cc33a435d76a'
-        m_post.return_value.status_code = 500
-        m_post.return_value.content = '{"message":"Internal Server error, cause unknown.",\
-                                        "moreInformation":[{"name":"errorMessage","value":\
-                                        "there is already a rule with the same rule name"},\
-                                       {"name":"localizedMessage","value":"there is already \
-                                        a rule with the same rule name;"}],"httpStatusCode":500,\
-                                        "apiErrorCode":500}'
-
-        # call create alarm notification rule method under test
-        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
-
-        # verify that mocked method is called
-        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
-        m_post.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'post')
-    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
-    def test_create_alarm_notification_rule_failed_to_get_plugin_id(self, \
-                                    m_check_if_plugin_configured, m_post):
-        """Test create alarm notification rule method invalid plugin id"""
-
-        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
-        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        expected_return = None  # invalid request should return None
-
-        # mock return values
-        m_check_if_plugin_configured.return_value = None
-
-        # call create alarm notification rule method under test
-        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
-
-        # verify that mocked method is called
-        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
-        m_post.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_get_metrics_data_valid_rest_req_response(self, m_get_default_Params, \
-                                                      m_get_vm_moref_id, \
-                                                      m_get_vm_resource_id, \
-                                                      m_get):
-        """Test get metrics data of resource method valid request response"""
-
-        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None, \
-                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',\
-                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef', \
-                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware', \
-                   'collection_unit': 'HR', 'vim_uuid':'1'}
-
-        # mock return value
-        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
-        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
-        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_get.return_value.status_code = 200
-        m_get.return_value.content = '{"values":[{"resourceId":"ac87622f-b761-40a0-b151-\
-                                       00872a2a456e","stat-list":{"stat":[{"timestamps":\
-                                      [1519716874297,1519717174294,1519717474295,1519717774298,\
-                                      1519718074300,1519718374299,1519718674314,1519718974325,\
-                                      1519719274304,1519719574298,1519719874298,1519720174301],\
-                                      "statKey":{"key":"cpu|usage_average"},"intervalUnit":\
-                                      {"quantifier":1},"data":[0.1120000034570694,\
-                                      0.11866666376590729,0.11599999666213989,0.11400000005960464,\
-                                      0.12066666781902313,0.11533333361148834,0.11800000071525574,\
-                                      0.11533333361148834,0.12200000137090683,0.11400000005960464,\
-                                      0.1459999978542328,0.12133333086967468]}]}}]}'
-
-        # call get metrics data method under test
-        actual_return = self.mon_plugin.get_metrics_data(metrics)
-
-        # verify that mocked method is called
-        m_get_default_Params.assert_called_with(metrics['metric_name'])
-        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-        m_get.assert_called()
-
-        # verify return value with expected value
-        # (expected_return is not defined for this case, so only the REST interactions are verified)
-        #self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_get_metrics_data_invalid_rest_req_response(self, m_get_default_Params, \
-                                                      m_get_vm_moref_id, \
-                                                      m_get_vm_resource_id, \
-                                                      m_get):
-        """Test get metrics data of resource method invalid request response"""
-
-        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None, \
-                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',\
-                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef', \
-                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware', \
-                   'collection_unit': 'HR', 'vim_uuid':'1'}
-
-        # mock return value
-        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
-        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
-        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_get.return_value.status_code = 400
-        m_get.return_value.content = '{"message":"Invalid request... #1 violations found.",\
-                                       "validationFailures":[{"failureMessage":"Invalid Parameter",\
-                                       "violationPath":"end"}],"httpStatusCode":400,\
-                                       "apiErrorCode":400}'
-        expected_return = {'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
-                           'schema_version': '1.0',
-                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                           'metrics_data': {'time_series': [], 'metrics_series': []},
-                           'schema_type': 'read_metric_data_response',
-                            'unit': '%', 'vim_uuid':'1'}
-
-        # call get metrics data method under test
-        actual_return = self.mon_plugin.get_metrics_data(metrics)
-
-        # verify that mocked method is called
-        m_get_default_Params.assert_called_with(metrics['metric_name'])
-        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_get_metrics_data_metric_not_supported(self, m_get_default_Params, \
-                                                   m_get_vm_moref_id, \
-                                                   m_get_vm_resource_id, \
-                                                   m_get):
-        """Test get metrics data of resource method invalid metric name"""
-
-        metrics = {'collection_period': 1, 'metric_name': 'invalid_metric', 'metric_uuid': None,
-                   'schema_version': 1.0,
-                   'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
-                   'collection_unit': 'HR', 'vim_uuid':'1'}
-
-        # mock return value
-        m_get_default_Params.return_value = {} # returns empty dict
-
-        expected_return = {'metric_name': 'invalid_metric', 'metric_uuid': '0','vim_uuid': '1',
-                           'schema_version': '1.0','resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                           'metrics_data': {'time_series': [], 'metrics_series': []},
-                           'schema_type': 'read_metric_data_response','unit': None}
-
-        # call get metrics data method under test
-        actual_return = self.mon_plugin.get_metrics_data(metrics)
-
-        # verify that mocked method is called/not called
-        m_get_default_Params.assert_called_with(metrics['metric_name'])
-        m_get_vm_moref_id.assert_not_called()
-        m_get_vm_resource_id.assert_not_called()
-        m_get.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_get_metrics_data_failed_to_get_vm_moref_id(self, m_get_default_Params, \
-                                                        m_get_vm_moref_id, \
-                                                        m_get_vm_resource_id, \
-                                                        m_get):
-        """Test get metrics data method negative scenario- invalid resource id"""
-
-        metrics = {'collection_period': 1, 'metric_name': 'cpu_utilization', 'metric_uuid': None, \
-                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',\
-                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef', \
-                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware', \
-                   'collection_unit': 'HR', 'vim_uuid':'1'}
-
-        # mock return value
-        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
-        m_get_vm_moref_id.return_value = None
-        expected_return = {'metric_name': 'cpu_utilization', 'metric_uuid': '0',
-                           'schema_version': '1.0',
-                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                           'metrics_data': {'time_series': [], 'metrics_series': []},
-                           'schema_type': 'read_metric_data_response',
-                            'unit': '%', 'vim_uuid':'1'}
-
-        # call get metrics data method under test
-        actual_return = self.mon_plugin.get_metrics_data(metrics)
-
-        # verify that mocked method is called/not called
-        m_get_default_Params.assert_called_with(metrics['metric_name'])
-        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
-        m_get_vm_resource_id.assert_not_called()
-        m_get.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_get_metrics_data_failed_to_get_vm_resource_id(self, m_get_default_Params, \
-                                                           m_get_vm_moref_id, \
-                                                           m_get_vm_resource_id, \
-                                                           m_get):
-        """Test get metrics data method negative scenario- invalid moref id"""
-
-        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None, \
-                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',\
-                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef', \
-                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware', \
-                   'collection_unit': 'HR', 'vim_uuid':'1'}
-
-        # mock return value
-        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
-        m_get_vm_moref_id.return_value = 'Invalid-vm-6626'
-        m_get_vm_resource_id.return_value = None
-        expected_return = {'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
-                           'schema_version': '1.0',
-                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
-                           'metrics_data': {'time_series': [], 'metrics_series': []},
-                           'schema_type': 'read_metric_data_response',
-                            'unit': '%', 'vim_uuid':'1'}
-
-        # call get metrics data method under test
-        actual_return = self.mon_plugin.get_metrics_data(metrics)
-
-        # verify that mocked method is called/not called
-        m_get_default_Params.assert_called_with(metrics['metric_name'])
-        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
-        m_get_vm_resource_id.assert_called()
-        m_get_vm_resource_id.assert_called_with('Invalid-vm-6626')
-        m_get.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_update_alarm_configuration_successful_updation(self, m_get_alarm_defination_details, \
-                                                            m_update_symptom_defination, \
-                                                            m_reconfigure_alarm ):
-        """Test update alarm configuration method"""
-
-        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
-                        'correlation_id': 14203,
-                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
-
-        # mock return value
-        alarm_details_json = {'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
-                              'severity': 'CRITICAL', 'base-symptom-set': {'symptomDefinitionIds':
-                              ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
-                              'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL'}}],
-                              'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
-                              'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
-                              'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
-        alarm_details = {'symptom_definition_id': 'SymptomDefinition-47c88675-bea8-436a-bb41-\
-                        8d2231428f44', 'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-\
-                        a91c-8c19d2', 'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-\
-                        a7a867257e14', 'resource_kind': 'VirtualMachine', 'type': 16}
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        m_update_symptom_defination.return_value = 'SymptomDefinition-47c88675-bea8-436a-bb41-\
-                                                   8d2231428f44'
-        expected_return = m_reconfigure_alarm.return_value = 'f1163767-6eac-438f-8e60-a7a867257e14'
-
-        # call update alarm configuration method under test
-        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
-
-        # verify that mocked method is called
-        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
-        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],\
-                                                       alarm_config)
-        m_reconfigure_alarm.assert_called_with(alarm_details_json, alarm_config)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_update_alarm_configuration_failed_to_reconfigure_alarm(self, \
-                                                        m_get_alarm_defination_details, \
-                                                        m_update_symptom_defination, \
-                                                        m_reconfigure_alarm ):
-        """Test update alarm configuration method- failed to reconfigure alarm"""
-
-        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
-                        'correlation_id': 14203,
-                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
-
-        # mock return value
-        alarm_details_json = {'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
-                              'severity': 'CRITICAL', 'base-symptom-set': {'symptomDefinitionIds':
-                              ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
-                              'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL'}}],
-                              'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
-                              'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
-                              'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
-        alarm_details = {'symptom_definition_id': 'SymptomDefinition-47c88675-bea8-436a-bb41-\
-                        8d2231428f44', 'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-\
-                        a91c-8c19d2', 'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-\
-                        a7a867257e14', 'resource_kind': 'VirtualMachine', 'type': 16}
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        m_update_symptom_defination.return_value = 'SymptomDefinition-47c88675-bea8-436a-bb41-\
-                                                    8d2231428f44'
-        expected_return = m_reconfigure_alarm.return_value = None # failed to reconfigure
-
-        # call update alarm configuration method under test
-        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
-
-        # verify that mocked method is called
-        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
-        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],\
-                                                       alarm_config)
-        m_reconfigure_alarm.assert_called_with(alarm_details_json, alarm_config)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
-    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
-    def test_update_alarm_configuration_failed_to_update_symptom(self, \
-                                                        m_get_alarm_defination_details, \
-                                                        m_update_symptom_defination, \
-                                                        m_reconfigure_alarm ):
-        """Test update alarm configuration method- failed to update alarm"""
-
-        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
-                        'correlation_id': 14203,
-                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
-
-        # mock return value
-        alarm_details_json = {'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
-                              'severity': 'CRITICAL', 'base-symptom-set': {'symptomDefinitionIds':
-                              ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
-                              'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL'}}],
-                              'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
-                              'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
-                              'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
-        alarm_details = {'symptom_definition_id': 'Invalid-47c88675-bea8-436a-bb41-\
-                        8d2231428f44', 'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-\
-                        a91c-8c19d2', 'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-\
-                        a7a867257e14', 'resource_kind': 'VirtualMachine', 'type': 16}
-        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
-        expected_return = m_update_symptom_defination.return_value = None
-
-        # call update alarm configuration method under test
-        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
-
-        # verify that mocked method is called
-        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
-        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],\
-                                                       alarm_config)
-        m_reconfigure_alarm.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_verify_metric_support_metric_supported_with_unit(self,m_get_default_Params):
-        """Test verify metric support method for supported metric"""
-
-        # mock return value
-        metric_info = {'metric_unit': '%', 'metric_name': 'cpu_utilization',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
-        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
-        expected_return = True #supported metric returns True
-
-        # call verify metric support method under test
-        actual_return = self.mon_plugin.verify_metric_support(metric_info)
-
-        # verify that mocked method is called
-        m_get_default_Params.assert_called_with(metric_info['metric_name'])
-        #m_get_default_Params.assert_called_with(metric_info)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_verify_metric_support_metric_not_supported(self,m_get_default_Params):
-        """Test verify metric support method for un-supported metric"""
-
-        # mock return value
-        metric_info = {'metric_unit': '%', 'metric_name': 'invalid_metric',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
-        m_get_default_Params.return_value = {}
-        expected_return = False  # unsupported metric returns False
-
-        # call verify metric support method under test
-        actual_return = self.mon_plugin.verify_metric_support(metric_info)
-
-        # verify that mocked method is called
-        m_get_default_Params.assert_called_with(metric_info['metric_name'])
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
-    def test_verify_metric_support_metric_supported_with_mismatched_unit(self, \
-                                                               m_get_default_Params):
-        """Test verify metric support method for supported metric with mismatched unit"""
-
-        # mock return value
-        metric_info = {'metric_unit': '', 'metric_name': 'invalid_metric',
-                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
-        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
-        expected_return = True #supported metric returns True
-
-        # call verify metric support method under test
-        actual_return = self.mon_plugin.verify_metric_support(metric_info)
-
-        # verify that mocked method is called
-        m_get_default_Params.assert_called_with(metric_info['metric_name'])
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
-    def test_get_triggered_alarms_list_returns_triggered_alarms(self, \
-                                                        m_get_vrops_resourceid, \
-                                                        m_triggered_alarms):
-        """Test get triggered alarm list method valid input"""
-
-        # Mock list alarm input
-        list_alarm_input = {'severity': 'CRITICAL',
-                            'correlation_id': 'e14b203c',
-                            'alarm_name': 'CPU_Utilization_Above_Threshold',
-                            'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
-
-        resource_id = m_get_vrops_resourceid.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        expected_return = m_triggered_alarms.return_value = [{'status': 'ACTIVE',
-                                           'update_date': '2018-01-12T08:34:05',
-                                           'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
-                                           'cancel_date': '0000-00-00T00:00:00',
-                                           'alarm_instance_uuid': 'd9e3bc84',
-                                           'alarm_uuid': '5714977d', 'vim_type': 'VMware',
-                                           'start_date': '2018-01-12T08:34:05'},
-                                          {'status': 'CANCELED','update_date':'2017-12-20T09:37:57',
-                                           'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
-                                           'cancel_date': '2018-01-12T06:49:19',
-                                           'alarm_instance_uuid': 'd3bbeef6',
-                                           'alarm_uuid': '7ba1bf3e', 'vim_type': 'VMware',
-                                           'start_date': '2017-12-20T09:37:57'}]
-
-        # call get triggered alarms list method under test
-        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
-
-        # verify that mocked method is called
-        m_get_vrops_resourceid.assert_called_with(list_alarm_input['resource_uuid'])
-        m_triggered_alarms.assert_called_with(list_alarm_input['resource_uuid'], resource_id)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
-    def test_get_triggered_alarms_list_invalid_resource_uuid(self, \
-                                                        m_get_vrops_resourceid, \
-                                                        m_triggered_alarms):
-        """Test get triggered alarm list method invalid resource uuid"""
-
-        # Mock list alarm input
-        list_alarm_input = {'severity': 'CRITICAL',
-                            'correlation_id': 'e14b203c',
-                            'alarm_name': 'CPU_Utilization_Above_Threshold',
-                            'resource_uuid': '12345'} #invalid resource uuid
-
-        m_get_vrops_resourceid.return_value = None  # vrops resource id lookup fails
-        expected_return = []  # invalid resource uuid should yield an empty list
-
-        # call get triggered alarms list method under test
-        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
-
-        # verify that mocked method is called
-        m_get_vrops_resourceid.assert_called_with(list_alarm_input['resource_uuid'])
-        m_triggered_alarms.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
-    def test_get_triggered_alarms_list_resource_uuid_not_present(self, \
-                                                        m_get_vrops_resourceid, \
-                                                        m_triggered_alarms):
-        """Test get triggered alarm list method resource not present"""
-
-        # Mock list alarm input
-        list_alarm_input = {'severity': 'CRITICAL',
-                            'correlation_id': 'e14b203c',
-                            'alarm_name': 'CPU_Utilization_Above_Threshold'}
-
-        # call get triggered alarms list method under test
-        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
-
-        # verify that mocked method is called
-        m_get_vrops_resourceid.assert_not_called()
-        m_triggered_alarms.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual([], actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    def test_get_vrops_resourceid_from_ro_uuid(self, m_get_vm_moref_id, m_get_vm_resource_id):
-        """Test get vrops resourceid from ro uuid method"""
-
-        # Mock the inputs
-        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
-        expected_return = m_get_vm_resource_id.return_value ='ac87622f-b761-40a0-b151-00872a2a456e'
-
-        # call get_vrops_resourceid_from_ro_uuid method under test
-        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
-
-        # verify that mocked method is called
-        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    def test_get_vrops_resourceid_from_ro_uuid_failed_to_get_vm_resource_id(self, \
-                                                                    m_get_vm_moref_id, \
-                                                                    m_get_vm_resource_id):
-        """Test get vrops resourceid from ro uuid method negative scenario"""
-
-        # Mock the inputs
-        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
-        expected_return = m_get_vm_resource_id.return_value = None
-
-        # call get_vrops_resourceid_from_ro_uuid method under test
-        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
-
-        # verify that mocked method is called
-        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
-        m_get_vm_resource_id.assert_called_with(vm_moref_id)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
-    def test_get_vrops_resourceid_from_ro_uuid_failed_to_get_vm_moref_id(self, \
-                                                                    m_get_vm_moref_id, \
-                                                                    m_get_vm_resource_id):
-        """Test get vrops resourceid from ro uuid method negative scenario"""
-
-        # Mock the inputs
-        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        expected_return = vm_moref_id = m_get_vm_moref_id.return_value = None
-
-        # call get_vrops_resourceid_from_ro_uuid method under test
-        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
-
-        # verify that mocked method is called
-        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
-        m_get_vm_resource_id.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_triggered_alarms_on_resource_valid_req_response(self, m_get):
-        """Test get triggered alarms on resource method for valid request"""
-
-        # Mock the inputs
-        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_get.return_value.status_code = 200
-        expected_return = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
-                            'severity': 'CRITICAL', 'start_date': '2018-01-12T08:34:05',
-                            'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                            'cancel_date': '2018-02-12T08:24:48', 'vim_type': 'VMware',
-                            'alarm_instance_uuid': 'd9e3bc84-dcb4-4905-b592-00a55f4cdaf1',
-                            'alarm_uuid': '5714977d-56f6-4222-adc7-43fa6c6e7e39'}]
-
-        m_get.return_value.content = '{"alerts": [\
-        {\
-            "alertId": "d9e3bc84-dcb4-4905-b592-00a55f4cdaf1",\
-            "resourceId": "ac87622f-b761-40a0-b151-00872a2a456e",\
-            "alertLevel": "CRITICAL",\
-            "status": "ACTIVE",\
-            "startTimeUTC": 1515746045278,\
-            "cancelTimeUTC": 1518423888708,\
-            "updateTimeUTC": 1515746045278,\
-            "alertDefinitionId": "AlertDefinition-5714977d-56f6-4222-adc7-43fa6c6e7e39",\
-            "alertDefinitionName": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4"\
-        },\
-        {\
-            "alertId": "5fb5e940-e161-4253-a729-7255c6d6b1f5",\
-            "resourceId": "ac87622f-b761-40a0-b151-00872a2a456e",\
-            "alertLevel": "WARNING",\
-            "status": "CANCELED",\
-            "startTimeUTC": 1506684979154,\
-            "cancelTimeUTC": 0,\
-            "updateTimeUTC": 1520471975507,\
-            "alertDefinitionId": "AlertDefinition-9ec5a921-1a54-411d-85ec-4c1c9b26dd02",\
-            "alertDefinitionName": "VM_CPU_Usage_Alarm"\
-        }]}'
-
-        # call get_triggered_alarms_on_resource method under test
-        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid, \
-                                                                        vrops_resource_id)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_triggered_alarms_on_resource_invalid_req_response(self, m_get):
-        """Test get triggered alarms on resource method for invalid request"""
-
-        # Mock the inputs
-        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_get.return_value.status_code = 204
-        expected_return = None
-
-        # call get_triggered_alarms_on_resource method under test
-        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid, \
-                                                                        vrops_resource_id)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_triggered_alarms_on_resource_no_alarms_present(self, m_get):
-        """Test get triggered alarms on resource method for no alarms present"""
-
-        # Mock the inputs
-        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
-        m_get.return_value.status_code = 200
-        expected_return = []
-        m_get.return_value.content = '{"alerts": []}'
-
-        # call get_triggered_alarms_on_resource method under test
-        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid, \
-                                                                        vrops_resource_id)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    def test_convert_date_time_valid_date_time(self):
-        """Test convert date time method valid input"""
-
-        # Mock the inputs
-        date_time = 1515746045278
-        expected_return = '2018-01-12T08:34:05'
-
-        # call convert_date_time method under test
-        actual_return = self.mon_plugin.convert_date_time(date_time)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-    def test_convert_date_time_invalid_date_time(self):
-        """Test convert date time method invalid input"""
-
-        # Mock the inputs
-        date_time = 0
-        expected_return = '0000-00-00T00:00:00'
-
-        # call convert_date_time method under test
-        actual_return = self.mon_plugin.convert_date_time(date_time)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_vm_resource_id_rest_valid_req_response(self, m_get):
-        """Test get vms resource id valid request"""
-
-        # Mock the inputs
-        vm_moref_id = 'vm-6626'
-        m_get.return_value.status_code = 200
-        expected_return = "ac87622f-b761-40a0-b151-00872a2a456e"
-        m_get.return_value.content = \
-        '{ \
-            "resourceList": [\
-               {\
-                   "creationTime": 1497770174130,\
-                   "resourceKey": {\
-                       "name": "OCInst2.ubuntu(4337d51f-1e65-4ab0-9c08-4897778d4fda)",\
-                       "adapterKindKey": "VMWARE",\
-                       "resourceKindKey": "VirtualMachine",\
-                       "resourceIdentifiers": [\
-                           {\
-                               "identifierType": {\
-                               "name": "VMEntityObjectID",\
-                               "dataType": "STRING",\
-                               "isPartOfUniqueness": true\
-                               },\
-                               "value": "vm-6626"\
-                           }\
-                       ]\
-                   },\
-                   "identifier": "ac87622f-b761-40a0-b151-00872a2a456e"\
-                }\
-            ]\
-        }'
-
-        # call get_vm_resource_id method under test
-        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_vm_resource_id_rest_invalid_req_response(self, m_get):
-        """Test get vms resource id invalid request"""
-
-        # Mock the inputs
-        vm_moref_id = 'vm-6626'
-        m_get.return_value.status_code = 406
-        expected_return = None
-        m_get.return_value.content = '406 Not Acceptable'
-
-        # call get_vm_resource_id method under test
-        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    def test_get_vm_resource_id_rest_invalid_response(self, m_get):
-        """Test get vms resource id invalid response"""
-
-        # Mock the inputs
-        vm_moref_id = 'vm-6626'
-        m_get.return_value.status_code = 200
-        expected_return = None
-        m_get.return_value.content = \
-        '{ \
-            "resourceList": \
-               {\
-                   "creationTime": 1497770174130,\
-                   "resourceKey": {\
-                       "name": "OCInst2.ubuntu(4337d51f-1e65-4ab0-9c08-4897778d4fda)",\
-                       "adapterKindKey": "VMWARE",\
-                       "resourceKindKey": "VirtualMachine",\
-                       "resourceIdentifiers": [\
-                           {\
-                               "identifierType": {\
-                               "name": "VMEntityObjectID",\
-                               "dataType": "STRING",\
-                               "isPartOfUniqueness": true\
-                               },\
-                               "value": "vm-6626"\
-                           }\
-                       ]\
-                   },\
-                   "identifier": "ac87622f-b761-40a0-b151-00872a2a456e"\
-                }\
-        }'
-
-        # call get_vm_resource_id method under test
-        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
-
-        # verify that mocked method is called
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vapp_details_rest')
-    def test_get_vm_moref_id_valid_id_found(self, m_get_vapp_details_rest):
-        """Test get vm moref id valid scenario"""
-
-        #mock the inputs
-        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        m_get_vapp_details_rest.return_value = {'vm_vcenter_info': {'vm_moref_id': 'vm-6626'}}
-        expected_return = 'vm-6626'
-
-        # call get_vm_moref_id method under test
-        actual_return = self.mon_plugin.get_vm_moref_id(vapp_uuid)
-
-        # verify that mocked method is called
-        m_get_vapp_details_rest.assert_called_with(vapp_uuid)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.MonPlugin, 'get_vapp_details_rest')
-    def test_get_vm_moref_id_valid_id_not_found(self, m_get_vapp_details_rest):
-        """Test get vm moref id invalid scenario"""
-
-        #mock the inputs
-        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda'#invalid uuid
-        m_get_vapp_details_rest.return_value = {}
-        expected_return = None
-
-        # call get_vm_moref_id method under test
-        actual_return = self.mon_plugin.get_vm_moref_id(vapp_uuid)
-
-        # verify that mocked method is called
-        m_get_vapp_details_rest.assert_called_with(vapp_uuid)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
-    def test_get_vapp_details_rest_valid_req_response(self, m_connect_as_admin, m_get):
-        """Test get vapp details rest method for valid request response"""
-
-        #mock the inputs
-        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        m_connect_as_admin.return_value = self.vca
-        self.vca._session = self.session
-        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
-        m_get.return_value.status_code = 200
-        expected_return = {'vm_vcenter_info': {'vm_moref_id': 'vm-6626'}}
-        m_get.return_value.content = '<?xml version="1.0" encoding="UTF-8"?>\
-        <VApp xmlns="http://www.vmware.com/vcloud/v1.5"  xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5" >\
-            <Children>\
-                <Vm needsCustomization="false"  type="application/vnd.vmware.vcloud.vm+xml">\
-                    <VCloudExtension required="false">\
-                        <vmext:VmVimInfo>\
-                            <vmext:VmVimObjectRef>\
-                                <vmext:VimServerRef  type="application/vnd.vmware.admin.vmwvirtualcenter+xml"/>\
-                                <vmext:MoRef>vm-6626</vmext:MoRef>\
-                                <vmext:VimObjectType>VIRTUAL_MACHINE</vmext:VimObjectType>\
-                            </vmext:VmVimObjectRef>\
-                        </vmext:VmVimInfo>\
-                    </VCloudExtension>\
-                </Vm>\
-            </Children>\
-        </VApp>'
-
-        # call get_vapp_details_rest method under test
-        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
-
-        # verify that mocked method is called
-        m_connect_as_admin.assert_called_with()
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
-    def test_get_vapp_details_rest_invalid_req_response(self, m_connect_as_admin, m_get):
-        """Test get vapp details rest method for invalid request response"""
-
-        #mock the inputs
-        vapp_uuid = 'Invalid-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        m_connect_as_admin.return_value = self.vca
-        self.vca._session = self.session
-        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
-        m_get.return_value.status_code = 400
-        expected_return = {}
-        m_get.return_value.content = 'Bad Request'
-
-        # call get_vapp_details_rest method under test
-        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
-
-        # verify that mocked method is called
-        m_connect_as_admin.assert_called_with()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
-    def test_get_vapp_details_rest_failed_to_connect_vcd(self, m_connect_as_admin, m_get):
-        """Test get vapp details rest method for failed to connect to vcd"""
-
-        #mock the inputs
-        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        m_connect_as_admin.return_value = None
-        expected_return = {}
-
-        # call get_vapp_details_rest method under test
-        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
-
-        # verify that mocked method is called
-        m_connect_as_admin.assert_called_with()
-        m_get.assert_not_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.requests, 'get')
-    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
-    def test_get_vapp_details_rest_invalid_response(self, m_connect_as_admin, m_get):
-        """Test get vapp details rest method for invalid response"""
-
-        #mock the inputs
-        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
-        m_connect_as_admin.return_value = self.vca
-        self.vca._session = self.session
-        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
-        m_get.return_value.status_code = 200
-        expected_return = {}
-        m_get.return_value.content = '<?xml version="1.0" encoding="UTF-8"?>\
-        <VApp xmlns="http://www.vmware.com/vcloud/v1.5"  xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5" >\
-            <Children>\
-                <Vm needsCustomization="false"  type="application/vnd.vmware.vcloud.vm+xml">\
-                    <VCloudExtension required="false">\
-                        <vmext:VmVimInfo>\
-                            <vmext:VmVimObjectRef>\
-                                <vmext:VimServerRef  type="application/vnd.vmware.admin.vmwvirtualcenter+xml"/>\
-                                <vmext:MoRef>vm-6626</vmext:MoRef>\
-                                <vmext:VimObjectType>VIRTUAL_MACHINE</vmext:VimObjectType>\
-                        </vmext:VmVimInfo>\
-                    </VCloudExtension>\
-                </Vm>\
-            </Children>\
-        </VApp>'
-
-        # call get_vapp_details_rest method under test
-        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
-
-        # verify that mocked method is called
-        m_connect_as_admin.assert_called_with()
-        m_get.assert_called()
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-    @mock.patch.object(monPlugin.Client, 'set_credentials')
-    @mock.patch.object(monPlugin, 'Client')
-    def test_connect_as_admin(self, m_client, m_set_credentials):
-        """Test connect as admin to vCD method"""
-
-        #mock the inputs and mocked returns
-        expected_return = m_client.return_value = self.vca
-        m_set_credentials.return_value = True
-
-        # call connect_as_admin method under test
-        actual_return = self.mon_plugin.connect_as_admin()
-
-        # verify that mocked method is called
-        m_client.assert_called_with(self.m_vim_access_config['vim_url'],
-                                                 verify_ssl_certs=False)
-
-        # verify return value with expected value
-        self.assertEqual(expected_return, actual_return)
-
-
-# For testing purpose
-#if __name__ == '__main__':
-#   unittest.main()
-
-
diff --git a/osm_mon/test/VMware/test_plugin_receiver.py b/osm_mon/test/VMware/test_plugin_receiver.py
deleted file mode 100644 (file)
index 1d63bd9..0000000
+++ /dev/null
@@ -1,1077 +0,0 @@
-# -*- coding: utf-8 -*-
-
-##
-# Copyright 2017-2018 VMware Inc.
-# This file is part of ETSI OSM
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact:  osslegalrouting@vmware.com
-##
-
-""" Mock tests for VMware vROPs plugin recevier """
-
-import sys
-#sys.path.append("/root/MON/")
-
-import json
-
-import logging
-
-import unittest
-
-import mock
-
-import requests
-
-import os
-
-log = logging.getLogger(__name__)
-
-sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),"..","..",".."))
-
-from osm_mon.plugins.vRealiseOps import plugin_receiver as monPluginRec
-from osm_mon.core.database import VimCredentials
-
-
-class Message(object):
-    """A class to mock a message object value for alarm and matric requests"""
-
-    def __init__(self):
-        """Initialize a mocked message instance"""
-        self.topic = "alarm_or_metric_request"
-        self.key = None
-        self.value = json.dumps({"mock_value": "mock_details"})
-        self.partition = 1
-        self.offset = 100
-
-
-class TestPluginReceiver(unittest.TestCase):
-    """Test class for Plugin Receiver class methods"""
-
-    def setUp(self):
-        """Setup the tests for plugin_receiver class methods"""
-        super(TestPluginReceiver, self).setUp()
-        self.plugin_receiver = monPluginRec.PluginReceiver()
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_alarm_status')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'create_alarm')
-    def test_consume_create_alarm_request_key(self, m_create_alarm,\
-                                              m_publish_create_alarm_status):
-        """Test functionality of consume for create_alarm_request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "alarm_request"
-        msg.key = "create_alarm_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"alarm_create_request":"alarm_details"})
-        m_create_alarm.return_value = "test_alarm_id"
-
-        config_alarm_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # verify if create_alarm and publish methods called with correct params
-        m_create_alarm.assert_called_with(config_alarm_info)
-        m_publish_create_alarm_status.assert_called_with("test_alarm_id", config_alarm_info)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_update_alarm_status')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'update_alarm')
-    def test_consume_update_alarm_request_key(self, m_update_alarm,\
-                                              m_publish_update_alarm_status):
-        """Test functionality of consume for update_alarm_request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "alarm_request"
-        msg.key = "update_alarm_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"alarm_update_request":"alarm_details"})
-
-        # set return value to mocked method
-        m_update_alarm.return_value = "test_alarm_id"
-
-        update_alarm_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # verify update_alarm and publish method called with correct params
-        m_update_alarm.assert_called_with(update_alarm_info)
-        m_publish_update_alarm_status.assert_called_with("test_alarm_id", update_alarm_info)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_delete_alarm_status')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'delete_alarm')
-    def test_consume_delete_alarm_request_key(self, m_delete_alarm,\
-                                              m_publish_delete_alarm_status):
-        """Test functionality of consume for delete_alarm_request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "alarm_request"
-        msg.key = "delete_alarm_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"alarm_delete_request":"alarm_details"})
-        m_delete_alarm.return_value = "test_alarm_id"
-
-        delete_alarm_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver and check delete_alarm request
-        self.plugin_receiver.consume(msg,vim_uuid)
-        m_delete_alarm.assert_called_with(delete_alarm_info)
-
-        # Check if publish method called with correct parameters
-        m_publish_delete_alarm_status.assert_called_with("test_alarm_id", delete_alarm_info)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_list_alarm_response')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'list_alarms')
-    def test_consume_list_alarm_request_key(self, m_list_alarms,\
-                                            m_publish_list_alarm_response):
-        """ Test functionality of list alarm request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "alarm_request"
-        msg.key = "list_alarm_request"
-        test_alarm_list = [{"alarm_uuid":"alarm1_details"},{"alarm_uuid":"alarm2_details"}]
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"alarm_list_request":"alarm_details"})
-        m_list_alarms.return_value = test_alarm_list
-
-        list_alarms_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver and check delete_alarm request
-        self.plugin_receiver.consume(msg,vim_uuid)
-        m_list_alarms.assert_called_with(list_alarms_info)
-
-        # Check if publish method called with correct parameters
-        m_publish_list_alarm_response.assert_called_with(test_alarm_list, list_alarms_info)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_access_update_response')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'update_access_credentials')
-    def test_consume_vim_access_request_key(self, m_update_access_credentials,\
-                                            m_publish_access_update_response):
-        """Test functionality of consume for vim_access_credentials request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "access_credentials"
-        msg.key = "vim_access_credentials"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"access_config":"access_details"})
-        # set return value to mocked method
-        m_update_access_credentials.return_value = True
-
-        access_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # check if mocked method called with required parameters
-        m_update_access_credentials.assert_called_with("access_details")
-
-        # Check if publish method called with correct parameters
-        m_publish_access_update_response.assert_called_with(True, access_info)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_alarm_status')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'create_alarm')
-    def test_consume_invalid_alarm_request_key(self, m_create_alarm,\
-                                               m_publish_create_alarm_status):
-        """Test functionality of consume for vim_access_credentials invalid request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message with invalid alarm request key
-        msg = Message()
-        msg.topic = "alarm_request"
-        msg.key = "invalid_alarm_request" # invalid key
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # verify that create_alarm and publish_create_alarm_status methods not called
-        m_create_alarm.assert_not_called()
-        m_publish_create_alarm_status.assert_not_called()
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_metrics_data_status')
-    @mock.patch.object(monPluginRec.MonPlugin, 'get_metrics_data')
-    def test_consume_invalid_metric_request_key(self, m_get_metrics_data,\
-                                                m_publish_metric_data_status):
-        """Test functionality of invalid metric key request"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message with invalid metric request key
-        msg = Message()
-        msg.topic = "metric_request"
-        msg.key = "invalid_metric_data_request" #invalid key
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # verify that get_metrics_data and publish methods are not called
-        m_get_metrics_data.assert_not_called()
-        m_publish_metric_data_status.assert_not_called()
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_metrics_data_status')
-    @mock.patch.object(monPluginRec.MonPlugin, 'get_metrics_data')
-    @mock.patch.object(monPluginRec.PluginReceiver,'get_vim_access_config')
-    def test_consume_read_metric_data_request_key(self, m_get_vim_access_config,\
-                                                  m_get_metrics_data,\
-                                                  m_publish_metric_data_status):
-        """Test functionality of consume for read_metric_data_request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "metric_request"
-        msg.key = "read_metric_data_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"metric_name":"metric_details"})
-        m_get_metrics_data.return_value = {"metrics_data":"metrics_details"}
-
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-            'vrops_user':'user',
-            'vrops_password':'passwd',
-            'vim_url':'vcd_url',
-            'admin_username':'admin',
-            'admin_password':'admin_passwd',
-            'vim_uuid':'1',
-            'tenant_id':'org_vdc_1'}
-
-        metric_request_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-        m_get_metrics_data.assert_called_with(metric_request_info)
-
-        # Check if publish method called with correct parameters
-        m_publish_metric_data_status.assert_called_with({"metrics_data":"metrics_details"})
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_metric_response')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'verify_metric')
-    def test_consume_create_metric_request_key(self, m_verify_metric,\
-                                               m_publish_create_metric_response):
-        """Test functionality of consume for create_metric_request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
-        # Mock a message
-        msg = Message()
-        msg.topic = "metric_request"
-        msg.key = "create_metric_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"metric_create":"metric_details"})
-
-        # set the return value
-        m_verify_metric.return_value = True
-
-        metric_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-        m_verify_metric.assert_called_with(metric_info)
-
-        # Check if publish method called with correct parameters
-        m_publish_create_metric_response.assert_called_with(metric_info, True)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_update_metric_response')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'verify_metric')
-    def test_consume_update_metric_request_key(self, m_verify_metric,\
-                                               m_publish_update_metric_response):
-        """Test functionality of update metric request key"""
-
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb" 
-        # Mock a message
-        msg = Message()
-        msg.topic = "metric_request"
-        msg.key = "update_metric_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"metric_create":"metric_details"})
-
-        # set the return value
-        m_verify_metric.return_value = True
-
-        metric_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # verify mocked methods called with correct parameters
-        m_verify_metric.assert_called_with(metric_info)
-        m_publish_update_metric_response.assert_called_with(metric_info, True)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_delete_metric_response')
-    def test_consume_delete_metric_request_key(self, m_publish_delete_metric_response):
-        """Test functionality of consume for delete_metric_request key"""
-
-        # Note: vROPS doesn't support deleting metric data
-        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"  
-        # Mock a message
-        msg = Message()
-        msg.topic = "metric_request"
-        msg.key = "delete_metric_request"
-
-        msg.value = json.dumps({"vim_uuid":vim_uuid,"metric_name":"metric_details"})
-
-        metric_info = json.loads(msg.value)
-
-        # Call the consume method of plugin_receiver
-        self.plugin_receiver.consume(msg,vim_uuid)
-
-        # Check if publish method called with correct parameters
-        m_publish_delete_metric_response.assert_called_with(metric_info)
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'configure_alarm')
-    @mock.patch.object(monPluginRec.MonPlugin, 'configure_rest_plugin')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_create_alarm_successful(self, m_get_vim_access_config,\
-                                     m_configure_rest_plugin,\
-                                     m_configure_alarm):
-        """ Test functionality of create alarm method-positive case"""
-
-        # Mock config_alarm_info
-        config_alarm_info = {"schema_version":1.0,
-                             "schema_type":"create_alarm_request",
-                             "vim_type":"VMware",
-                             "vim_uuid":"1",
-                             "alarm_create_request":{"correlation_id": 1,
-                             "alarm_name": "CPU_Utilize_Threshold",
-                             "metric_name": "CPU_UTILIZATION",
-                             "tenant_uuid": "tenant_uuid",
-                             "resource_uuid": "resource_uuid",
-                             "description": "test_create_alarm",
-                             "severity": "CRITICAL",
-                             "operation": "GT",
-                             "threshold_value": 10,
-                             "unit": "%",
-                             "statistic": "AVERAGE"}}
-
-        # set return value to plugin uuid
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-            'vrops_user':'user',
-            'vrops_password':'passwd',
-            'vim_url':'vcd_url',
-            'admin_username':'admin',
-            'admin_password':'admin_passwd',
-            'vim_uuid':'1',
-            'tenant_id':'org_vdc_1'}
-
-        m_configure_rest_plugin.return_value = "plugin_uuid"
-        m_configure_alarm.return_value = "alarm_uuid"
-
-        # call create alarm method under test
-        self.plugin_receiver.create_alarm(config_alarm_info)
-
-        # verify mocked methods get called with correct params
-        m_get_vim_access_config.assert_called_with(config_alarm_info['vim_uuid'])
-        m_configure_rest_plugin.assert_called_with()
-        m_configure_alarm.assert_called_with(config_alarm_info["alarm_create_request"])
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'configure_alarm')
-    @mock.patch.object(monPluginRec.MonPlugin, 'configure_rest_plugin')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_create_alarm_failed(self, m_get_vim_access_config,\
-                                m_configure_rest_plugin,\
-                                m_configure_alarm):
-        """ Test functionality of create alarm method negative case"""
-
-        # Mock config_alarm_info
-        config_alarm_info = {"schema_version":1.0,
-                             "schema_type":"create_alarm_request",
-                             "vim_type":"VMware",
-                             "vim_uuid":"1",
-                             "alarm_create_request":{"correlation_id": 1,
-                             "alarm_name": "CPU_Utilize_Threshold",
-                             "metric_name": "CPU_UTILIZATION",
-                             "tenant_uuid": "tenant_uuid",
-                             "resource_uuid": "resource_uuid",
-                             "description": "test_create_alarm",
-                             "severity": "CRITICAL",
-                             "operation": "GT",
-                             "threshold_value": 10,
-                             "unit": "%",
-                             "statistic": "AVERAGE"}}
-
-        # set return value to plugin uuid and alarm_uuid to None
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-            'vrops_user':'user',
-            'vrops_password':'passwd',
-            'vim_url':'vcd_url',
-            'admin_username':'admin',
-            'admin_password':'admin_passwd',
-            'vim_uuid':'1',
-            'tenant_id':'org_vdc_1'}
-        m_configure_rest_plugin.return_value = "plugin_uuid"
-        m_configure_alarm.return_value = None
-
-        # call create alarm method under test
-        alarm_uuid = self.plugin_receiver.create_alarm(config_alarm_info)
-
-        # verify mocked method called with correct params
-        m_get_vim_access_config.assert_called_with(config_alarm_info['vim_uuid'])
-        m_configure_rest_plugin.assert_called_with()
-        m_configure_alarm.assert_called_with(config_alarm_info["alarm_create_request"])
-
-        # verify create alarm method returns None when failed
-        self.assertEqual(alarm_uuid, None)
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'update_alarm_configuration')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_update_alarm_successful(self, m_get_vim_access_config, m_update_alarm_configuration):
-        """ Test functionality of update alarm method-positive case"""
-
-        # Mock update_alarm_info
-        update_alarm_info = {"schema_version":1.0,"schema_type":"update_alarm_request",
-                             "vim_type":"VMware","vim_uuid":"1",
-                             "alarm_update_request":{'alarm_uuid': 'abc', 'correlation_id': 14203}}
-
-        # set return value to mocked method
-        m_update_alarm_configuration.return_value = "alarm_uuid"
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-        # check update alarm gets called and returned correct value
-        ret_value = self.plugin_receiver.update_alarm(update_alarm_info)
-
-        # check mocked method get called with correct param
-        m_get_vim_access_config.assert_called_with(update_alarm_info['vim_uuid'])
-        m_update_alarm_configuration.assert_called_with(update_alarm_info["alarm_update_request"])
-
-        # check return value and passed values are correct
-        self.assertEqual(ret_value, "alarm_uuid")
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'update_alarm_configuration')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_update_alarm_failed(self, m_get_vim_access_config, m_update_alarm_configuration):
-        """ Test functionality of update alarm method negative case"""
-
-        # Mock update_alarm_info
-        update_alarm_info = {"schema_version":1.0,"schema_type":"update_alarm_request",
-                             "vim_type":"VMware","vim_uuid":"1",
-                             "alarm_update_request":{'alarm_uuid': 'abc', 'correlation_id': 14203}}
-
-        # set return value to mocked method
-        m_update_alarm_configuration.return_value = None
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-        # check update alarm gets called and returned correct value
-        ret_value = self.plugin_receiver.update_alarm(update_alarm_info)
-
-        # check mocked method get called with correct param
-        m_get_vim_access_config.assert_called_with(update_alarm_info['vim_uuid'])
-        m_update_alarm_configuration.assert_called_with(update_alarm_info["alarm_update_request"])
-
-        # check return value and passed values are correct
-        self.assertEqual(ret_value, None)
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'delete_alarm_configuration')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_delete_alarm_successful(self, m_get_vim_access_config, m_delete_alarm_configuration):
-        """ Test functionality of delete alarm method-positive case"""
-
-        # Mock delete_alarm_info
-        delete_alarm_info = {"schema_version":1.0,"schema_type":"delete_alarm_request",
-                             "vim_type":"VMware","vim_uuid":"1",
-                             "alarm_delete_request":{'alarm_uuid': 'abc', 'correlation_id': 14203}}
-
-        # set return value to mocked method
-        m_delete_alarm_configuration.return_value = "alarm_uuid"
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-        # check delete alarm gets called and returned correct value
-        ret_value = self.plugin_receiver.delete_alarm(delete_alarm_info)
-
-        # check mocked method get called with correct param
-        m_get_vim_access_config.assert_called_with(delete_alarm_info['vim_uuid'])
-        m_delete_alarm_configuration.assert_called_with(delete_alarm_info["alarm_delete_request"])
-
-        # check return value and passed values are correct
-        self.assertEqual(ret_value, "alarm_uuid")
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'delete_alarm_configuration')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_delete_alarm_failed(self, m_get_vim_access_config, m_delete_alarm_configuration):
-        """ Test functionality of delete alarm method-negative case"""
-
-        # Mock update_alarm_info
-        delete_alarm_info = {"schema_version":1.0,"schema_type":"delete_alarm_request",
-                             "vim_type":"VMware","vim_uuid":"1",
-                             "alarm_delete_request":{'alarm_uuid': 'abc', 'correlation_id': 14203}}
-
-        # set return value to mocked method
-        m_delete_alarm_configuration.return_value = None
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-        # check delete alarm gets called and returned correct value
-        ret_value = self.plugin_receiver.delete_alarm(delete_alarm_info)
-
-        # check mocked method get called with correct param
-        m_get_vim_access_config.assert_called_with(delete_alarm_info['vim_uuid'])
-        m_delete_alarm_configuration.assert_called_with(delete_alarm_info["alarm_delete_request"])
-
-        # check return value to check failed status
-        self.assertEqual(ret_value, None)
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_create_alarm_status(self, m_publish):
-        """ Test functionality of publish create alarm status method"""
-
-        # Mock config_alarm_info
-        config_alarm_info = {'vim_type': 'VMware', "vim_uuid":"1",
-                             'alarm_create_request': {
-                                 'threshold_value': 0,
-                                 'severity': 'CRITICAL',
-                                 'alarm_name': 'CPU_Utilization_Above_Threshold',
-                                 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
-                                 'correlation_id': 1234,
-                                 'statistic': 'AVERAGE',
-                                 'metric_name': 'CPU_UTILIZATION'}
-                            }
-
-        alarm_uuid = "xyz"
-
-        # call publish create status method under test
-        self.plugin_receiver.publish_create_alarm_status(alarm_uuid, config_alarm_info)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='create_alarm_response',\
-                                     value=mock.ANY,\
-                                     topic='alarm_response')
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_update_alarm_status(self, m_publish):
-        """ Test functionality of publish update alarm status method"""
-
-        # Mock update_alarm_info
-        update_alarm_info = {'vim_type' : 'VMware',
-                             'vim_uuid':'1',
-                             'schema_type': 'update_alarm_request',
-                             'alarm_update_request':{'alarm_uuid': '6486e69',
-                                                     'correlation_id': 14203,
-                                                     'operation': 'GT'
-                                                     }
-                            }
-
-        alarm_uuid = "xyz"
-
-        # call publish update alarm status method under test
-        self.plugin_receiver.publish_update_alarm_status(alarm_uuid, update_alarm_info)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='update_alarm_response',\
-                                     value=mock.ANY,\
-                                     topic='alarm_response')
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_delete_alarm_status(self, m_publish):
-        """ Test functionality of publish delete alarm status method"""
-
-        # Mock delete_alarm_info
-        delete_alarm_info = {'vim_type' : 'VMware',
-                             "vim_uuid":"1",
-                             'schema_type': 'delete_alarm_request',
-                             'alarm_delete_request':{'alarm_uuid': '6486e69',
-                                                     'correlation_id': 14203,
-                                                     'operation': 'GT'
-                                                     }
-                            }
-
-        alarm_uuid = "xyz"
-
-        # call publish delete alarm status method under test
-        self.plugin_receiver.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='delete_alarm_response',\
-                                     value=mock.ANY,\
-                                     topic='alarm_response')
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_metrics_data_status(self, m_publish):
-        """ Test functionality of publish metrics data status method"""
-
-        # Mock metrics data
-        metrics_data = {
-                        'vim_uuid':'1',
-                        'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
-                        'resource_uuid': 'e14b20', 'correlation_id': 14203,
-                        'metrics_data': {'time_series': [15162011, 15162044],
-                        'metrics_series': [0.1166666671, 0.1266666650]},
-                        'tenant_uuid': 123, 'unit': '%'
-                       }
-
-        # call publish metrics data status method under test
-        self.plugin_receiver.publish_metrics_data_status(metrics_data)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='read_metric_data_response',\
-                                     value=mock.ANY,\
-                                     topic='metric_response')
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'verify_metric_support')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_verify_metric_supported_metric(self, m_get_vim_access_config,\
-                                            m_verify_metric_support):
-        """ Test functionality of verify metric method"""
-
-        # mock metric_info
-        metric_info = {'vim_uuid':'1',\
-                       'metric_create_request':{'metric_unit': '%',\
-                                        'metric_name': 'CPU_UTILIZATION',\
-                                        'resource_uuid': 'e14b203'}}
-
-        # set mocked function return value to true
-        m_verify_metric_support.return_value = True
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-        # call verify_metric method under test
-        ret_value = self.plugin_receiver.verify_metric(metric_info)
-
-        # verify mocked method called with correct params
-        m_get_vim_access_config.assert_called_with(metric_info['vim_uuid'])
-        m_verify_metric_support.assert_called_with(metric_info['metric_create_request'])
-
-        # verify the return value
-        self.assertEqual(ret_value, True)
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'verify_metric_support')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_verify_metric_unsupported_metric(self, m_get_vim_access_config,\
-                                              m_verify_metric_support):
-        """ Test functionality of verify metric method-negative case"""
-
-        # mock metric_info with unsupported metrics name
-        metric_info = {'vim_uuid':'1',\
-                       'metric_create_request':{'metric_unit': '%',\
-                                        'metric_name': 'Invalid',\
-                                        'resource_uuid': 'e14b203'}}
-
-        # set mocked function return value to true
-        m_verify_metric_support.return_value = False
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-        # call verify_metric method under test
-        ret_value = self.plugin_receiver.verify_metric(metric_info)
-
-        # verify mocked method called with correct params
-        m_get_vim_access_config.assert_called_with(metric_info['vim_uuid'])
-        m_verify_metric_support.assert_called_with(metric_info['metric_create_request'])
-
-        # verify the return value
-        self.assertEqual(ret_value, False)
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_create_metric_response(self, m_publish):
-        """ Test functionality of publish create metric response method"""
-
-        # Mock metric_info
-        metric_info = {
-                       'vim_uuid':'1',
-                       'vim_type' : 'VMware',
-                       'correlation_id': 14203,
-                       'schema_type': 'create_metric_request',
-                       'metric_create_request':{
-                                        'resource_uuid': '6486e69',
-                                        'metric_name': 'CPU_UTILIZATION',
-                                        'metric_unit': '%'
-                                        }
-                       }
-
-        metric_status = True
-
-        # call publish create metric method under test
-        self.plugin_receiver.publish_create_metric_response(metric_info, metric_status)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='create_metric_response',\
-                                     value=mock.ANY,\
-                                     topic='metric_response')
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_update_metric_response(self, m_publish):
-        """ Test functionality of publish update metric response method"""
-
-        # Mock metric_info
-        metric_info = {
-                       'vim_uuid':'1',
-                       'vim_type' : 'VMware',
-                       'correlation_id': 14203,
-                       'schema_type': 'update_metric_request',
-                       'metric_create':{
-                                        'resource_uuid': '6486e69',
-                                        'metric_name': 'CPU_UTILIZATION',
-                                        'metric_unit': '%'
-                                        }
-                       }
-
-        metric_status = True
-
-        # call publish update metric method under test
-        self.plugin_receiver.publish_update_metric_response(metric_info, metric_status)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='update_metric_response',\
-                                     value=mock.ANY,\
-                                     topic='metric_response')
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_delete_metric_response(self, m_publish):
-        """ Test functionality of publish delete metric response method"""
-
-        # Mock metric_info
-        metric_info = {'vim_uuid':'1', 'vim_type' : 'VMware','correlation_id': 14203,
-                       'metric_uuid': 'e14b203c', 'resource_uuid': '6486e69',
-                       'metric_name': 'CPU_UTILIZATION',
-                       'schema_type': 'delete_metric_request'}
-
-        metric_status = True
-
-        # call publish delete metric method under test - vROPS doesn't support
-        # deleting metrics, so it just returns a success response
-        self.plugin_receiver.publish_delete_metric_response(metric_info)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key='delete_metric_response',\
-                                     value=mock.ANY,\
-                                     topic='metric_response')
-
-
-    @mock.patch.object(monPluginRec.MonPlugin, 'get_triggered_alarms_list')
-    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
-    def test_list_alarms(self, m_get_vim_access_config, m_get_triggered_alarms_list):
-        """ Test functionality of list alarms method"""
-
-        # Mock list alarm input
-        list_alarm_input = {
-                            'vim_uuid':'1',
-                            'vim_type' : 'VMware',
-                            'alarm_list_request':{
-                                'severity': 'CRITICAL',
-                                'correlation_id': 14203,
-                                'alarm_name': 'CPU_Utilization_Above_Threshold',
-                                'resource_uuid': 'd14b203c'}}
-
-        # set return value to mocked method
-        m_return = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
-                     'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
-                     'cancel_date': '0000-00-00T00:00:00','alarm_instance_uuid': 'd9e3bc84',
-                     'alarm_uuid': '5714977d', 'vim_type': 'VMware',
-                     'start_date': '2018-01-12T08:34:05'},
-                    {'status': 'CANCELED', 'update_date': '2017-12-20T09:37:57',
-                     'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
-                     'cancel_date': '2018-01-12T06:49:19', 'alarm_instance_uuid': 'd3bbeef6',
-                     'alarm_uuid': '7ba1bf3e', 'vim_type': 'VMware',
-                     'start_date': '2017-12-20T09:37:57'}]
-        m_get_triggered_alarms_list.return_value = m_return
-
-        m_get_vim_access_config.return_value = {'vrops_site':'abc',
-                                                'vrops_user':'user',
-                                                'vrops_password':'passwd',
-                                                'vim_url':'vcd_url',
-                                                'admin_username':'admin',
-                                                'admin_password':'admin_passwd',
-                                                'vim_uuid':'1',
-                                                'tenant_id':'org_vdc_1'}
-
-
-        # call list alarms method under test
-        return_value = self.plugin_receiver.list_alarms(list_alarm_input)
-
-        # verify mocked method called with correct params
-        m_get_vim_access_config.assert_called_with(list_alarm_input['vim_uuid'])
-        m_get_triggered_alarms_list.assert_called_with(list_alarm_input['alarm_list_request'])
-
-        # verify list alarm method returns correct list
-        self.assertEqual(return_value, m_return)
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_list_alarm_response(self, m_publish):
-        """ Test functionality of publish list alarm response method"""
-
-        # Mock list alarm input
-        msg_key = 'list_alarm_response'
-        topic = 'alarm_response'
-        list_alarm_input = {'vim_uuid':'1',
-                            'vim_type' : 'VMware',
-                            'alarm_list_request':{
-                                'severity': 'CRITICAL',
-                                'correlation_id': 14203,
-                                'alarm_name': 'CPU_Utilization_Above_Threshold',
-                                'resource_uuid': 'd14b203c'}}
-
-        triggered_alarm_list = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
-                                 'severity': 'CRITICAL','resource_uuid': 'e14b203c',
-                                 'cancel_date': '0000-00-00T00:00:00',
-                                 'start_date': '2018-01-12T08:34:05',
-                                 'alarm_instance_uuid': 'd9e3bc84',
-                                 'alarm_uuid': '5714977d',
-                                 'vim_type': 'VMware'
-                                 }]
-
-        # call publish list alarm response method under test
-        self.plugin_receiver.publish_list_alarm_response(triggered_alarm_list, list_alarm_input)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key=msg_key,value=mock.ANY, topic=topic)
-
-
-    @mock.patch.object(monPluginRec.KafkaProducer, 'publish')
-    def test_publish_access_update_response(self, m_publish):
-        """ Test functionality of publish access update response method"""
-
-        # Mock required inputs
-        access_update_status = True
-        msg_key = 'vim_access_credentials_response'
-        topic = 'access_credentials'
-        access_info_req = {'vim_type': 'VMware',
-                           'vim_uuid':'1',
-                           'access_config': {'vrops_password': 'vmware',
-                                             'vcloud-site': 'https://192.169.241.105',
-                                             'vrops_user': 'Admin', 'correlation_id': 14203,
-                                             'tenant_id': 'Org2'}
-                           }
-
-        # call publish access update response method under test
-        self.plugin_receiver.publish_access_update_response(access_update_status, access_info_req)
-
-        # verify mocked method called with correct params
-        m_publish.assert_called_with(key=msg_key ,value=mock.ANY, topic=topic)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
-    def test_update_access_credentials_successful(self, m_write_access_config):
-        """ Test functionality of update access credentials-positive case"""
-
-        # Mock access_info
-        access_info = {'vrops_site':'https://192.169.241.13','vrops_user':'admin',
-                       'vrops_password':'vmware','vcloud-site':'https://192.169.241.15',
-                       'admin_username':'admin','admin_password':'vmware',
-                       'vcenter_ip':'192.169.241.13','vcenter_port':'443',
-                       'vcenter_user':'admin','vcenter_password':'vmware',
-                       'vim_tenant_name':'Org2','orgname':'Org2','tenant_id':'Org2'}
-
-        # Mock return values
-        expected_status = m_write_access_config.return_value = True
-
-        # call publish update access credentials method under test
-        actual_status = self.plugin_receiver.update_access_credentials(access_info)
-
-        # check write_access_config called with correct params
-        m_write_access_config.assert_called_with(access_info)
-
-        # verify update access credentials returns correct status
-        self.assertEqual(expected_status, actual_status)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
-    def test_update_access_credentials_less_config_params(self, m_write_access_config):
-        """ Test functionality of update access credentials-negative case"""
-
-        # Mock access_info
-        access_info = {'vrops_site':'https://192.169.241.13','vrops_user':'admin',
-                       'vrops_password':'vmware','vcloud-site':'https://192.169.241.15',
-                       'admin_username':'admin','admin_password':'vmware',
-                       'vcenter_ip':'192.169.241.13','vcenter_port':'443','vcenter_user':'admin',
-                       'vim_tenant_name':'Org2','orgname':'Org2','tenant_id':'Org2'}
-
-        # Mock return values
-        expected_status = m_write_access_config.return_value = False
-
-        # call publish update access credentials method under test
-        actual_status = self.plugin_receiver.update_access_credentials(access_info)
-
-        # check if mocked method not called
-        m_write_access_config.assert_not_called()
-
-        # verify update access credentials returns correct status
-        self.assertEqual(expected_status, actual_status)
-
-
-    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
-    def test_update_access_credentials_failed(self, m_write_access_config):
-        """ Test functionality of update access credentials-failed case """
-
-        # Mock access_info
-        access_info = {'vrops_site':'https://192.169.241.13','vrops_user':'admin',
-                       'vrops_password':'vmware','vcloud-site':'https://192.169.241.15',
-                       'admin_username':'admin','admin_password':'vmware',
-                       'vcenter_ip':'192.169.241.13','vcenter_port':'443',
-                       'vcenter_user':'admin','vcenter_password':'vmware',
-                       'vim_tenant_name':'Org2','orgname':'Org2','tenant_id':'Org2'}
-
-        # Mock return values
-        expected_status = m_write_access_config.return_value = False
-
-        # call publish update access credentials method under test
-        actual_status = self.plugin_receiver.update_access_credentials(access_info)
-
-        # check write_access_config called with correct params
-        m_write_access_config.assert_called_with(access_info)
-
-        # verify update access credentials returns correct status
-        self.assertEqual(expected_status, actual_status)
-
-
-    def test_write_access_config_successful(self):
-        """ Test functionality of write access config method-positive case"""
-
-        # Mock access_info
-        access_info = {'vrops_site':'https://192.169.241.13','vrops_user':'admin',
-                       'vrops_password':'vmware','vcloud-site':'https://192.169.241.15',
-                       'admin_username':'admin','admin_password':'vmware',
-                       'vcenter_ip':'192.169.241.13','vcenter_port':'443',
-                       'vcenter_user':'admin','vcenter_password':'vmware',
-                       'vim_tenant_name':'Org2','orgname':'Org2','tenant_id':'Org2'}
-
-        # call write access config method under test
-        actual_status = self.plugin_receiver.write_access_config(access_info)
-
-        # verify write access config returns correct status
-        self.assertEqual(True, actual_status)
-
-
-    def test_write_access_config_failed(self):
-        """ Test functionality of write access config method-negative case"""
-
-        # Mock access_info
-        access_info = [] # provided incorrect info to generate error
-
-        # call write access config method under test
-        actual_status = self.plugin_receiver.write_access_config(access_info)
-
-        # verify write access config returns correct status
-        self.assertEqual(False, actual_status)
-
-
-    @mock.patch.object(monPluginRec.AuthManager, 'get_credentials')
-    def test_get_vim_access_config(self, m_get_credentials):
-        """ Test functionality of get_vim_access_config method-positive case"""
-
-        # Mock vim_uuid & access_info
-        vim_uuid = '1'
-        vim_details = VimCredentials()
-        vim_details.name = "vrops_vcd82"
-        vim_details.password = "passwd"
-        vim_details.tenant_name = "MANO-VDC"
-        vim_details.type = "VMware"
-        vim_details.url = "https://10.10.1.1"
-        vim_details.user = "admin"
-        vim_details.uuid = "1"
-        vim_details.config = '{"orgname": "MANO-Org", "tenant_id": "MANO-VDC",\
-                        "admin_username": "administrator","admin_password": "vcd_pwd",\
-                        "vrops_user": "admin", "vrops_password": "vrops_pwd",\
-                        "vrops_site": "https://10.10.1.2","nsx_user": "admin",\
-                        "nsx_manager": "https://10.10.1.3", "nsx_password":"nsx_pwd",\
-                        "sdn_controller": "None", "sdn_port_mapping": "None",\
-                        "vcenter_ip": "10.10.1.4", "vcenter_user": "admin@vsphere.local",\
-                        "vcenter_password": "vcenter_pwd", "vcenter_port": "443"}'
-        m_get_credentials.return_value = vim_details
-        expected_config = {'vrops_password': 'vrops_pwd', 'vcenter_password': 'vcenter_pwd',
-                         'name': 'vrops_vcd82', 'org_user': 'admin',
-                         'org_password': 'passwd', 'nsx_user': 'admin', 'vim_tenant_name': 'MANO-VDC',
-                         'admin_username': 'administrator', 'vcenter_port': '443',
-                         'vim_url': 'https://10.10.1.1', 'orgname': 'MANO-Org',
-                         'admin_password':'vcd_pwd', 'vrops_user':'admin', 'vcenter_ip':'10.10.1.4',
-                         'vrops_site': 'https://10.10.1.2', 'nsx_manager': 'https://10.10.1.3',
-                         'nsx_password': 'nsx_pwd', 'vim_type': 'VMware', 'vim_uuid': '1',
-                         'vcenter_user': 'admin@vsphere.local'}
-
-        # call get_vim_access_config method under test
-        actual_config = self.plugin_receiver.get_vim_access_config('1')
-
-        #verify that mocked method is called
-        m_get_credentials.assert_called_with(vim_uuid)
-
-        #Verify return value with expected value
-        self.assertEqual(expected_config, actual_config)
-
-
-# For testing purpose
-#if __name__ == '__main__':
-
-#    unittest.main()
-
diff --git a/osm_mon/test/collector/__init__.py b/osm_mon/test/collector/__init__.py
new file mode 100644 (file)
index 0000000..2d39b96
--- /dev/null
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
\ No newline at end of file
diff --git a/osm_mon/test/collector/test_collector.py b/osm_mon/test/collector/test_collector.py
new file mode 100644 (file)
index 0000000..84905b7
--- /dev/null
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import asyncio
+import random
+import unittest
+
+from mock import mock
+
+from osm_mon.collector.collector import MonCollector
+
+
+class MonCollectorTest(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(None)
+
+    def test_generate_vca_vdu_name(self):
+        vdur_name = 'test-juju-metrics01-1-ubuntuvdu1-1'
+        expected = 'test-juju-metricsab-b-ubuntuvdub'
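+        # per the fixture above, digits in the VDUR name are expected to be mapped
+        # to letters (0 -> a, 1 -> b) and the trailing VDU index dropped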
+        result = self.loop.run_until_complete(MonCollector._generate_vca_vdu_name(vdur_name))
+        self.assertEqual(result, expected)
+
+    @mock.patch.object(random, 'randint')
+    def test_generate_read_metric_payload(self, randint):
+        randint.return_value = 1
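+        # correlation_id in the payload is generated via random.randint,
+        # mocked here so the expected payload is deterministic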
+        metric_name = 'cpu_utilization'
+        nsr_id = 'test_id'
+        vdu_name = 'test_vdu'
+        vnf_member_index = 1
+        expected_payload = {
+            'correlation_id': 1,
+            'metric_name': metric_name,
+            'ns_id': nsr_id,
+            'vnf_member_index': vnf_member_index,
+            'vdu_name': vdu_name,
+            'collection_period': 1,
+            'collection_unit': 'DAY',
+        }
+        result = self.loop.run_until_complete(
+            MonCollector._generate_read_metric_payload(metric_name, nsr_id, vdu_name, vnf_member_index))
+        self.assertEqual(result, expected_payload)
diff --git a/osm_mon/test/core/kafka_test.py b/osm_mon/test/core/kafka_test.py
deleted file mode 100644 (file)
index 7e4267c..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
-
-#__author__ = "Prithiv Mohan"
-#__date__   = "25/Sep/2017"
-
-import sys
-import threading
-import pytest
-from kafka import KafkaConsumer, KafkaProducer
-
-def test_end_to_end():
-    producer = KafkaProducer(bootstrap_servers='localhost:9092',
-                             retries=5,
-                             max_block_ms=10000,
-                             value_serializer=str.encode)
-    consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
-                             group_id=None,
-                             consumer_timeout_ms=10000,
-                             auto_offset_reset='earliest',
-                             value_deserializer=bytes.decode)
-
-    topic = 'TutorialTopic'
-
-    messages = 100
-    futures = []
-    for i in range(messages):
-        futures.append(producer.send(topic, 'msg %d' % i))
-    ret = [f.get(timeout=30) for f in futures]
-    assert len(ret) == messages
-
-    producer.close()
-
-    consumer.subscribe([topic])
-    msgs = set()
-    for i in range(messages):
-        try:
-            msgs.add(next(consumer).value)
-        except StopIteration:
-            break
-
-    assert msgs == set(['msg %d' % i for i in range(messages)])
index ddbdf8b..adbcb1d 100644 (file)
@@ -1,11 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
 import unittest
 
 import mock
 from kafka import KafkaProducer
 from kafka.errors import KafkaError
+from osm_common import dbmongo
 
-from osm_mon.core.database import VimCredentials
-from osm_mon.core.message_bus.common_consumer import *
+from osm_mon.core.database import VimCredentials, DatabaseManager
+from osm_mon.core.message_bus.common_consumer import CommonConsumer
 
 
 @mock.patch.object(dbmongo.DbMongo, "db_connect", mock.Mock())
@@ -51,7 +74,8 @@ class CommonConsumerTest(unittest.TestCase):
                                         'internal-connection-point': [],
                                         'vdu-id-ref': 'ubuntuvnf_vnfd-VM',
                                         'id': 'ffd73f33-c8bb-4541-a977-44dcc3cbe28d',
-                                        'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7'
+                                        'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7',
+                                        'name': 'ubuntuvnf_vnfd-VM'
                                     }
                                 ],
                                 'vnfd-ref': 'ubuntuvnf_vnfd',
@@ -60,12 +84,13 @@ class CommonConsumerTest(unittest.TestCase):
                                 'vnfd-id': 'a314c865-aee7-4d9b-9c9d-079d7f857f01',
                                 'id': 'a314c865-aee7-4d9b-9c9d-079d7f857f01'}
         common_consumer = CommonConsumer()
-        vdur = common_consumer.get_vdur('5ec3f571-d540-4cb0-9992-971d1b08312e', '1', 'ubuntuvnf_vnfd-VM')
+        vdur = common_consumer.common_db.get_vdur('5ec3f571-d540-4cb0-9992-971d1b08312e', '1', 'ubuntuvnf_vnfd-VM')
         expected_vdur = {
             'internal-connection-point': [],
             'vdu-id-ref': 'ubuntuvnf_vnfd-VM',
             'id': 'ffd73f33-c8bb-4541-a977-44dcc3cbe28d',
-            'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7'
+            'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7',
+            'name': 'ubuntuvnf_vnfd-VM'
         }
 
         self.assertDictEqual(vdur, expected_vdur)
diff --git a/osm_mon/test/core/test_producer.py b/osm_mon/test/core/test_producer.py
deleted file mode 100644 (file)
index 5dc3caf..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2017 Intel Research and Development Ireland Limited
-# *************************************************************
-
-# This file is part of OSM Monitoring module
-# All Rights Reserved to Intel Corporation
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-
-#         http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# For those usages not covered by the Apache License, Version 2.0 please
-# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
-##
-"""This is a KafkaProducer with a request function to test the plugins."""
-
-import json
-
-import logging as log
-
-import os
-
-import jsmin
-
-from kafka import KafkaProducer as kaf
-
-from kafka.errors import KafkaError
-
-
-class KafkaProducer(object):
-    """A KafkaProducer for testing purposes."""
-
-    def __init__(self, topic):
-        """Initialize a KafkaProducer and it's topic."""
-        self._topic = topic
-
-        if "ZOOKEEPER_URI" in os.environ:
-            broker = os.getenv("ZOOKEEPER_URI")
-        else:
-            broker = "localhost:9092"
-
-            '''
-            If the zookeeper broker URI is not set in the env, by default,
-            localhost container is taken as the host because an instance of
-            is already running.
-            '''
-
-        self.producer = kaf(
-            key_serializer=str.encode,
-            value_serializer=lambda v: json.dumps(v).encode('ascii'),
-            bootstrap_servers=broker, api_version=(0, 10))
-
-    def publish(self, key, value, topic):
-        """Send messages to the message bus with a defing key and topic."""
-        try:
-            future = self.producer.send(topic=topic, key=key, value=value)
-            self.producer.flush()
-        except Exception:
-            log.exception("Error publishing to {} topic." .format(topic))
-            raise
-        try:
-            record_metadata = future.get(timeout=10)
-            log.debug("TOPIC:", record_metadata.topic)
-            log.debug("PARTITION:", record_metadata.partition)
-            log.debug("OFFSET:", record_metadata.offset)
-        except KafkaError:
-            pass
-
-    def request(self, path, key, message, topic):
-        """Test json files are loaded and sent on the message bus."""
-        # External to MON
-        payload_create_alarm = jsmin(open(os.path.join(path)).read())
-        self.publish(key=key,
-                     value=json.loads(payload_create_alarm),
-                     topic=topic)
diff --git a/osm_mon/test/functional/__init__.py b/osm_mon/test/functional/__init__.py
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/osm_mon/test/plugins/CloudWatch/__init__.py b/osm_mon/test/plugins/CloudWatch/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json b/osm_mon/test/plugins/CloudWatch/test_schemas/alarm_details/acknowledge_alarm.json
new file mode 100644 (file)
index 0000000..341b2bd
--- /dev/null
@@ -0,0 +1,11 @@
+{
+"schema_version": "1.0",
+"schema_type": "alarm_ack",
+"vim_type": "AWS",
+"ack_details":
+{
+"alarm_uuid": "CPU_Utilization_i-098da78cbd8304e17",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": ""
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_differentInstance.json
new file mode 100644 (file)
index 0000000..ecf403e
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_differentName_sameInstance.json
new file mode 100644 (file)
index 0000000..17c423d
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold1",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_differentInstance.json
new file mode 100644 (file)
index 0000000..b2f5acb
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold",
+"resource_uuid": "i-09462760703837b26",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/create_alarm_sameName_sameInstance.json
new file mode 100644 (file)
index 0000000..ecf403e
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/operation_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/operation_invalid.json
new file mode 100644 (file)
index 0000000..31e1e0b
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "Greaterthan",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/operation_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/operation_valid.json
new file mode 100644 (file)
index 0000000..adb789b
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/statistic_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/statistic_invalid.json
new file mode 100644 (file)
index 0000000..8c2e68d
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAX"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/statistic_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_alarm/statistic_valid.json
new file mode 100644 (file)
index 0000000..adb789b
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold2",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..0fe0dcb
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZ",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/create_metrics/create_metric_req_valid.json
new file mode 100644 (file)
index 0000000..18cc23c
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZATION",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_invalid.json
new file mode 100644 (file)
index 0000000..e51a670
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e16",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid.json
new file mode 100644 (file)
index 0000000..a2cd4b5
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete1.json
new file mode 100644 (file)
index 0000000..f465df7
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold1_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete2.json
new file mode 100644 (file)
index 0000000..1fa6870
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-09462760703837b26",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete3.json
new file mode 100644 (file)
index 0000000..6c35ab2
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold2_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_alarm/name_valid_delete4.json
new file mode 100644 (file)
index 0000000..716b039
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_alarm_request",
+"vim_type": "AWS",
+"alarm_delete_request":
+{
+"alarm_uuid": "CPU_Utilization_Above_Threshold4_i-098da78cbd8304e17",
+"correlation_id": "SO123"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..f30ab87
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_metric_data_request",
+"metric_name": "CPU_UTILIATION",
+"metric_uuid": "",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "S0123",
+"vim_type": "AWS"
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/delete_metrics/delete_metric_req_valid.json
new file mode 100644 (file)
index 0000000..ea3922b
--- /dev/null
@@ -0,0 +1,10 @@
+{
+"schema_version": "1.0",
+"schema_type": "delete_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "S0123",
+"vim_type": "AWS"
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json b/osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_no_arguments.json
new file mode 100644 (file)
index 0000000..a4d02a3
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_alarm_request",
+"vim_type": "AWS",
+"alarm_list_request":
+{
+"correlation_id": "SO123",
+"resource_uuid": "",
+"alarm_name": "",
+"severity": ""
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json b/osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_one_argument.json
new file mode 100644 (file)
index 0000000..d0f31f2
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_alarm_request",
+"vim_type": "AWS",
+"alarm_list_request":
+{
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17",
+"alarm_name": "",
+"severity": ""
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json b/osm_mon/test/plugins/CloudWatch/test_schemas/list_alarm/list_alarm_valid_two_arguments.json
new file mode 100644 (file)
index 0000000..bf46579
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_alarm_request",
+"vim_type": "AWS",
+"alarm_list_request":
+{
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17",
+"alarm_name": "",
+"severity": "Critical"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..6108e77
--- /dev/null
@@ -0,0 +1,11 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_metrics_request",
+"vim_type": "AWS",
+"metrics_list_request":
+{
+"metric_name": "CPU_UTILZATION",
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/list_metrics/list_metric_req_valid.json
new file mode 100644 (file)
index 0000000..b1bd9de
--- /dev/null
@@ -0,0 +1,11 @@
+{
+"schema_version": "1.0",
+"schema_type": "list_metrics_request",
+"vim_type": "AWS",
+"metrics_list_request":
+{
+"metric_name": "CPU_UTILIZATION",
+"correlation_id": "SO123",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_invalid.json
new file mode 100644 (file)
index 0000000..815edf9
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3500" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_coll_period_req_valid.json
new file mode 100644 (file)
index 0000000..dad9a24
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3600" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_invalid.json
new file mode 100644 (file)
index 0000000..0ff4f0e
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTLIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3600" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/read_metrics_data/read_metric_name_req_valid.json
new file mode 100644 (file)
index 0000000..dad9a24
--- /dev/null
@@ -0,0 +1,12 @@
+{
+"schema_version": "1.0",
+"schema_type": "read_metric_data_request",
+"metric_name": "CPU_UTILIZATION",
+"metric_uuid": "0",
+"resource_uuid": "i-098da78cbd8304e17",
+"tenant_uuid": "",
+"correlation_uuid": "SO123",
+"vim_type":"AWS",
+"collection_period":"3600" ,
+"collection_unit": ""
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/name_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/name_invalid.json
new file mode 100644 (file)
index 0000000..fe171e4
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e13",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/name_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/name_valid.json
new file mode 100644 (file)
index 0000000..7070dff
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold4_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/operation_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/operation_invalid.json
new file mode 100644 (file)
index 0000000..0116228
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "Less",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/operation_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/operation_valid.json
new file mode 100644 (file)
index 0000000..5fb8eb6
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/statistic_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/statistic_invalid.json
new file mode 100644 (file)
index 0000000..991d844
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAX"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/statistic_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/statistic_valid.json
new file mode 100644 (file)
index 0000000..5fb8eb6
--- /dev/null
@@ -0,0 +1,17 @@
+{
+"schema_version": "1.0",
+"schema_type": "update_alarm_request",
+"vim_type": "AWS",
+"alarm_update_request":
+{
+"correlation_id": "SO123",
+"alarm_uuid": "CPU_Utilization_Above_Threshold_i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "LE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_alarm/update_alarm_new_alarm.json
new file mode 100644 (file)
index 0000000..581fb55
--- /dev/null
@@ -0,0 +1,18 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_alarm_request",
+"vim_type": "AWS",
+"alarm_create_request":
+{
+"correlation_id": "SO123",
+"alarm_name": "CPU_Utilization_Above_Threshold4",
+"resource_uuid": "i-098da78cbd8304e17",
+"description": "",
+"severity": "Critical",
+"operation": "GE",
+"threshold_value": 1.5,
+"unit": "",
+"metric_name": "CPU_UTILIZATION",
+"statistic": "MAXIMUM"
+}
+}
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_invalid.json
new file mode 100644 (file)
index 0000000..0fe0dcb
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZ",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json b/osm_mon/test/plugins/CloudWatch/test_schemas/update_metrics/update_metric_req_valid.json
new file mode 100644 (file)
index 0000000..18cc23c
--- /dev/null
@@ -0,0 +1,13 @@
+{
+"schema_version": "1.0",
+"schema_type": "create_metrics_request",
+"tenant_uuid": "",
+"correlation_id": "SO123",
+"vim_type": "AWS",
+"metric_create":
+{
+"metric_name": "CPU_UTILIZATION",
+"metric_unit": "",
+"resource_uuid": "i-098da78cbd8304e17"
+}
+}
\ No newline at end of file
diff --git a/osm_mon/test/plugins/CloudWatch/unit_tests_alarms.py b/osm_mon/test/plugins/CloudWatch/unit_tests_alarms.py
new file mode 100644 (file)
index 0000000..ae036cf
--- /dev/null
@@ -0,0 +1,408 @@
+from connection import Connection
+import unittest
+import sys
+import jsmin
+import json
+import os
+import time
+from jsmin import jsmin
+sys.path.append("../../test/core/")
+from test_producer import KafkaProducer
+from kafka import KafkaConsumer
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except ImportError:
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+# Test Producer object to generate request
+
+producer = KafkaProducer('create_alarm_request')
+obj = Connection() 
+connections = obj.setEnvironment()
+connections_res = obj.connection_instance()
+cloudwatch_conn = connections_res['cloudwatch_connection'] 
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+'''Test E2E flow: test cases are run one at a time.
+1) A common request is generated using the request function in test_producer.py (/test/core)
+2) The request is then consumed by the consumer (plugin)
+3) The response is sent back on the message bus in plugin_alarm.py using
+   response functions in producer.py (/core/message-bus)
+4) The response is then consumed again by unit_tests_alarms.py
+   and the test cases are applied to the response.
+'''
+
+class config_alarm_name_test(unittest.TestCase):
+   
+
+    def setUp(self):
+        pass
+    # Generate a create_alarm request with a new alarm name and a new instance id
+    def test_differentName_differentInstance(self):
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                self.assertTrue(info['alarm_create_response']['status'])
+                return        
+
+    # Generate a create_alarm request with a new alarm name and an existing instance id
+    def test_differentName_sameInstance(self):
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_sameInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete1.json",'delete_alarm_request','','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status'])   
+                return
+
+    # Generate a create_alarm request with an existing alarm name and a new instance id
+    def test_sameName_differentInstance(self): 
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_sameName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete2.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status']) 
+                return    
+
+    # Generate a create_alarm request with an existing alarm name and an existing instance id
+    def test_sameName_sameInstance(self):  
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/create_alarm_sameName_sameInstance.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info, "---")
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
+                self.assertEqual(info, None)  
+                return        
+
+    # Generate a create_alarm request with a valid statistic
+    def test_statisticValid(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/statistic_valid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status']) 
+                return
+
+    # Generate a create_alarm request with an invalid statistic
+    def test_statisticValidNot(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/statistic_invalid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info, "---")
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
+                self.assertEqual(info, None)
+                return  
+
+    # Generate a create_alarm request with a valid operation
+    def test_operationValid(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/operation_valid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete3.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_create_response']['status']) 
+                return
+
+    # Generate a create_alarm request with an invalid operation
+    def test_operationValidNot(self):       
+        time.sleep(2)
+        producer.request("test_schemas/create_alarm/operation_invalid.json",'create_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "create_alarm_response": 
+                info = json.loads(json.loads(message.value))
+                print(info)
+                time.sleep(1)
+                self.assertEqual(info,None) 
+                return                 
+                 
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+class update_alarm_name_test(unittest.TestCase):
+
+    # Generate an update_alarm request with a valid alarm_id
+    def test_nameValid(self):
+        producer.request("test_schemas/update_alarm/update_alarm_new_alarm.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/name_valid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid_delete4.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_update_response']['status'])
+                return 
+    
+    # Generate an update_alarm request with an invalid alarm_id
+    def test_nameInvalid(self):
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/name_invalid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                self.assertEqual(info,None)
+                return
+
+    # Generate an update_alarm request with a valid statistic
+    def test_statisticValid(self):
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/statistic_valid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_update_response']['status'])
+                return
+
+    # Generate an update_alarm request with an invalid statistic
+    def test_statisticInvalid(self):
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/statistic_invalid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                self.assertEqual(info,None)
+                return            
+
+    # Generate an update_alarm request with a valid operation
+    def test_operationValid(self):
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/update_alarm/operation_valid.json",'update_alarm_request', '','alarm_request')  
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "update_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)
+                producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request')
+                self.assertTrue(info['alarm_update_response']['status'])
+                return
+              
+#--------------------------------------------------------------------------------------------------------------------------------------
+class delete_alarm_test(unittest.TestCase):
+
+    # Generate a delete_alarm request with a valid alarm_id
+    def test_nameValid(self):             
+        producer.request("test_schemas/create_alarm/create_alarm_differentName_differentInstance.json",'create_alarm_request', '','alarm_request')  
+        time.sleep(2)
+        producer.request("test_schemas/delete_alarm/name_valid.json",'delete_alarm_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "delete_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertTrue(info['alarm_deletion_response']['status'])
+                return
+
+    # Generate a delete_alarm request with an invalid alarm_id
+    def test_nameInvalid(self):              
+        time.sleep(2)
+        producer.request("test_schemas/delete_alarm/name_invalid.json",'delete_alarm_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "delete_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(info,None)
+                return             
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+class list_alarm_test(unittest.TestCase): 
+
+    # Generate an alarm_list request with no filter arguments
+    def test_valid_no_arguments(self):
+        time.sleep(2)
+        producer.request("test_schemas/list_alarm/list_alarm_valid_no_arguments.json",'alarm_list_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "list_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return
+
+    # Generate an alarm_list request with one filter argument
+    def test_valid_one_arguments(self):
+        time.sleep(2)
+        producer.request("test_schemas/list_alarm/list_alarm_valid_one_arguments.json",'alarm_list_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "list_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return
+
+    # Generate an alarm_list request with two filter arguments
+    def test_valid_two_arguments(self):
+        time.sleep(2)
+        producer.request("test_schemas/list_alarm/list_alarm_valid_two_arguments.json",'alarm_list_request', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "list_alarm_response": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return
+
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+class alarm_details_test(unittest.TestCase):
+
+    # Generate an acknowledge_alarm request with valid input fields
+    def test_Valid(self):
+        time.sleep(2)
+        producer.request("test_schemas/alarm_details/acknowledge_alarm.json",'acknowledge_alarm', '','alarm_request') 
+        server = {'server': 'localhost:9092', 'topic': 'alarm_request'}
+
+        _consumer = KafkaConsumer(bootstrap_servers=server['server'])
+        _consumer.subscribe(['alarm_response'])
+
+        for message in _consumer:
+            if message.key == "notify_alarm": 
+                info = json.loads(json.loads(json.loads(message.value)))
+                print(info)
+                time.sleep(1)                
+                self.assertEqual(type(info),dict)
+                return                
+
+if __name__ == '__main__':
+
+    # Save test results in a log file
+
+    log_file = 'log_file.txt'
+    f = open(log_file, "w")
+    runner = unittest.TextTestRunner(f)
+    unittest.main(testRunner=runner)
+    f.close()
+
+    # For printing results on Console
+    # unittest.main()
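Each case in this file publishes a request on a live Kafka bus and then blocks until the CloudWatch plugin answers, so the cases are meant to be exercised one at a time. A possible way to run a single case, assuming a broker on localhost:9092, the plugin consumer running, and AWS credentials exported in the environment (a usage sketch, not part of the change itself):

    import unittest

    # Importing the module triggers its module-level Kafka producer and boto
    # connection setup, so the environment above must already be in place.
    from unit_tests_alarms import config_alarm_name_test

    # Build a one-test suite so only the selected end-to-end case runs.
    suite = unittest.TestSuite()
    suite.addTest(config_alarm_name_test('test_differentName_differentInstance'))
    unittest.TextTestRunner(verbosity=2).run(suite)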
diff --git a/osm_mon/test/plugins/CloudWatch/unit_tests_metrics.py b/osm_mon/test/plugins/CloudWatch/unit_tests_metrics.py
new file mode 100644 (file)
index 0000000..625e872
--- /dev/null
@@ -0,0 +1,208 @@
+from connection import Connection
+import unittest
+import sys
+import jsmin
+import json
+import os
+import time
+from jsmin import jsmin
+sys.path.append("../../test/core/")
+from test_producer import KafkaProducer
+from kafka import KafkaConsumer
+try:
+    import boto
+    import boto.ec2
+    import boto.vpc
+    import boto.ec2.cloudwatch
+    import boto.ec2.connection
+except ImportError:
+    exit("Boto not available. Try activating your virtualenv OR `pip install boto`")
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+# Test Producer object to generate request
+
+producer = KafkaProducer('')
+obj = Connection() 
+connections = obj.setEnvironment()
+connections_res = obj.connection_instance()
+cloudwatch_conn = connections_res['cloudwatch_connection'] 
+
+# Consumer Object to consume response from message bus
+server = {'server': 'localhost:9092', 'topic': 'metric_request'}
+_consumer = KafkaConsumer(bootstrap_servers=server['server'])
+_consumer.subscribe(['metric_response'])
+
+#--------------------------------------------------------------------------------------------------------------------------------------
+
+'''Test E2E flow: test cases are run one at a time.
+1) A common request is generated using the request function in test_producer.py (/core/message-bus)
+2) The request is then consumed by the consumer (plugin)
+3) The response is sent back on the message bus in plugin_metrics.py using
+   response functions in producer.py (/core/message-bus)
+4) The response is then consumed again by unit_tests_metrics.py
+   and the test cases are applied to the response.
+'''
+class test_create_metrics(unittest.TestCase):
+
+    def test_status_positive(self):
+        time.sleep(2)
+        # Generate a create_metric request with a valid metric_name
+        producer.request("create_metrics/create_metric_req_valid.json",'create_metric_request', '','metric_request')  
+
+        for message in _consumer:
+            if message.key == "create_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertTrue(resp['metric_create_response']['status'])
+                self.assertEqual(resp['metric_create_response']['metric_uuid'],0)
+                return 
+
+    def test_status_negative(self):
+        time.sleep(2)
+        # Generate a create_metric request with an invalid metric_name
+        producer.request("create_metrics/create_metric_req_invalid.json",'create_metric_request', '','metric_request')  
+
+        for message in _consumer:
+            if message.key == "create_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metric_create_response']['status'])
+                self.assertEqual(resp['metric_create_response']['metric_uuid'],None)
+                return 
+
+class test_metrics_data(unittest.TestCase):
+
+    def test_met_name_positive(self):
+        time.sleep(2)
+        # Generate a read_metric_data request with a valid metric_name
+        producer.request("read_metrics_data/read_metric_name_req_valid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertEqual(type(resp['metrics_data']),dict)
+                return 
+
+    def test_met_name_negative(self):
+        time.sleep(2)
+        # Generate a read_metric_data request with an invalid metric_name
+        producer.request("read_metrics_data/read_metric_name_req_invalid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metrics_data'])
+                return 
+
+    def test_coll_period_positive(self):
+        # Generate a read_metric_data request with a valid collection_period
+        # (for AWS metric statistics the collection period must be a multiple of 60)
+        time.sleep(2)
+        producer.request("read_metrics_data/read_coll_period_req_valid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertEqual(type(resp),dict)
+                return
+
+    def test_coll_period_negative(self):
+        time.sleep(2)
+        # Generate a read_metric_data request with an invalid collection_period
+        producer.request("read_metrics_data/read_coll_period_req_invalid.json",'read_metric_data_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "read_metric_data_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metrics_data'])
+                return
+
+class test_update_metrics(unittest.TestCase):
+
+    def test_upd_status_positive(self):
+        time.sleep(2)
+        # Generate an update_metric request with a valid metric_name
+        producer.request("update_metrics/update_metric_req_valid.json",'update_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "update_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertTrue(resp['metric_update_response']['status'])
+                self.assertEqual(resp['metric_update_response']['metric_uuid'],0)
+                return
+
+    def test_upd_status_negative(self):
+        time.sleep(2)
+        # Generate an update_metric request with an invalid metric_name
+        producer.request("update_metrics/update_metric_req_invalid.json",'update_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "update_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metric_update_response']['status'])
+                self.assertEqual(resp['metric_update_response']['metric_uuid'],None)
+                return
+
+class test_delete_metrics(unittest.TestCase):
+
+    def test_del_met_name_positive(self):
+        time.sleep(2)
+        # Generate a request to test a valid metric_name in delete_metric_request
+        producer.request("delete_metrics/delete_metric_req_valid.json",'delete_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "delete_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['status'])
+                return
+
+    def test_del_met_name_negative(self):
+        time.sleep(2)
+        # Generate a request to test an invalid metric_name in delete_metric_request
+        producer.request("delete_metrics/delete_metric_req_invalid.json",'delete_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "delete_metric_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp)
+                return
+
+class test_list_metrics(unittest.TestCase):
+
+    def test_list_met_name_positive(self):
+        time.sleep(2)
+        # Generate a request to test a valid metric_name in list_metric_request
+        producer.request("list_metrics/list_metric_req_valid.json",'list_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "list_metrics_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertEqual(type(resp['metrics_list']),list)
+                return
+
+    def test_list_met_name_negative(self):
+        time.sleep(2)
+        # Generate a request to test an invalid metric_name in list_metric_request
+        producer.request("list_metrics/list_metric_req_invalid.json",'list_metric_request', '','metric_request')  
+        for message in _consumer:
+            if message.key == "list_metrics_response": 
+                resp = json.loads(json.loads(json.loads(message.value)))
+                time.sleep(1)
+                self.assertFalse(resp['metrics_list'])
+                return
+
+
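+# The test cases above all follow the same round trip: publish a request schema
+# on the 'metric_request' topic via the module-level producer, then wait on the
+# shared consumer for the matching response key. A minimal sketch of that
+# pattern (illustrative only; placeholders in angle brackets):
+#
+#     producer.request("<schema>.json", '<request_key>', '', 'metric_request')
+#     for message in _consumer:
+#         if message.key == '<response_key>':
+#             resp = json.loads(json.loads(json.loads(message.value)))
+#             # ... assertions on resp ...
+#             return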
+if __name__ == '__main__':
+
+    # Save test results to a log file
+
+    log_file = 'log_file.txt'
+    with open(log_file, "w") as f:
+        runner = unittest.TextTestRunner(f)
+        # exit=False keeps unittest.main() from calling sys.exit() before the file is closed
+        unittest.main(testRunner=runner, exit=False)
+
+    # For printing results on Console
+    # unittest.main()
+
diff --git a/osm_mon/test/plugins/OpenStack/__init__.py b/osm_mon/test/plugins/OpenStack/__init__.py
new file mode 100644 (file)
index 0000000..32eb94e
--- /dev/null
@@ -0,0 +1,21 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
diff --git a/osm_mon/test/plugins/OpenStack/integration/__init__.py b/osm_mon/test/plugins/OpenStack/integration/__init__.py
new file mode 100644 (file)
index 0000000..cd7731b
--- /dev/null
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
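+
+# The log level above comes from the MON Config singleton (cfg.OSMMON_LOG_LEVEL);
+# setting it to DEBUG is a quick way to trace the Kafka round trips exercised by
+# the integration tests in this package.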
diff --git a/osm_mon/test/plugins/OpenStack/integration/test_alarm_integration.py b/osm_mon/test/plugins/OpenStack/integration/test_alarm_integration.py
new file mode 100644 (file)
index 0000000..fbec56c
--- /dev/null
@@ -0,0 +1,221 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+# __author__ = "Helena McGough"
+"""Test an end to end Openstack alarm requests."""
+
+import json
+import logging
+import unittest
+
+import mock
+from kafka import KafkaConsumer
+from kafka import KafkaProducer
+from kafka.errors import KafkaError
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager, VimCredentials
+from osm_mon.plugins.OpenStack import response
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
+
+@mock.patch.object(DatabaseManager, "save_alarm", mock.Mock())
+@mock.patch.object(Common, "get_auth_token", mock.Mock())
+@mock.patch.object(Common, "get_endpoint", mock.Mock())
+class AlarmIntegrationTest(unittest.TestCase):
+    def setUp(self):
+        try:
+            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
+                                          key_serializer=str.encode,
+                                          value_serializer=str.encode
+                                          )
+            self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
+                                              key_deserializer=bytes.decode,
+                                              value_deserializer=bytes.decode,
+                                              auto_offset_reset='earliest',
+                                              consumer_timeout_ms=60000)
+            self.req_consumer.subscribe(['alarm_request'])
+        except KafkaError:
+            self.skipTest('Kafka server not present.')
+        # Set up common and alarming class instances
+        self.alarms = alarm_handler.OpenstackAlarmHandler()
+        self.openstack_auth = Common()
+
+    def tearDown(self):
+        self.producer.close()
+        self.req_consumer.close()
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "update_alarm")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_update_alarm_req(self, resp, update_alarm, get_creds, perf_req):
+        """Test Aodh update alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_update_request": {"correlation_id": 123,
+                                            "alarm_uuid": "alarm_id",
+                                            "metric_uuid": "metric_id"}}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('alarm_request', key="update_alarm_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "update_alarm_request":
+                # Mock a valid alarm update
+                update_alarm.return_value = "alarm_id"
+                self.alarms.handle_message(message, 'test_id')
+
+                # A response message is generated and sent via MON's producer
+                resp.assert_called_with(
+                    'update_alarm_response', alarm_id="alarm_id", cor_id=123,
+                    status=True)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "configure_alarm")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_create_alarm_req(self, resp, config_alarm, get_creds, perf_req):
+        """Test Aodh create alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_create_request": {"correlation_id": 123,
+                                            "alarm_name": "my_alarm",
+                                            "metric_name": "cpu_utilization",
+                                            "resource_uuid": "my_resource",
+                                            "severity": "WARNING",
+                                            "threshold_value": 60,
+                                            "operation": "GT",
+                                            "vdu_name": "vdu",
+                                            "vnf_member_index": "1",
+                                            "ns_id": "1"}}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+        self.producer.send('alarm_request', key="create_alarm_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "create_alarm_request":
+                # Mock a valid alarm creation
+                config_alarm.return_value = "alarm_id"
+                self.alarms.handle_message(message, 'test_id')
+
+                # A response message is generated and sent via MON's producer
+                resp.assert_called_with(
+                    'create_alarm_response', status=True, alarm_id="alarm_id",
+                    cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "list_alarms")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_list_alarm_req(self, resp, list_alarm, get_creds, perf_req):
+        """Test Aodh list alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_list_request": {"correlation_id": 123,
+                                          "resource_uuid": "resource_id", }}
+
+        self.producer.send('alarm_request', key="list_alarm_request",
+                           value=json.dumps(payload))
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps([])})
+        resp.return_value = ''
+
+        for message in self.req_consumer:
+            if message.key == "list_alarm_request":
+                # Mock an empty list generated by the request
+                list_alarm.return_value = []
+                self.alarms.handle_message(message, 'test_id')
+
+                # Response message is generated
+                resp.assert_called_with(
+                    'list_alarm_response', alarm_list=[],
+                    cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "delete_alarm")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_delete_alarm_req(self, resp, del_alarm, get_creds, perf_req):
+        """Test Aodh delete alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"alarm_delete_request": {"correlation_id": 123,
+                                            "alarm_uuid": "alarm_id", }}
+
+        self.producer.send('alarm_request', key="delete_alarm_request",
+                           value=json.dumps(payload))
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps([])})
+        resp.return_value = ''
+
+        for message in self.req_consumer:
+            if message.key == "delete_alarm_request":
+                self.alarms.handle_message(message, 'test_id')
+
+                # Response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'delete_alarm_response', alarm_id="alarm_id",
+                    status=True, cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_handler.OpenstackAlarmHandler, "update_alarm_state")
+    def test_ack_alarm_req(self, ack_alarm, get_creds):
+        """Test Aodh acknowledge alarm request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"ack_details": {"alarm_uuid": "alarm_id", }}
+
+        self.producer.send('alarm_request', key="acknowledge_alarm",
+                           value=json.dumps(payload))
+
+        get_creds.return_value = mock_creds
+        ack_alarm.return_value = True
+
+        for message in self.req_consumer:
+            if message.key == "acknowledge_alarm":
+                self.alarms.handle_message(message, 'test_id')
+                ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'alarm_id', True)
+                return
+
+        self.fail("No message received in consumer")
diff --git a/osm_mon/test/plugins/OpenStack/integration/test_metric_integration.py b/osm_mon/test/plugins/OpenStack/integration/test_metric_integration.py
new file mode 100644 (file)
index 0000000..578c8b1
--- /dev/null
@@ -0,0 +1,242 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+
+# __author__ = "Helena McGough"
+"""Test an end to end Openstack metric requests."""
+
+import json
+import logging
+import unittest
+
+import mock
+from kafka import KafkaConsumer
+from kafka import KafkaProducer
+from kafka.errors import KafkaError
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import VimCredentials
+from osm_mon.plugins.OpenStack import response
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
+
+@mock.patch.object(Common, "get_auth_token", mock.Mock())
+@mock.patch.object(Common, "get_endpoint", mock.Mock())
+class MetricIntegrationTest(unittest.TestCase):
+    def setUp(self):
+        # Set up common and alarming class instances
+        self.metric_req = metric_handler.OpenstackMetricHandler()
+        self.openstack_auth = Common()
+
+        try:
+            self.producer = KafkaProducer(bootstrap_servers='localhost:9092',
+                                          key_serializer=str.encode,
+                                          value_serializer=str.encode
+                                          )
+            self.req_consumer = KafkaConsumer(bootstrap_servers='localhost:9092',
+                                              key_deserializer=bytes.decode,
+                                              value_deserializer=bytes.decode,
+                                              auto_offset_reset='earliest',
+                                              consumer_timeout_ms=60000)
+            self.req_consumer.subscribe(['metric_request'])
+        except KafkaError:
+            self.skipTest('Kafka server not present.')
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "configure_metric")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_create_metric_req(self, resp, config_metric, get_creds, perf_req):
+        """Test Gnocchi create metric request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"metric_create_request": {"correlation_id": 123,
+                                             "metric_name": "cpu_utilization",
+                                             "resource_uuid": "resource_id"}}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="create_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "create_metric_request":
+                # A valid metric is created
+                config_metric.return_value = "metric_id", "resource_id"
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'create_metric_response', status=True, cor_id=123,
+                    metric_id="metric_id", resource_id="resource_id")
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "delete_metric")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_delete_metric_req(self, resp, del_metric, get_creds, perf_req):
+        """Test Gnocchi delete metric request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"vim_type": "OpenSTACK",
+                   "vim_uuid": "1",
+                   "correlation_id": 123,
+                   "metric_name": "cpu_utilization",
+                   "resource_uuid": "resource_id"}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="delete_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            if message.key == "delete_metric_request":
+                # Metric has been deleted
+                del_metric.return_value = True
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'delete_metric_response', metric_id='1',
+                    metric_name="cpu_utilization", status=True, resource_id="resource_id",
+                    cor_id=123)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "read_metric_data")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_read_metric_data_req(self, resp, read_data, get_creds, perf_req):
+        """Test Gnocchi read metric data request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"vim_type": "OpenSTACK",
+                   "vim_uuid": "test_id",
+                   "correlation_id": 123,
+                   "metric_name": "cpu_utilization",
+                   "resource_uuid": "resource_id"}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="read_metric_data_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            # Check the vim desired by the message
+            if message.key == "read_metric_data_request":
+                # Mock empty lists generated by the request message
+                read_data.return_value = [], []
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'read_metric_data_response', metric_id='1',
+                    metric_name="cpu_utilization", resource_id="resource_id", cor_id=123, times=[],
+                    metrics=[], status=True)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "list_metrics")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_list_metrics_req(self, resp, list_metrics, get_creds, perf_req):
+        """Test Gnocchi list metrics request message from producer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"vim_type": "OpenSTACK",
+                   "vim_uuid": "1",
+                   "metrics_list_request":
+                       {"correlation_id": 123, }}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="list_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            # Check the vim desired by the message
+            if message.key == "list_metric_request":
+                # Mock an empty list generated by the request
+                list_metrics.return_value = []
+                self.metric_req.handle_request(message, 'test_id')
+
+                # A response message is generated and sent by MON's producer
+                resp.assert_called_with(
+                    'list_metric_response', metric_list=[], cor_id=123, status=True)
+
+                return
+        self.fail("No message received in consumer")
+
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(metric_handler.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(response.OpenStackResponseBuilder, "generate_response")
+    def test_update_metrics_req(self, resp, get_id, get_creds, perf_req):
+        """Test Gnocchi update metric request message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        payload = {"metric_update_request": {"metric_name": "my_metric",
+                                             "correlation_id": 123,
+                                             "resource_uuid": "resource_id", }}
+
+        get_creds.return_value = mock_creds
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps({"metrics": {"cpu_util": "1"}})})
+        resp.return_value = ''
+
+        self.producer.send('metric_request', key="update_metric_request",
+                           value=json.dumps(payload))
+
+        for message in self.req_consumer:
+            # Check the vim desired by the message
+            if message.key == "update_metric_request":
+                # Gnocchi doesn't support metric updates
+                get_id.return_value = "metric_id"
+                self.metric_req.handle_request(message, 'test_id')
+
+                # Response message is generated and sent via MON's producer
+                # No metric update has taken place
+                resp.assert_called_with(
+                    'update_metric_response', status=False, cor_id=123,
+                    resource_id="resource_id", metric_id="metric_id")
+
+                return
+        self.fail("No message received in consumer")
diff --git a/osm_mon/test/plugins/OpenStack/integration/test_notify_alarm.py b/osm_mon/test/plugins/OpenStack/integration/test_notify_alarm.py
new file mode 100644 (file)
index 0000000..8aa2c9f
--- /dev/null
@@ -0,0 +1,183 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all common OpenStack methods."""
+
+from __future__ import unicode_literals
+import json
+import logging
+import socket
+import unittest
+from threading import Thread
+
+import mock
+import requests
+from kafka import KafkaProducer
+from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
+from six.moves.BaseHTTPServer import HTTPServer
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.Aodh.alarm_handler import OpenstackAlarmHandler
+from osm_mon.plugins.OpenStack.common import Common
+from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
+
+log = logging.getLogger(__name__)
+
+# Create an instance of the common openstack class, producer and consumer
+openstack_auth = Common()
+
+# Mock a valid get_response for alarm details
+valid_get_resp = '{"gnocchi_resources_threshold_rule":\
+                  {"resource_id": "my_resource_id"}}'
+
+
+class MockResponse(object):
+    """Mock a response class for generating responses."""
+
+    def __init__(self, text):
+        """Initialise a mock response with a text attribute."""
+        self.text = text
+
+
+class MockNotifierHandler(BaseHTTPRequestHandler):
+    """Mock the NotifierHandler class for testing purposes."""
+
+    def _set_headers(self):
+        """Set the headers for a request."""
+        self.send_response(200)
+        self.send_header('Content-type', 'text/html')
+        self.end_headers()
+
+    def do_GET(self):
+        """Mock functionality for a GET request."""
+        self._set_headers()
+
+    def do_POST(self):
+        """Mock functionality for a POST request."""
+        self._set_headers()
+        content_length = int(self.headers['Content-Length'])
+        post_data = self.rfile.read(content_length)
+        try:
+            post_data = post_data.decode()
+        except AttributeError:
+            pass
+        self.notify_alarm(json.loads(post_data))
+
+    def notify_alarm(self, values):
+        """Mock the notify_alarm functionality to generate a valid response."""
+        cfg = Config.instance()
+        self._alarming = OpenstackAlarmHandler()
+        self._common = Common()
+        self._response = OpenStackResponseBuilder()
+        alarm_id = values['alarm_id']
+
+        auth_token = Common.get_auth_token('test_id')
+        endpoint = Common.get_endpoint('alarming', 'test_id')
+
+        # If authenticated generate and send response message
+        if auth_token is not None and endpoint is not None:
+            url = "{}/v2/alarms/%s".format(endpoint) % alarm_id
+
+            # Get the resource_id of the triggered alarm and the date
+            result = Common.perform_request(
+                url, auth_token, req_type="get")
+            alarm_details = json.loads(result.text)
+            gnocchi_rule = alarm_details['gnocchi_resources_threshold_rule']
+            resource_id = gnocchi_rule['resource_id']
+            # Mock a date for testing purposes
+            a_date = "dd-mm-yyyy 00:00"
+
+            # Process an alarm notification if resource_id is valid
+            if resource_id is not None:
+                # Try to generate and send a response
+                try:
+                    resp_message = self._response.generate_response(
+                        'notify_alarm',
+                        alarm_id=alarm_id,
+                        resource_id=resource_id,
+                        sev=values['severity'], date=a_date,
+                        state=values['current'], vim_type="OpenStack")
+                except Exception:
+                    log.exception("Error generating response")
+
+
+def get_free_port():
+    """Function to get a free port to run the test webserver on."""
+    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
+    s.bind(('localhost', 0))
+    address, port = s.getsockname()
+    s.close()
+    return port
+
+
+# Create the webserver, port and run it on its own thread
+mock_server_port = get_free_port()
+mock_server = HTTPServer(('localhost', mock_server_port), MockNotifierHandler)
+mock_server_thread = Thread(target=mock_server.serve_forever)
+mock_server_thread.setDaemon(True)
+mock_server_thread.start()
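+# Because the thread is a daemon, the mock notifier webserver is torn down
+# automatically when the test process exits; no explicit shutdown is needed.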
+
+
+def test_do_get():
+    """Integration test for get request on notifier webserver."""
+    url = 'http://localhost:{port}/users'.format(port=mock_server_port)
+
+    # Send a request to the mock API server and store the response.
+    response = requests.get(url)
+
+    # Confirm that the request-response cycle completed successfully.
+    assert response.ok
+
+
+class AlarmNotificationTest(unittest.TestCase):
+    @mock.patch.object(OpenStackResponseBuilder, "generate_response")
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(Common, "get_endpoint")
+    @mock.patch.object(Common, "get_auth_token")
+    def test_post_notify_alarm(self, auth, endpoint, perf_req, resp):
+        """Integration test for notify_alarm."""
+        url = 'http://localhost:{port}/users'.format(port=mock_server_port)
+        payload = {"severity": "critical",
+                   "alarm_name": "my_alarm",
+                   "current": "current_state",
+                   "alarm_id": "my_alarm_id",
+                   "reason": "Threshold has been broken",
+                   "reason_data": {"count": 1,
+                                   "most_recent": "null",
+                                   "type": "threshold",
+                                   "disposition": "unknown"},
+                   "previous": "previous_state"}
+
+        # Mock authenticate and request response for testing
+        auth.return_value = "my_auth_token"
+        endpoint.return_value = "my_endpoint"
+        perf_req.return_value = MockResponse(valid_get_resp)
+
+        # Generate a post request for testing
+        response = requests.post(url, json.dumps(payload))
+        self.assertEqual(response.status_code, 200)
+        # A response message is generated with the following details
+        resp.assert_called_with(
+            "notify_alarm", alarm_id="my_alarm_id", resource_id="my_resource_id",
+            sev="critical", date='dd-mm-yyyy 00:00', state="current_state",
+            vim_type="OpenStack")
diff --git a/osm_mon/test/plugins/OpenStack/integration/test_vim_account.py b/osm_mon/test/plugins/OpenStack/integration/test_vim_account.py
new file mode 100644 (file)
index 0000000..da34bb2
--- /dev/null
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+
+"""Test an end to end Openstack vim_account requests."""
+
+import json
+import logging
+import unittest
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import DatabaseManager
+
+log = logging.getLogger(__name__)
+
+
+class VimAccountTest(unittest.TestCase):
+    def setUp(self):
+        self.auth_manager = AuthManager()
+        self.database_manager = DatabaseManager()
+        self.database_manager.create_tables()
+
+    def test_create_edit_delete_vim_account(self):
+        """Test vim_account creation message from KafkaProducer."""
+        # Set-up message, producer and consumer for tests
+        create_payload = {
+            "_id": "test_id",
+            "name": "test_name",
+            "vim_type": "openstack",
+            "vim_url": "auth_url",
+            "vim_user": "user",
+            "vim_password": "password",
+            "vim_tenant_name": "tenant",
+            "config":
+                {
+                    "foo": "bar"
+                }
+        }
+        self.auth_manager.store_auth_credentials(create_payload)
+
+        creds = self.auth_manager.get_credentials('test_id')
+
+        self.assertIsNotNone(creds)
+        self.assertEqual(creds.name, create_payload['name'])
+        self.assertEqual(json.loads(creds.config), create_payload['config'])
+
+        # Set up the edit payload
+        edit_payload = {
+            "_id": "test_id",
+            "name": "test_name_edited",
+            "vim_type": "openstack",
+            "vim_url": "auth_url",
+            "vim_user": "user",
+            "vim_password": "password",
+            "vim_tenant_name": "tenant",
+            "config":
+                {
+                    "foo_edited": "bar_edited"
+                }
+        }
+
+        self.auth_manager.store_auth_credentials(edit_payload)
+
+        creds = self.auth_manager.get_credentials('test_id')
+
+        self.assertEqual(creds.name, edit_payload['name'])
+        self.assertEqual(json.loads(creds.config), edit_payload['config'])
+
+        delete_payload = {
+            "_id": "test_id"
+        }
+
+        self.auth_manager.delete_auth_credentials(delete_payload)
+
+        creds = self.auth_manager.get_credentials('test_id')
+        self.assertIsNone(creds)
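+
+# Unlike the Kafka-driven tests above, this case exercises the credential store
+# directly: DatabaseManager.create_tables() prepares the schema in setUp(), and
+# AuthManager stores, updates and deletes the VIM account, so no broker is needed.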
diff --git a/osm_mon/test/plugins/OpenStack/unit/__init__.py b/osm_mon/test/plugins/OpenStack/unit/__init__.py
new file mode 100644 (file)
index 0000000..cd7731b
--- /dev/null
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_alarm_req.py b/osm_mon/test/plugins/OpenStack/unit/test_alarm_req.py
new file mode 100644 (file)
index 0000000..02cec8b
--- /dev/null
@@ -0,0 +1,150 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# **************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the 'License'); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all alarm request message keys."""
+
+import json
+import logging
+import unittest
+from io import UnsupportedOperation
+
+import mock
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import VimCredentials, DatabaseManager
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler as alarm_req
+from osm_mon.plugins.OpenStack.Aodh.alarm_handler import OpenstackAlarmHandler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+mock_creds = VimCredentials()
+mock_creds.config = '{}'
+
+
+class Message(object):
+    """A class to mock a message object value for alarm requests."""
+
+    def __init__(self):
+        """Initialize a mocked message instance."""
+        self.topic = 'alarm_request'
+        self.key = None
+        self.value = json.dumps({'mock_value': 'mock_details'})
+
+
+class TestAlarmKeys(unittest.TestCase):
+    """Integration test for alarm request keys."""
+
+    def setUp(self):
+        """Setup the tests for alarm request keys."""
+        super(TestAlarmKeys, self).setUp()
+        self.alarming = alarm_req.OpenstackAlarmHandler()
+        self.alarming.common = Common()
+
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(Common, 'get_endpoint')
+    @mock.patch.object(Common, 'get_auth_token')
+    def test_alarming_authentication(self, get_token, get_endpoint, get_creds):
+        """Test getting an auth_token and endpoint for alarm requests."""
+        # if auth_token is None environment variables are used to authenticate
+        get_creds.return_value = mock_creds
+
+        with self.assertRaises(UnsupportedOperation):
+            self.alarming.handle_message('', {}, 'test_id')
+
+        get_token.assert_called_with('test_id', verify_ssl=True)
+        get_endpoint.assert_any_call('alarming', 'test_id', verify_ssl=True)
+
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'delete_alarm')
+    def test_delete_alarm_key(self, del_alarm, get_creds):
+        """Test the functionality for a create alarm request."""
+        value = {'alarm_delete_request': {
+            'correlation_id': 1,
+            'alarm_uuid': 'my_alarm_id'
+        }}
+
+        get_creds.return_value = mock_creds
+        del_alarm.return_value = {}
+
+        # Call the alarming functionality and check delete request
+        self.alarming.handle_message('delete_alarm_request', value, 'test_id')
+        del_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id', True)
+
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'list_alarms')
+    def test_list_alarm_key(self, list_alarm, get_creds):
+        """Test the functionality for a list alarm request."""
+        value = {'alarm_list_request': {'correlation_id': 1}}
+
+        get_creds.return_value = mock_creds
+
+        list_alarm.return_value = []
+
+        # Call the alarming functionality and check list functionality
+        self.alarming.handle_message('list_alarm_request', value, 'test_id')
+        list_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1}, True)
+
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'update_alarm_state')
+    def test_ack_alarm_key(self, ack_alarm, get_creds):
+        """Test the functionality for an acknowledge alarm request."""
+        value = {'ack_details': {'alarm_uuid': 'my_alarm_id'}}
+
+        get_creds.return_value = mock_creds
+
+        # Call alarming functionality and check acknowledge functionality
+        self.alarming.handle_message('acknowledge_alarm_request', value, 'test_id')
+        ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id', True)
+
+    @mock.patch.object(Common, 'get_auth_token', mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(DatabaseManager, 'save_alarm', mock.Mock())
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, 'get_credentials')
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, 'configure_alarm')
+    def test_config_alarm_key(self, config_alarm, get_creds, perf_req):
+        """Test the functionality for a create alarm request."""
+        value = {'alarm_create_request': {'correlation_id': 1, 'threshold_value': 50,
+                                          'operation': 'GT', 'metric_name': 'cpu_utilization',
+                                          'vdu_name': 'vdu',
+                                          'vnf_member_index': '1',
+                                          'ns_id': '1',
+                                          'resource_uuid': '123'}}
+        mock_perf_req_return_value = {"metrics": {"cpu_util": 123}}
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps(mock_perf_req_return_value, sort_keys=True)})
+        get_creds.return_value = mock_creds
+
+        # Call alarming functionality and check config alarm call
+        config_alarm.return_value = 'my_alarm_id'
+        self.alarming.handle_message('create_alarm_request', value, 'test_id')
+        config_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1, 'threshold_value': 50,
+                                                             'operation': 'GT',
+                                                             'metric_name': 'cpu_utilization',
+                                                             'vdu_name': 'vdu',
+                                                             'vnf_member_index': '1', 'ns_id': '1',
+                                                             'resource_uuid': '123'}, {}, True)
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_alarming.py b/osm_mon/test/plugins/OpenStack/unit/test_alarming.py
new file mode 100644 (file)
index 0000000..67486e7
--- /dev/null
@@ -0,0 +1,299 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# **************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all alarm request message keys."""
+
+import json
+import logging
+import unittest
+
+import mock
+
+from osm_mon.core.settings import Config
+from osm_mon.plugins.OpenStack.Aodh import alarm_handler as alarm_req
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+auth_token = mock.ANY
+alarm_endpoint = "alarm_endpoint"
+metric_endpoint = "metric_endpoint"
+
+
+class Response(object):
+    """Mock a response message class."""
+
+    def __init__(self, result):
+        """Initialise the response text and status code."""
+        self.text = json.dumps(result)
+        self.status_code = "MOCK_STATUS_CODE"
+
+
+class TestAlarming(unittest.TestCase):
+    """Tests for alarming class functions."""
+
+    maxDiff = None
+
+    def setUp(self):
+        """Setup for tests."""
+        super(TestAlarming, self).setUp()
+        self.alarming = alarm_req.OpenstackAlarmHandler()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_config_invalid_alarm_req(self, perf_req):
+        """Test configure an invalid alarm request."""
+        # Configuring with invalid metric name results in failure
+        values = {"alarm_name": "my_alarm",
+                  "metric_name": "my_metric",
+                  "resource_uuid": "my_r_id"}
+        with self.assertRaises(KeyError):
+            self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_not_called()
+        perf_req.reset_mock()
+
+        # Configuring with missing metric name results in failure
+        values = {"alarm_name": "disk_write_ops",
+                  "resource_uuid": "my_r_id"}
+
+        with self.assertRaises(KeyError):
+            self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_not_called()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_config_valid_alarm_req(self, perf_req):
+        """Test config a valid alarm."""
+        values = {"alarm_name": "disk_write_ops",
+                  "metric_name": "disk_write_ops",
+                  "resource_uuid": "my_r_id",
+                  "statistic": "AVERAGE",
+                  "threshold_value": 60,
+                  "operation": "GT"}
+
+        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
+
+        self.alarming.configure_alarm(alarm_endpoint, auth_token, values, {}, True)
+        payload = {"name": "disk_write_ops",
+                   "gnocchi_resources_threshold_rule": {"resource_type": "generic", "comparison_operator": "gt",
+                                                        "granularity": "300", "metric": "disk.write.requests",
+                                                        "aggregation_method": "mean", "threshold": 60,
+                                                        "resource_id": "my_r_id"},
+                   "alarm_actions": ["http://localhost:8662"], "repeat_actions": True, "state": "ok", "type": "gnocchi_resources_threshold",
+                   "severity": "critical"}
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/", auth_token,
+            req_type="post", payload=json.dumps(payload, sort_keys=True), verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_delete_alarm_req(self, perf_req):
+        """Test delete alarm request."""
+        self.alarming.delete_alarm(alarm_endpoint, auth_token, "my_alarm_id", True)
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/my_alarm_id", auth_token, req_type="delete", verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_invalid_list_alarm_req(self, perf_req):
+        """Test invalid list alarm_req."""
+        # Request will not be performed without a resource_id
+        list_details = {"mock_details": "invalid_details"}
+        with self.assertRaises(KeyError):
+            self.alarming.list_alarms(alarm_endpoint, auth_token, list_details, True)
+        perf_req.assert_not_called()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_valid_list_alarm_req(self, perf_req):
+        """Test valid list alarm request."""
+        # Minimum requirement for an alarm list is resource_id
+        list_details = {"resource_uuid": "mock_r_id", "alarm_name": "mock_alarm", "severity": "critical"}
+
+        mock_perf_req_return_value = [
+            {"alarm_id": "1", "name": "mock_alarm", "severity": "critical",
+             "gnocchi_resources_threshold_rule": {"resource_id": "mock_r_id"}}]
+        perf_req.return_value = type('obj', (object,),
+                                     {'text': json.dumps(mock_perf_req_return_value)})
+
+        alarm_list = self.alarming.list_alarms(alarm_endpoint, auth_token, list_details, True)
+
+        self.assertDictEqual(alarm_list[0], mock_perf_req_return_value[0])
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/", auth_token, req_type="get", verify_ssl=True)
+        perf_req.reset_mock()
+
+        # Check list with alarm_name defined
+        list_details = {"resource_uuid": "mock_r_id",
+                        "alarm_name": "mock_alarm",
+                        "severity": "critical"}
+        alarm_list = self.alarming.list_alarms(alarm_endpoint, auth_token, list_details, True)
+
+        self.assertDictEqual(alarm_list[0], mock_perf_req_return_value[0])
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/", auth_token, req_type="get", verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_ack_alarm_req(self, perf_req):
+        """Test update alarm state for acknowledge alarm request."""
+        resp = Response({})
+        perf_req.return_value = resp
+
+        self.alarming.update_alarm_state(alarm_endpoint, auth_token, "my_alarm_id", True)
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/my_alarm_id/state", auth_token, req_type="put",
+            payload=json.dumps("ok"), verify_ssl=True)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_update_alarm_invalid(self, perf_req):
+        """Test update alarm with invalid get response."""
+        values = {"alarm_uuid": "my_alarm_id"}
+
+        perf_req.return_value = type('obj', (object,), {'invalid_prop': 'Invalid response'})
+
+        with self.assertRaises(Exception):
+            self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_called_with(mock.ANY, auth_token, req_type="get")
+
+    @mock.patch.object(Common, "perform_request")
+    def test_update_alarm_invalid_payload(self, perf_req):
+        """Test update alarm with invalid payload."""
+        resp = Response({"name": "my_alarm",
+                         "state": "alarm",
+                         "gnocchi_resources_threshold_rule":
+                             {"resource_id": "my_resource_id",
+                              "metric": "my_metric"}})
+        perf_req.return_value = resp
+        values = {"alarm_uuid": "my_alarm_id"}
+
+        with self.assertRaises(Exception):
+            self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}, True)
+        perf_req.assert_called_with(mock.ANY, auth_token, req_type="get")
+        self.assertEqual(perf_req.call_count, 1)
+
+    @mock.patch.object(alarm_req.OpenstackAlarmHandler, "check_payload")
+    @mock.patch.object(Common, "perform_request")
+    def test_update_alarm_valid(self, perf_req, check_pay):
+        """Test valid update alarm request."""
+        resp = Response({"alarm_id": "1",
+                         "name": "my_alarm",
+                         "state": "alarm",
+                         "gnocchi_resources_threshold_rule":
+                             {"resource_id": "my_resource_id",
+                              "metric": "disk.write.requests"}})
+        perf_req.return_value = resp
+        values = {"alarm_uuid": "my_alarm_id"}
+
+        self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}, True)
+
+        check_pay.assert_called_with(values, "disk_write_ops", "my_resource_id",
+                                     "my_alarm", alarm_state="alarm")
+
+        self.assertEqual(perf_req.call_count, 2)
+        # Second call is the update request
+        perf_req.assert_called_with(
+            'alarm_endpoint/v2/alarms/my_alarm_id', auth_token,
+            req_type="put", payload=check_pay.return_value, verify_ssl=True)
+
+    @mock.patch.object(Config, "instance")
+    def test_check_valid_payload(self, cfg):
+        """Test the check payload function for a valid payload."""
+        values = {"severity": "warning",
+                  "statistic": "COUNT",
+                  "threshold_value": 12,
+                  "operation": "GT",
+                  "granularity": 300,
+                  "resource_type": "generic"}
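+        # The configured notifier URI ends up as the alarm_actions webhook in the generated payload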
+        cfg.return_value.OS_NOTIFIER_URI = "http://localhost:8662"
+        payload = self.alarming.check_payload(
+            values, "disk_write_ops", "r_id", "alarm_name")
+
+        self.assertDictEqual(
+            json.loads(payload), {"name": "alarm_name",
+                                  "gnocchi_resources_threshold_rule":
+                                      {"resource_id": "r_id",
+                                       "metric": "disk.write.requests",
+                                       "comparison_operator": "gt",
+                                       "aggregation_method": "count",
+                                       "threshold": 12,
+                                       "granularity": 300,
+                                       "resource_type": "generic"},
+                                  "severity": "low",
+                                  "state": "ok",
+                                  "type": "gnocchi_resources_threshold",
+                                  "alarm_actions": ["http://localhost:8662"],
+                                  "repeat_actions": True})
+
+    @mock.patch.object(Config, "instance")
+    @mock.patch.object(Common, "perform_request")
+    def test_check_valid_state_payload(self, perform_req, cfg):
+        """Test the check payload function for a valid payload with state."""
+        values = {"severity": "warning",
+                  "statistic": "COUNT",
+                  "threshold_value": 12,
+                  "operation": "GT",
+                  "granularity": 300,
+                  "resource_type": "generic"}
+        cfg.return_value.OS_NOTIFIER_URI = "http://localhost:8662"
+        payload = self.alarming.check_payload(
+            values, "disk_write_ops", "r_id", "alarm_name", alarm_state="alarm")
+
+        self.assertEqual(
+            json.loads(payload), {"name": "alarm_name",
+                                  "gnocchi_resources_threshold_rule":
+                                      {"resource_id": "r_id",
+                                       "metric": "disk.write.requests",
+                                       "comparison_operator": "gt",
+                                       "aggregation_method": "count",
+                                       "threshold": 12,
+                                       "granularity": 300,
+                                       "resource_type": "generic"},
+                                  "severity": "low",
+                                  "state": "alarm",
+                                  "type": "gnocchi_resources_threshold",
+                                  "alarm_actions": ["http://localhost:8662"],
+                                  "repeat_actions": True})
+
+    def test_check_invalid_payload(self):
+        """Test the check payload function for an invalid payload."""
+        values = {"alarm_values": "mock_invalid_details"}
+        with self.assertRaises(Exception):
+            self.alarming.check_payload(values, "my_metric", "r_id", "alarm_name")
+
+    @mock.patch.object(Common, "perform_request")
+    def test_get_alarm_state(self, perf_req):
+        """Test the get alarm state function."""
+        perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'})
+
+        self.alarming.get_alarm_state(alarm_endpoint, auth_token, "alarm_id")
+
+        perf_req.assert_called_with(
+            "alarm_endpoint/v2/alarms/alarm_id/state", auth_token, req_type="get")
+
+    @mock.patch.object(Common, "perform_request")
+    def test_check_for_metric(self, perf_req):
+        """Test the check for metric function."""
+        mock_perf_req_return_value = {"metrics": {"cpu_util": 123}}
+        perf_req.return_value = type('obj', (object,), {'text': json.dumps(mock_perf_req_return_value)})
+
+        self.alarming.check_for_metric(auth_token, metric_endpoint, "cpu_utilization", "r_id", True)
+
+        perf_req.assert_called_with(
+            "metric_endpoint/v1/resource/generic/r_id", auth_token, req_type="get", verify_ssl=True)
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_common.py b/osm_mon/test/plugins/OpenStack/unit/test_common.py
new file mode 100644 (file)
index 0000000..e6c52fb
--- /dev/null
@@ -0,0 +1,119 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all common OpenStack methods."""
+
+import json
+import logging
+import unittest
+
+import mock
+import requests
+from keystoneclient.v3 import client
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.core.database import VimCredentials
+from osm_mon.plugins.OpenStack.common import Common
+
+__author__ = "Helena McGough"
+
+log = logging.getLogger(__name__)
+
+
+class Message(object):
+    """Mock a message for an access credentials request."""
+
+    def __init__(self):
+        """Initialise the topic and value of access_cred message."""
+        self.topic = "access_credentials"
+        self.value = json.dumps({"mock_value": "mock_details",
+                                 "vim_type": "OPENSTACK",
+                                 "access_config":
+                                     {"openstack_site": "my_site",
+                                      "user": "my_user",
+                                      "password": "my_password",
+                                      "vim_tenant_name": "my_tenant"}})
+
+
+class TestCommon(unittest.TestCase):
+    """Test the common class for OpenStack plugins."""
+
+    def setUp(self):
+        """Test Setup."""
+        super(TestCommon, self).setUp()
+        self.common = Common()
+        self.creds = VimCredentials()
+        self.creds.id = 'test_id'
+        self.creds.user = 'user'
+        self.creds.url = 'url'
+        self.creds.password = 'password'
+        self.creds.tenant_name = 'tenant_name'
+
+    @mock.patch.object(AuthManager, "get_credentials")
+    @mock.patch.object(client.Client, "get_raw_token_from_identity_service")
+    def test_get_auth_token(self, get_token, get_creds):
+        """Test generating a new authentication token."""
+        get_creds.return_value = self.creds
+        Common.get_auth_token('test_id')
+        get_creds.assert_called_with('test_id')
+        get_token.assert_called_with(auth_url='url', password='password', project_name='tenant_name', username='user',
+                                     project_domain_id='default', user_domain_id='default')
+
+    @mock.patch.object(requests, 'post')
+    def test_post_req(self, post):
+        """Testing a post request."""
+        Common.perform_request("url", "auth_token", req_type="post",
+                               payload="payload")
+
+        post.assert_called_with("url", data="payload", headers=mock.ANY,
+                                timeout=mock.ANY, verify=True)
+
+    @mock.patch.object(requests, 'get')
+    def test_get_req(self, get):
+        """Testing a get request."""
+        # Run the default get request without any parameters
+        Common.perform_request("url", "auth_token", req_type="get")
+
+        get.assert_called_with("url", params=None, headers=mock.ANY,
+                               timeout=mock.ANY, verify=True)
+
+        # Test with some parameters specified
+        get.reset_mock()
+        Common.perform_request("url", "auth_token", req_type="get",
+                               params="some parameters")
+
+        get.assert_called_with("url", params="some parameters",
+                               headers=mock.ANY, timeout=mock.ANY, verify=True)
+
+    @mock.patch.object(requests, 'put')
+    def test_put_req(self, put):
+        """Testing a put request."""
+        Common.perform_request("url", "auth_token", req_type="put",
+                               payload="payload")
+        put.assert_called_with("url", data="payload", headers=mock.ANY,
+                               timeout=mock.ANY, verify=True)
+
+    @mock.patch.object(requests, 'delete')
+    def test_delete_req(self, delete):
+        """Testing a delete request."""
+        Common.perform_request("url", "auth_token", req_type="delete")
+
+        delete.assert_called_with("url", headers=mock.ANY, timeout=mock.ANY, verify=True)
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_metric_calls.py b/osm_mon/test/plugins/OpenStack/unit/test_metric_calls.py
new file mode 100644 (file)
index 0000000..b71ca72
--- /dev/null
@@ -0,0 +1,308 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all metric request message keys."""
+
+import json
+import logging
+import unittest
+
+import mock
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler as metric_req
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+# Mock auth_token and endpoint
+endpoint = mock.ANY
+auth_token = mock.ANY
+
+# Mock a valid metric list for some tests, and a resultant list
+metric_list = [{"name": "disk.write.requests",
+                "id": "metric_id",
+                "unit": "units",
+                "resource_id": "r_id"}]
+result_list = ["metric_id", "r_id", "units", "disk_write_ops"]
+
+
+class Response(object):
+    """Mock a response object for requests."""
+
+    def __init__(self):
+        """Initialise test and status code values."""
+        self.text = json.dumps([{"id": "test_id"}])
+        self.status_code = "STATUS_CODE"
+
+
+def perform_request_side_effect(*args, **kwargs):
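+    """Mimic Gnocchi: empty page for 'marker' queries, cpu_util metric for resource queries."""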
+    resp = Response()
+    if 'marker' in args[0]:
+        resp.text = json.dumps([])
+    if 'resource/generic' in args[0]:
+        resp.text = json.dumps({'metrics': {'cpu_util': 'test_id'}})
+    return resp
+
+
+class TestMetricCalls(unittest.TestCase):
+    """Integration test for metric request keys."""
+
+    def setUp(self):
+        """Setup the tests for metric request keys."""
+        super(TestMetricCalls, self).setUp()
+        self.metrics = metric_req.OpenstackMetricHandler()
+        self.metrics._common = Common()
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(Common, "perform_request")
+    def test_invalid_config_metric_req(
+            self, perf_req, get_metric):
+        """Test the configure metric function, for an invalid metric."""
+        # Test invalid configuration for creating a metric
+        values = {"metric_details": "invalid_metric"}
+
+        with self.assertRaises(ValueError):
+            self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_not_called()
+
+        # Test with an invalid metric name, will not perform request
+        values = {"resource_uuid": "r_id"}
+
+        with self.assertRaises(ValueError):
+            self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_not_called()
+
+        # If metric exists, it won't be recreated
+        get_metric.return_value = "metric_id"
+
+        with self.assertRaises(ValueError):
+            self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_not_called()
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(Common, "perform_request")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_valid_config_metric_req(
+            self, get_creds, perf_req, get_metric):
+        """Test the configure metric function, for a valid metric."""
+        # Test valid configuration and payload for creating a metric
+        get_creds.return_value = type('obj', (object,), {'config': '{"insecure":true}'})
+        values = {"resource_uuid": "r_id",
+                  "metric_unit": "units",
+                  "metric_name": "cpu_util"}
+        get_metric.return_value = None
+        payload = {"id": "r_id",
+                   "metrics": {"cpu_util":
+                                   {"archive_policy_name": "high",
+                                    "name": "cpu_util",
+                                    "unit": "units"}}}
+
+        perf_req.return_value = type('obj', (object,), {'text': '{"metrics":{"cpu_util":1}, "id":1}'})
+
+        self.metrics.configure_metric(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_called_with(
+            "<ANY>/v1/resource/generic", auth_token, req_type="post", verify_ssl=False,
+            payload=json.dumps(payload, sort_keys=True))
+
+    @mock.patch.object(Common, "perform_request")
+    def test_delete_metric_req(self, perf_req):
+        """Test the delete metric function."""
+        mock_response = Response()
+        mock_response.status_code = 200
+        perf_req.return_value = mock_response
+
+        self.metrics.delete_metric(endpoint, auth_token, "metric_id", verify_ssl=False)
+
+        perf_req.assert_called_with(
+            "<ANY>/v1/metric/metric_id", auth_token, req_type="delete", verify_ssl=False)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_delete_metric_invalid_status(self, perf_req):
+        """Test invalid response for delete request."""
+        perf_req.return_value = type('obj', (object,), {"status_code": "404"})
+
+        with self.assertRaises(ValueError):
+            self.metrics.delete_metric(endpoint, auth_token, "metric_id", verify_ssl=False)
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_complete_list_metric_req(self, perf_req, resp_list):
+        """Test the complete list metric function."""
+        # Test listing metrics without any configuration options
+        values = {}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric?sort=name:asc", auth_token, req_type="get", verify_ssl=False)
+        resp_list.assert_called_with([{u'id': u'test_id'}])
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_resource_list_metric_req(self, perf_req, resp_list):
+        """Test the resource list metric function."""
+        # Test listing metrics with a resource id specified
+        values = {"resource_uuid": "resource_id"}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric/test_id", auth_token, req_type="get", verify_ssl=False)
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_name_list_metric_req(self, perf_req, resp_list):
+        """Test the metric_name list metric function."""
+        # Test listing metrics with a metric_name specified
+        values = {"metric_name": "disk_write_bytes"}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric?sort=name:asc", auth_token, req_type="get", verify_ssl=False)
+        resp_list.assert_called_with(
+            [{u'id': u'test_id'}], metric_name="disk_write_bytes")
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "response_list")
+    @mock.patch.object(Common, "perform_request")
+    def test_combined_list_metric_req(self, perf_req, resp_list):
+        """Test the combined resource and metric list metric function."""
+        # Test listing metrics with a resource id and metric name specified
+
+        values = {"resource_uuid": "resource_id",
+                  "metric_name": "cpu_utilization"}
+        perf_req.side_effect = perform_request_side_effect
+        self.metrics.list_metrics(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_any_call(
+            "<ANY>/v1/metric/test_id", auth_token, req_type="get", verify_ssl=False)
+
+    @mock.patch.object(Common, "perform_request")
+    def test_get_metric_id(self, perf_req):
+        """Test get_metric_id function."""
+        mock_response = Response()
+        mock_response.text = json.dumps({'metrics': {'my_metric': 'id'}})
+        perf_req.return_value = mock_response
+        self.metrics.get_metric_id(endpoint, auth_token, "my_metric", "r_id", verify_ssl=False)
+
+        perf_req.assert_called_with(
+            "<ANY>/v1/resource/generic/r_id", auth_token, req_type="get", verify_ssl=False)
+
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(Common, "perform_request")
+    def test_valid_read_data_req(self, perf_req, get_metric):
+        """Test the read metric data function, for a valid call."""
+        values = {"metric_name": "disk_write_ops",
+                  "resource_uuid": "resource_id",
+                  "collection_unit": "DAY",
+                  "collection_period": 1}
+
+        perf_req.return_value = type('obj', (object,), {'text': '{"metric_data":"[]"}'})
+
+        get_metric.return_value = "metric_id"
+        self.metrics.read_metric_data(endpoint, auth_token, values, verify_ssl=False)
+
+        perf_req.assert_called_once()
+
+    @mock.patch.object(Common, "perform_request")
+    def test_invalid_read_data_req(self, perf_req):
+        """Test the read metric data function for an invalid call."""
+        values = {}
+
+        with self.assertRaises(KeyError):
+            self.metrics.read_metric_data(endpoint, auth_token, values, verify_ssl=False)
+
+    def test_complete_response_list(self):
+        """Test the response list function for formatting metric lists."""
+        # Mock a list for testing purposes, with valid OSM metric
+        resp_list = self.metrics.response_list(metric_list)
+
+        # Check for the expected values in the resulting list
+        for l in result_list:
+            self.assertIn(l, resp_list[0].values())
+
+    def test_name_response_list(self):
+        """Test the response list with metric name configured."""
+        # Mock the metric name to test a metric name list
+        # Test with a name that is not in the list
+        invalid_name = "my_metric"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=invalid_name)
+
+        self.assertEqual(resp_list, [])
+
+        # Test with a name on the list
+        valid_name = "disk_write_ops"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=valid_name)
+
+        # Check for the expected values in the resulting list
+        for l in result_list:
+            self.assertIn(l, resp_list[0].values())
+
+    def test_resource_response_list(self):
+        """Test the response list with resource_id configured."""
+        # Mock a resource_id to test a resource list
+        # Test with resource not on the list
+        invalid_id = "mock_resource"
+        resp_list = self.metrics.response_list(metric_list, resource=invalid_id)
+
+        self.assertEqual(resp_list, [])
+
+        # Test with a resource on the list
+        valid_id = "r_id"
+        resp_list = self.metrics.response_list(metric_list, resource=valid_id)
+
+        # Check for the expected values in the resulting list
+        for l in result_list:
+            self.assertIn(l, resp_list[0].values())
+
+    def test_combined_response_list(self):
+        """Test the response list function with resource_id and metric_name."""
+        # Test for a combined resource and name list
+        # resource and name are on the list
+        valid_name = "disk_write_ops"
+        valid_id = "r_id"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=valid_name, resource=valid_id)
+
+        # Check for the expected values in the resulting list
+        for l in result_list:
+            self.assertIn(l, resp_list[0].values())
+
+        # resource not on list
+        invalid_id = "mock_resource"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=valid_name, resource=invalid_id)
+
+        self.assertEqual(resp_list, [])
+
+        # metric name not on list
+        invalid_name = "mock_metric"
+        resp_list = self.metrics.response_list(
+            metric_list, metric_name=invalid_name, resource=valid_id)
+
+        self.assertEqual(resp_list, [])
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_metric_req.py b/osm_mon/test/plugins/OpenStack/unit/test_metric_req.py
new file mode 100644 (file)
index 0000000..2fa31a6
--- /dev/null
@@ -0,0 +1,159 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all metric request message keys."""
+
+import json
+import logging
+import unittest
+
+import mock
+
+from osm_mon.core.auth import AuthManager
+from osm_mon.plugins.OpenStack.Gnocchi import metric_handler as metric_req
+from osm_mon.plugins.OpenStack.Gnocchi.metric_handler import OpenstackMetricHandler
+from osm_mon.plugins.OpenStack.common import Common
+
+log = logging.getLogger(__name__)
+
+
+class Response(object):
+    """Mock a response object for requests."""
+
+    def __init__(self):
+        """Initialise test and status code values."""
+        self.text = json.dumps([{"id": "test_id"}])
+        self.status_code = "STATUS_CODE"
+
+
+class Message(object):
+    """A class to mock a message object value for metric requests."""
+
+    def __init__(self):
+        """Initialize a mocked message instance."""
+        self.topic = "metric_request"
+        self.key = None
+        self.value = json.dumps({"mock_message": "message_details"})
+
+
+class TestMetricReq(unittest.TestCase):
+    """Integration test for metric request keys."""
+
+    def setUp(self):
+        """Setup the tests for metric request keys."""
+        super(TestMetricReq, self).setUp()
+        self.metrics = metric_req.OpenstackMetricHandler()
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, "get_endpoint", mock.Mock())
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "delete_metric")
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "get_metric_id")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_delete_metric_key(self, get_creds, get_metric_id, del_metric):
+        """Test the functionality for a delete metric request."""
+        value = {"metric_name": "disk_write_ops", "resource_uuid": "my_r_id", "correlation_id": 1}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+        del_metric.return_value = True
+
+        # Call the metric functionality and check delete request
+        get_metric_id.return_value = "my_metric_id"
+        self.metrics.handle_request('delete_metric_request', value, 'test_id')
+        del_metric.assert_called_with(mock.ANY, mock.ANY, "my_metric_id", False)
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(metric_req.OpenstackMetricHandler, "list_metrics")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_list_metric_key(self, get_creds, list_metrics):
+        """Test the functionality for a list metric request."""
+        value = {"metrics_list_request": {"correlation_id": 1}}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+
+        list_metrics.return_value = []
+
+        # Call the metric functionality and check list functionality
+        self.metrics.handle_request('list_metric_request', value, 'test_id')
+        list_metrics.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 1}, False)
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(AuthManager, "get_credentials")
+    @mock.patch.object(Common, "perform_request")
+    def test_update_metric_key(self, perf_req, get_creds):
+        """Test the functionality for an update metric request."""
+        value = {"metric_update_request":
+                     {"correlation_id": 1,
+                      "metric_name": "my_metric",
+                      "resource_uuid": "my_r_id"}}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+
+        mock_response = Response()
+        mock_response.text = json.dumps({'metrics': {'my_metric': 'id'}})
+        perf_req.return_value = mock_response
+
+        # Call metric functionality and confirm no function is called
+        # Gnocchi does not support updating a metric configuration
+        self.metrics.handle_request('update_metric_request', value, 'test_id')
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(OpenstackMetricHandler, "configure_metric")
+    @mock.patch.object(AuthManager, "get_credentials")
+    def test_config_metric_key(self, get_credentials, config_metric):
+        """Test the functionality for a create metric request."""
+        value = {"metric_create_request": {"correlation_id": 123}}
+        get_credentials.return_value = type('obj', (object,), {'config': '{"insecure":true}'})
+        # Call metric functionality and check config metric
+        config_metric.return_value = "metric_id", "resource_id"
+        self.metrics.handle_request('create_metric_request', value, 'test_id')
+        config_metric.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 123}, False)
+
+    @mock.patch.object(Common, "get_auth_token", mock.Mock())
+    @mock.patch.object(Common, 'get_endpoint', mock.Mock())
+    @mock.patch.object(OpenstackMetricHandler, "read_metric_data")
+    @mock.patch.object(AuthManager, "get_credentials")
+    @mock.patch.object(Common, "perform_request")
+    def test_read_data_key(self, perf_req, get_creds, read_data):
+        """Test the functionality for a read metric data request."""
+        value = {"correlation_id": 123, "metric_name": "cpu_utilization", "resource_uuid": "uuid"}
+
+        get_creds.return_value = type('obj', (object,), {
+            'config': '{"insecure":true}'
+        })
+
+        mock_response = Response()
+        mock_response.text = json.dumps({'metrics': {'cpu_util': 'id'}})
+        perf_req.return_value = mock_response
+
+        # Call metric functionality and check read data metrics
+        read_data.return_value = "time_stamps", "data_values"
+        self.metrics.handle_request('read_metric_data_request', value, 'test_id')
+        read_data.assert_called_with(
+            mock.ANY, mock.ANY, value, False)
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_notifier.py b/osm_mon/test/plugins/OpenStack/unit/test_notifier.py
new file mode 100644 (file)
index 0000000..a420c70
--- /dev/null
@@ -0,0 +1,133 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for all common OpenStack methods."""
+
+import json
+import unittest
+
+import mock
+from kafka import KafkaProducer
+
+from osm_mon.core.database import DatabaseManager, Alarm
+from osm_mon.plugins.OpenStack.Aodh.notifier import NotifierHandler
+
+post_data = {"severity": "critical",
+             "alarm_name": "my_alarm",
+             "current": "current_state",
+             "alarm_id": "my_alarm_id",
+             "reason": "Threshold has been broken",
+             "reason_data": {"count": 1,
+                             "most_recent": "null",
+                             "type": "threshold",
+                             "disposition": "unknown"},
+             "previous": "previous_state"}
+
+
+class Response(object):
+    """Mock a response class for generating responses."""
+
+    def __init__(self, text):
+        """Initialise a mock response with a text attribute."""
+        self.text = text
+
+
+class RFile():
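+    """Mock the request body stream that the handler reads the POST data from."""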
+    def read(self, content_length):
+        return json.dumps(post_data, sort_keys=True)
+
+
+class MockNotifierHandler(NotifierHandler):
+    """Mock the NotifierHandler class for testing purposes."""
+
+    def __init__(self):
+        """Initialise mock NotifierHandler."""
+        self.headers = {'Content-Length': '20'}
+        self.rfile = RFile()
+
+    def setup(self):
+        """Mock setup function."""
+        pass
+
+    def handle(self):
+        """Mock handle function."""
+        pass
+
+    def finish(self):
+        """Mock finish function."""
+        pass
+
+
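+# Neutralise the KafkaProducer so these tests run without a live Kafka broker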
+@mock.patch.object(KafkaProducer, "__init__", lambda *args, **kwargs: None)
+@mock.patch.object(KafkaProducer, "flush", mock.Mock())
+class TestNotifier(unittest.TestCase):
+    """Test the NotifierHandler class for requests from aodh."""
+
+    def setUp(self):
+        """Setup tests."""
+        super(TestNotifier, self).setUp()
+        self.handler = MockNotifierHandler()
+
+    @mock.patch.object(NotifierHandler, "_set_headers")
+    def test_do_GET(self, set_head):
+        """Tests do_GET. Validates _set_headers has been called."""
+        self.handler.do_GET()
+
+        set_head.assert_called_once()
+
+    @mock.patch.object(NotifierHandler, "notify_alarm")
+    @mock.patch.object(NotifierHandler, "_set_headers")
+    def test_do_POST(self, set_head, notify):
+        """Tests do_POST. Validates notify_alarm has been called."""
+        self.handler.do_POST()
+
+        set_head.assert_called_once()
+        notify.assert_called_with(post_data)
+
+    @mock.patch.object(NotifierHandler, "_publish_response")
+    @mock.patch.object(DatabaseManager, "get_alarm")
+    def test_notify_alarm_valid_alarm(
+            self, get_alarm, notify):
+        """
+        Tests notify_alarm when request from OpenStack references an existing alarm in the DB.
+        Validates KafkaProducer.notify_alarm has been called.
+        """
+        # Generate return values for valid notify_alarm operation
+        mock_alarm = Alarm()
+        get_alarm.return_value = mock_alarm
+
+        self.handler.notify_alarm(post_data)
+        notify.assert_called_with('notify_alarm', mock.ANY)
+
+    @mock.patch.object(NotifierHandler, "_publish_response")
+    @mock.patch.object(DatabaseManager, "get_alarm")
+    def test_notify_alarm_invalid_alarm(
+            self, get_alarm, notify):
+        """
+        Tests notify_alarm when request from OpenStack references a non existing alarm in the DB.
+        Validates Exception is thrown and KafkaProducer.notify_alarm has not been called.
+        """
+        # Simulate a notify_alarm request for an alarm that does not exist in the DB
+        get_alarm.return_value = None
+
+        with self.assertRaises(Exception):
+            self.handler.notify_alarm(post_data)
+        notify.assert_not_called()
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_responses.py b/osm_mon/test/plugins/OpenStack/unit/test_responses.py
new file mode 100644 (file)
index 0000000..1377bc0
--- /dev/null
@@ -0,0 +1,116 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Test that the correct responses are generated for each message."""
+
+import logging
+import unittest
+
+import mock
+
+from osm_mon.plugins.OpenStack import response as resp
+
+log = logging.getLogger(__name__)
+
+
+class TestOpenStackResponse(unittest.TestCase):
+    """Tests for responses generated by the OpenStack plugins."""
+
+    def setUp(self):
+        """Setup for testing OpenStack plugin responses."""
+        super(TestOpenStackResponse, self).setUp()
+        self.plugin_resp = resp.OpenStackResponseBuilder()
+
+    def test_invalid_key(self):
+        """Test if an invalid key is entered for a response."""
+        message = self.plugin_resp.generate_response("mock_invalid_key")
+        self.assertEqual(message, None)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "alarm_list_response")
+    def test_list_alarm_resp(self, alarm_list_resp):
+        """Test out a function call for a list alarm response."""
+        message = self.plugin_resp.generate_response("list_alarm_response")
+        self.assertEqual(alarm_list_resp.return_value, message)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "list_metric_response")
+    def test_list_metric_resp(self, metric_list_resp):
+        """Test list metric response function call."""
+        message = self.plugin_resp.generate_response("list_metric_response")
+        self.assertEqual(message, metric_list_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "delete_alarm_response")
+    def test_delete_alarm_resp(self, del_alarm_resp):
+        """Test delete alarm response function call."""
+        message = self.plugin_resp.generate_response("delete_alarm_response")
+        self.assertEqual(message, del_alarm_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "delete_metric_response")
+    def test_delete_metric_resp(self, del_metric_resp):
+        """Test the response functionality of delete metric response."""
+        message = self.plugin_resp.generate_response("delete_metric_response")
+        self.assertEqual(message, del_metric_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "create_alarm_response")
+    def test_create_alarm_resp(self, config_alarm_resp):
+        """Test create alarm response function call."""
+        message = self.plugin_resp.generate_response("create_alarm_response")
+        self.assertEqual(message, config_alarm_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "metric_create_response")
+    def test_create_metric_resp(self, config_metric_resp):
+        """Test create metric response function call."""
+        message = self.plugin_resp.generate_response("create_metric_response")
+        self.assertEqual(message, config_metric_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "update_alarm_response")
+    def test_update_alarm_resp(self, up_alarm_resp):
+        """Test update alarm response function call."""
+        message = self.plugin_resp.generate_response("update_alarm_response")
+        self.assertEqual(message, up_alarm_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "update_metric_response")
+    def test_update_metric_resp(self, up_metric_resp):
+        """Test update metric response function call."""
+        message = self.plugin_resp.generate_response("update_metric_response")
+        self.assertEqual(message, up_metric_resp.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "notify_alarm")
+    def test_notify_alarm(self, notify_alarm):
+        """Test notify alarm response function call."""
+        message = self.plugin_resp.generate_response("notify_alarm")
+        self.assertEqual(message, notify_alarm.return_value)
+
+    @mock.patch.object(
+        resp.OpenStackResponseBuilder, "read_metric_data_response")
+    def test_read_metric_data_resp(self, read_data_resp):
+        """Test read metric data response function call."""
+        message = self.plugin_resp.generate_response(
+            "read_metric_data_response")
+        self.assertEqual(message, read_data_resp.return_value)
diff --git a/osm_mon/test/plugins/OpenStack/unit/test_settings.py b/osm_mon/test/plugins/OpenStack/unit/test_settings.py
new file mode 100644 (file)
index 0000000..42619f8
--- /dev/null
@@ -0,0 +1,46 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+"""Tests for settings for OpenStack plugins configurations."""
+
+import logging
+import os
+import unittest
+
+from osm_mon.core.settings import Config
+
+log = logging.getLogger(__name__)
+
+
+class TestSettings(unittest.TestCase):
+    """Test the settings class for OpenStack plugin configuration."""
+
+    def setUp(self):
+        """Test Setup."""
+        super(TestSettings, self).setUp()
+        self.cfg = Config.instance()
+
+    def test_set_os_username(self):
+        """Test reading the environment for OpenStack plugin configuration."""
+        os.environ["OS_NOTIFIER_URI"] = "test"
+        self.cfg.read_environ()
+
+        self.assertEqual(self.cfg.OS_NOTIFIER_URI, "test")
diff --git a/osm_mon/test/plugins/VMware/__init__.py b/osm_mon/test/plugins/VMware/__init__.py
new file mode 100644 (file)
index 0000000..64d5d51
--- /dev/null
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2017-2018 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+"""VMware MON plugin tests."""
+
+import logging
+import sys
+
+from osm_mon.core.settings import Config
+
+cfg = Config.instance()
+logging.basicConfig(stream=sys.stdout,
+                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                    datefmt='%m/%d/%Y %I:%M:%S %p',
+                    level=logging.getLevelName(cfg.OSMMON_LOG_LEVEL))
+log = logging.getLogger(__name__)
+
diff --git a/osm_mon/test/plugins/VMware/test_mon_plugin_vrops.py b/osm_mon/test/plugins/VMware/test_mon_plugin_vrops.py
new file mode 100644 (file)
index 0000000..2d10d1b
--- /dev/null
@@ -0,0 +1,3083 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2017-2018 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+""" Mock tests for VMware vROPs Mon plugin """
+
+import os
+import sys
+import unittest
+
+import mock
+import requests
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", ".."))
+
+from osm_mon.plugins.vRealiseOps import mon_plugin_vrops as monPlugin
+
+from pyvcloud.vcd.client import Client
+
+
+class TestMonPlugin(unittest.TestCase):
+    """Test class for vROPs Mon Plugin class methods"""
+
+    def setUp(self):
+        """Setup the tests for Mon Plugin class methods"""
+        super(TestMonPlugin, self).setUp()
+
+        self.m_vim_access_config = {'vrops_site': 'abc',
+                                    'vrops_user': 'user',
+                                    'vrops_password': 'passwd',
+                                    'vim_url': 'vcd_url',
+                                    'admin_username': 'admin',
+                                    'admin_password': 'admin_passwd',
+                                    'vim_uuid': '1',
+                                    'tenant_id': 'org_vdc_1'}
+        self.mon_plugin = monPlugin.MonPlugin(self.m_vim_access_config)
+        # create client object
+        self.vca = Client('test', verify_ssl_certs=False)
+        # create session
+        self.session = requests.Session()
+
+    def test_get_default_Params_valid_metric_alarm_name(self):
+        """Test get default params method"""
+
+        # Mock valid metric_alarm_name and response
+        metric_alarm_name = "Average_Memory_Usage_Above_Threshold"
+        expected_return = {'impact': 'risk', 'cancel_cycles': 2, 'adapter_kind': 'VMWARE',
+                           'repeat': False, 'cancel_period': 300, 'alarm_type': 16,
+                           'vrops_alarm': 'Avg_Mem_Usage_Above_Thr', 'enabled': True, 'period': 300,
+                           'resource_kind': 'VirtualMachine', 'alarm_subType': 19,
+                           'action': 'acknowledge', 'evaluation': 2, 'unit': '%'}
+
+        # call get default param function under test
+        actual_return = self.mon_plugin.get_default_Params(metric_alarm_name)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    def test_get_default_Params_invalid_metric_alarm_name(self):
+        """Test get default params method invalid metric alarm"""
+
+        # Mock valid metric_alarm_name and response
+        metric_alarm_name = "Invalid_Alarm"
+        expected_return = {}
+
+        # call get default param function under test
+        actual_return = self.mon_plugin.get_default_Params(metric_alarm_name)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_symptom_valid_req_response(self, m_post):
+        """Test create symptom method-valid request"""
+
+        # Mock valid symptom params and mock responses
+        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
+                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
+                         'symptom_name': \
+                             'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                         'operation': 'GT', 'wait_cycles': 1, 'metric_key': 'cpu|usage_average'}
+
+        m_post.return_value.status_code = 201
+        m_post.return_value.content = \
+            '{"id":"SymptomDefinition-351c23b4-bc3c-4c7b-b4af-1ad90a673c5d",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine",\
+            "waitCycles":1,"cancelCycles":1,\
+            "state":{"severity":"CRITICAL","condition":{"type":"CONDITION_HT",\
+            "key":"cpu|usage_average","operator":"GT","value":"0.0",\
+            "valueType":"NUMERIC",\
+            "instanced":false,"thresholdType":"STATIC"}}}'
+
+        expected_return = "SymptomDefinition-351c23b4-bc3c-4c7b-b4af-1ad90a673c5d"
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_symptom(symptom_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_symptom_invalid_req_response(self, m_post):
+        """Test create symptom method-invalid response"""
+
+        # Mock valid symptom params and invalid mock responses
+        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
+                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
+                         'symptom_name': \
+                             'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                         'operation': 'GT', 'wait_cycles': 1, 'metric_key': 'cpu|usage_average'}
+
+        m_post.return_value.status_code = 404
+        m_post.return_value.content = '404 Not Found'
+
+        expected_return = None
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_symptom(symptom_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_symptom_incorrect_data(self, m_post):
+        """Test create symptom method-incorrect data"""
+
+        # Mock valid symptom params and invalid mock responses
+        symptom_param = {'threshold_value': 0, 'cancel_cycles': 1, 'adapter_kind_key': 'VMWARE',
+                         'resource_kind_key': 'VirtualMachine', 'severity': 'CRITICAL',
+                         'symptom_name': \
+                             'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                         'operation': 'GT', 'metric_key': 'cpu|usage_average'}
+
+        expected_return = None
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_symptom(symptom_param)
+
+        # verify that mocked method is not called
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_alarm_definition_valid_req_response(self, m_post):
+        """Test create alarm definition method-valid response"""
+
+        # Mock valid alarm params and mock responses
+        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
+                       'subType': 19, 'waitCycles': 1,
+                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                       'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'resourceKindKey': 'VirtualMachine', 'type': 16,
+                       'symptomDefinitionId': \
+                           'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
+
+        m_post.return_value.status_code = 201
+        m_post.return_value.content = \
+            '{"id":"AlertDefinition-d4f21e4b-770a-45d6-b298-022eaf489115",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "description":"CPU_Utilization_Above_Threshold","adapterKindKey":"VMWARE",\
+            "resourceKindKey":"VirtualMachine","waitCycles":1,"cancelCycles":1,\
+            "type":16,"subType":19,\
+            "states":[{"severity":"CRITICAL","base-symptom-set":{"type":"SYMPTOM_SET",\
+            "relation":"SELF","aggregation":"ALL","symptomSetOperator":"AND",\
+            "symptomDefinitionIds":\
+            ["SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df"]},\
+            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
+
+        expected_return = "AlertDefinition-d4f21e4b-770a-45d6-b298-022eaf489115"
+
+        # call create alarm definition method under test
+        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_alarm_definition_invalid_req_response(self, m_post):
+        """Test create alarm definition method-invalid response"""
+
+        # Mock valid alarm params and mock responses
+        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
+                       'subType': 19, 'waitCycles': 1,
+                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                       'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'resourceKindKey': 'VirtualMachine', 'type': 16,
+                       'symptomDefinitionId': \
+                           'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
+
+        m_post.return_value.status_code = 404
+        m_post.return_value.content = '404 Not Found'
+
+        expected_return = None
+
+        # call create alarm definition method under test
+        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
+
+        # verify that mocked method is called
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    def test_create_alarm_definition_incorrect_data(self, m_post):
+        """Test create alarm definition method-incorrect data"""
+
+        # Mock incorrect alarm param
+        alarm_param = {'description': 'CPU_Utilization_Above_Threshold', 'cancelCycles': 1,
+                       'subType': 19, 'waitCycles': 1, 'type': 16,
+                       'severity': 'CRITICAL', 'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                       'symptomDefinitionId': \
+                           'SymptomDefinition-25278b06-bff8-4409-a141-9b4e064235df'}
+        expected_return = None
+
+        # call create symptom method under test
+        actual_return = self.mon_plugin.create_alarm_definition(alarm_param)
+
+        # verify that mocked method is not called
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_valid_req(self, m_get_default_Params,
+                                       m_get_alarm_defination_by_name,
+                                       m_create_symptom,
+                                       m_create_alarm_definition,
+                                       m_get_vm_moref_id,
+                                       m_get_vm_resource_id,
+                                       m_create_alarm_notification_rule,
+                                       m_save_alarm):
+        """Test configure alarm valid request creating alarm"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'cpu_utilization',
+                       'vdu_name': 'vdu1', 'vnf_member_index': 'index1', 'ns_id': 'nsd1',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vm_moref_id = 'vm-6626'
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def = 'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        alarm_def_uuid = '0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = vm_moref_id
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_create_alarm_notification_rule.return_value = 'f37900e7-dd01-4383-b84c-08f519530d71'
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_create_alarm_notification_rule.assert_called_with(vrops_alarm_name,
+                                                            alarm_def,
+                                                            resource_id)
+        m_save_alarm.assert_called_with(vrops_alarm_name, '1',
+                                        config_dict['threshold_value'],
+                                        config_dict['operation'],
+                                        config_dict['metric_name'],
+                                        config_dict['vdu_name'],
+                                        config_dict['vnf_member_index'],
+                                        config_dict['ns_id'])
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_invalid_alarm_name_req(self, m_get_default_Params,
+                                                    m_get_alarm_defination_by_name,
+                                                    m_create_symptom,
+                                                    m_create_alarm_definition,
+                                                    m_get_vm_moref_id,
+                                                    m_get_vm_resource_id,
+                                                    m_create_alarm_notification_rule,
+                                                    m_save_alarm):
+        """Test configure alarm invalid test: for invalid alarm name"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        alarm_def_uuid = None
+
+        # Mock default Parameters to return an empty dict for the invalid alarm name
+        m_get_default_Params.return_value = {}
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        m_get_default_Params.assert_called_with(config_dict['alarm_name'])
+        m_get_alarm_defination_by_name.assert_not_called()
+        m_create_symptom.assert_not_called()
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value i.e. None
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_invalid_metric_name_req(self, m_get_default_Params,
+                                                     m_get_alarm_defination_by_name,
+                                                     m_create_symptom,
+                                                     m_create_alarm_definition,
+                                                     m_get_vm_moref_id,
+                                                     m_get_vm_resource_id,
+                                                     m_create_alarm_notification_rule,
+                                                     m_save_alarm):
+        """Test configure alarm invalid test: for invalid metric name"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        alarm_def_uuid = None
+
+        # Mock default Parameters: valid alarm config, empty dict for the invalid metric name
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {}
+                                            ]
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_not_called()
+        m_create_symptom.assert_not_called()
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value i.e. None
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_invalid_already_exists(self, m_get_default_Params,
+                                                    m_get_alarm_defination_by_name,
+                                                    m_create_symptom,
+                                                    m_create_alarm_definition,
+                                                    m_get_vm_moref_id,
+                                                    m_get_vm_resource_id,
+                                                    m_create_alarm_notification_rule,
+                                                    m_save_alarm):
+        """Test configure alarm invalid test: for alarm that already exists"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return value
+        m_get_alarm_defination_by_name.return_value = ['mocked_alarm_CPU_Utilization_Above_Thr']
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_not_called()
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_symptom_creation(self, m_get_default_Params,
+                                                     m_get_alarm_defination_by_name,
+                                                     m_create_symptom,
+                                                     m_create_alarm_definition,
+                                                     m_get_vm_moref_id,
+                                                     m_get_vm_resource_id,
+                                                     m_create_alarm_notification_rule,
+                                                     m_save_alarm):
+        """Test configure alarm: failed to create symptom"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_not_called()
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_alert_creation(self, m_get_default_Params,
+                                                   m_get_alarm_defination_by_name,
+                                                   m_create_symptom,
+                                                   m_create_alarm_definition,
+                                                   m_get_vm_moref_id,
+                                                   m_get_vm_resource_id,
+                                                   m_create_alarm_notification_rule,
+                                                   m_save_alarm):
+        """Test configure alarm: failed to create alert in vROPs"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_to_get_vm_moref_id(self, m_get_default_Params,
+                                                       m_get_alarm_defination_by_name,
+                                                       m_create_symptom,
+                                                       m_create_alarm_definition,
+                                                       m_get_vm_moref_id,
+                                                       m_get_vm_resource_id,
+                                                       m_create_alarm_notification_rule,
+                                                       m_save_alarm):
+        """Test configure alarm: failed to get vm_moref_id"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_not_called()
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_to_get_vm_resource_id(self, m_get_default_Params,
+                                                          m_get_alarm_defination_by_name,
+                                                          m_create_symptom,
+                                                          m_create_alarm_definition,
+                                                          m_get_vm_moref_id,
+                                                          m_get_vm_resource_id,
+                                                          m_create_alarm_notification_rule,
+                                                          m_save_alarm):
+        """Test configure alarm: failed to get vm resource_id"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = 'vm-6626'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = vm_moref_id
+        m_get_vm_resource_id.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_create_alarm_notification_rule.assert_not_called()
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.DatabaseManager, 'save_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_alarm_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'create_symptom')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_by_name')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_configure_alarm_failed_to_create_alarm_notification_rule(self, m_get_default_Params,
+                                                                      m_get_alarm_defination_by_name,
+                                                                      m_create_symptom,
+                                                                      m_create_alarm_definition,
+                                                                      m_get_vm_moref_id,
+                                                                      m_get_vm_resource_id,
+                                                                      m_create_alarm_notification_rule,
+                                                                      m_save_alarm):
+        """Test configure alarm: failed to create alarm notification rule"""
+
+        # Mock input configuration dictionary
+        config_dict = {'threshold_value': 0, 'severity': 'CRITICAL',
+                       'alarm_name': 'CPU_Utilization_Above_Threshold',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                       'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                       'statistic': 'AVERAGE', 'metric_name': 'CPU_UTILIZATION',
+                       'operation': 'GT', 'unit': '%',
+                       'description': 'CPU_Utilization_Above_Threshold'}
+
+        # symptom parameters to be passed for symptom creation
+        symptom_params = {'threshold_value': 0,
+                          'cancel_cycles': 1,
+                          'adapter_kind_key': 'VMWARE',
+                          'resource_kind_key': 'VirtualMachine',
+                          'severity': 'CRITICAL',
+                          'symptom_name': \
+                              'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                          'operation': 'GT',
+                          'wait_cycles': 1,
+                          'metric_key': 'cpu|usage_average'}
+
+        # alarm parameters to be passed for alarm creation
+        alarm_params = {'description': 'CPU_Utilization_Above_Threshold',
+                        'cancelCycles': 1, 'subType': 19,
+                        'waitCycles': 1, 'severity': 'CRITICAL',
+                        'impact': 'risk', 'adapterKindKey': 'VMWARE',
+                        'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                        'resourceKindKey': 'VirtualMachine',
+                        'symptomDefinitionId': \
+                            'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804',
+                        'type': 16}
+
+        vrops_alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = 'vm-6626'
+        alarm_def = 'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        alarm_def_uuid = None
+
+        # Mock default Parameters for alarm & metric configuration
+        m_get_default_Params.side_effect = [{'impact': 'risk', 'cancel_cycles': 1,
+                                             'adapter_kind': 'VMWARE', 'repeat': False,
+                                             'cancel_period': 300, 'alarm_type': 16,
+                                             'vrops_alarm': 'CPU_Utilization_Above_Thr',
+                                             'enabled': True, 'period': 300,
+                                             'resource_kind': 'VirtualMachine',
+                                             'alarm_subType': 19, 'action': 'acknowledge',
+                                             'evaluation': 1, 'unit': 'msec'},
+                                            {'metric_key': 'cpu|usage_average', 'unit': '%'}
+                                            ]
+        # set mocked function return values
+        m_get_alarm_defination_by_name.return_value = []
+        m_create_symptom.return_value = 'SymptomDefinition-2e8f9ddc-9f7b-4cd6-b85d-7d7fe3a8a804'
+        m_create_alarm_definition.return_value = \
+            'AlertDefinition-0f3cdcb3-4e1b-4a0b-86d0-66d4b3f65220'
+        m_get_vm_moref_id.return_value = vm_moref_id
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_create_alarm_notification_rule.return_value = None
+
+        # Call configure_alarm method under test
+        return_value = self.mon_plugin.configure_alarm(config_dict)
+
+        # Verify that mocked methods are called with correct parameters
+        self.assertEqual(m_get_default_Params.call_count, 2)
+        m_get_alarm_defination_by_name.assert_called_with(vrops_alarm_name)
+        m_create_symptom.assert_called_with(symptom_params)
+        m_create_alarm_definition.assert_called_with(alarm_params)
+        m_get_vm_moref_id.assert_called_with(config_dict['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_create_alarm_notification_rule.assert_called_with(vrops_alarm_name, alarm_def, resource_id)
+        m_save_alarm.assert_not_called()
+
+        # Verify return value with expected value of alarm_def_uuid
+        self.assertEqual(return_value, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_details_valid_rest_req_response(self, m_get):
+        """Test get_alarm_defination_details: For a valid REST request response"""
+
+        alarm_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"id":"AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+                            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+                            "description":"CPU_Utilization_Above_Threshold",\
+                            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine",\
+                            "waitCycles":1,"cancelCycles":1,"type":16,"subType":19,\
+                            "states":[{"severity":"CRITICAL","base-symptom-set":\
+                            {"type":"SYMPTOM_SET","relation":"SELF",\
+                            "aggregation":"ALL","symptomSetOperator":"AND","symptomDefinitionIds":\
+                            ["SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]},\
+                            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
+
+        expected_alarm_details = {
+            'adapter_kind': 'VMWARE',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'resource_kind': 'VirtualMachine', 'type': 16, 'sub_type': 19}
+
+        expected_alarm_details_json = {
+            'states': [
+                {'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                 'severity': 'CRITICAL',
+                 'base-symptom-set': {
+                     'symptomDefinitionIds': [
+                         'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
+                     'relation': 'SELF', 'type': 'SYMPTOM_SET',
+                     'aggregation': 'ALL', 'symptomSetOperator': 'AND'}}],
+            'adapterKindKey': 'VMWARE',
+            'description': 'CPU_Utilization_Above_Threshold',
+            'type': 16, 'cancelCycles': 1,
+            'resourceKindKey': 'VirtualMachine',
+            'subType': 19, 'waitCycles': 1,
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+
+        # Call get_alarm_defination_details method under test
+        alarm_details_json, alarm_details = self.mon_plugin.get_alarm_defination_details(alarm_uuid)
+
+        # Verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_details, alarm_details)
+        self.assertEqual(expected_alarm_details_json, alarm_details_json)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_details_invalid_rest_req_response(self, m_get):
+        """Test get_alarm_defination_details: For an invalid REST request response"""
+
+        alarm_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 404
+        m_get.return_value.content = '{"message": "No such AlertDefinition - \
+                                        AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444.",\
+                                        "httpStatusCode": 404,"apiErrorCode": 404}'
+
+        expected_alarm_details = None
+        expected_alarm_details_json = None
+
+        # Call get_alarm_defination_details method under test
+        alarm_details_json, alarm_details = self.mon_plugin.get_alarm_defination_details(alarm_uuid)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_details, alarm_details)
+        self.assertEqual(expected_alarm_details_json, alarm_details_json)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_by_name_valid_rest_req_response(self, m_get):
+        """Test get_alarm_defination_by_name: For a valid REST request response"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"pageInfo": {"totalCount": 1,"page": 0,"pageSize": 1000},\
+                                    "links": [\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&pageSize=1000",\
+                                        "rel": "SELF","name": "current"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&pageSize=1000",\
+                                         "rel": "RELATED","name": "first"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&pageSize=1000",\
+                                         "rel": "RELATED","name": "last"}],\
+                                    "alertDefinitions": [{\
+                                        "id": "AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+                                        "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+                                        "description": "CPU_Utilization_Above_Threshold",\
+                                        "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine",\
+                                        "waitCycles": 1,"cancelCycles": 1,"type": 16,"subType": 19,\
+                                        "states": [{"impact": {"impactType": "BADGE","detail": "risk"},\
+                                            "severity": "CRITICAL",\
+                                            "base-symptom-set": {"type": "SYMPTOM_SET",\
+                                            "relation": "SELF","aggregation": "ALL",\
+                                            "symptomSetOperator": "AND",\
+                                            "symptomDefinitionIds": [\
+                                            "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]}}]\
+                                        }]}'
+
+        # Expected return match list
+        Exp_alert_match_list = [{'states':
+                                     [{'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                                       'severity': 'CRITICAL',
+                                       'base-symptom-set': {
+                                           'symptomDefinitionIds': \
+                                               ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
+                                           'relation': 'SELF',
+                                           'type': 'SYMPTOM_SET',
+                                           'aggregation': 'ALL',
+                                           'symptomSetOperator': 'AND'}
+                                       }],
+                                 'adapterKindKey': 'VMWARE',
+                                 'description': 'CPU_Utilization_Above_Threshold',
+                                 'type': 16,
+                                 'cancelCycles': 1,
+                                 'resourceKindKey': 'VirtualMachine',
+                                 'subType': 19, 'waitCycles': 1,
+                                 'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+                                 'name': \
+                                     'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+                                 }]
+
+        # Call get_alarm_defination_by_name method under test
+        alert_match_list = self.mon_plugin.get_alarm_defination_by_name(alarm_name)
+
+        # Verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(Exp_alert_match_list, alert_match_list)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_alarm_defination_by_name_no_valid_alarm_found(self, m_get):
+        """Test get_alarm_defination_by_name: With no valid alarm found in returned list"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda5'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"pageInfo": {"totalCount": 1,"page": 0,"pageSize": 1000},\
+                                    "links": [\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&pageSize=1000",\
+                                        "rel": "SELF","name": "current"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&pageSize=1000",\
+                                         "rel": "RELATED","name": "first"},\
+                                        {"href": "/suite-api/api/alertdefinitions?page=0&pageSize=1000",\
+                                         "rel": "RELATED","name": "last"}],\
+                                    "alertDefinitions": [{\
+                                        "id": "AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+                                        "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+                                        "description": "CPU_Utilization_Above_Threshold",\
+                                        "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine",\
+                                        "waitCycles": 1,"cancelCycles": 1,"type": 16,"subType": 19,\
+                                        "states": [{"impact": {"impactType": "BADGE","detail": "risk"},\
+                                            "severity": "CRITICAL",\
+                                            "base-symptom-set": {"type": "SYMPTOM_SET",\
+                                            "relation": "SELF","aggregation": "ALL",\
+                                            "symptomSetOperator": "AND",\
+                                            "symptomDefinitionIds": [\
+                                            "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]}}]\
+                                        }]}'
+
+        # Expected return match list
+        Exp_alert_match_list = []
+
+        # Call get_alarm_defination_by_name method under test
+        alert_match_list = self.mon_plugin.get_alarm_defination_by_name(alarm_name)
+
+        # Verify that mocked method is called
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(Exp_alert_match_list, alert_match_list)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
+    def test_update_symptom_defination_valid_symptom_req_response(self,
+                                                                  m_get_symptom_defination_details,
+                                                                  m_put):
+        """Test update_symptom_defination: With valid REST response, update symptom"""
+
+        # Expected symptom to be updated
+        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+        new_alarm_config = {'severity': "CRITICAL",
+                            'operation': 'GT',
+                            'threshold_value': 5,
+                            'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+                            }
+
+        # Set mocked function's return values
+        m_get_symptom_defination_details.return_value = {
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
+            "adapterKindKey": "VMWARE",
+            "resourceKindKey": "VirtualMachine",
+            "waitCycles": 1,
+            "cancelCycles": 1,
+            "state": {"severity": "CRITICAL",
+                      "condition": {
+                          "type": "CONDITION_HT",
+                          "key": "cpu|usage_average", "operator": "GT", "value": "0.0",
+                          "valueType": "NUMERIC", "instanced": False,
+                          "thresholdType": "STATIC"}
+                      }
+        }
+
+        m_put.return_value.status_code = 200
+        m_put.return_value.content = '{\
+            "id":"SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "adapterKindKey":"VMWARE","resourceKindKey":"VirtualMachine","waitCycles":1,\
+            "cancelCycles":1,\
+            "state":{\
+                "severity":"CRITICAL",\
+                "condition":{\
+                    "type":"CONDITION_HT","key":"cpu|usage_average","operator":"GT","value":"5.0",\
+                    "valueType":"NUMERIC","instanced":false,"thresholdType":"STATIC"}}}'
+
+        # Call update_symptom_defination method under test
+        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,
+                                                                 new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(symptom_defination_id, symptom_uuid)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
+    def test_update_symptom_defination_invalid_symptom_req_response(self,
+                                                                    m_get_symptom_defination_details,
+                                                                    m_put):
+        """Test update_symptom_defination: If invalid REST response received, return None"""
+
+        # Expected symptom to be updated
+        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+        new_alarm_config = {'severity': "CRITICAL",
+                            'operation': 'GT',
+                            'threshold_value': 5,
+                            'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+                            }
+
+        # Set mocked function's return values
+        m_get_symptom_defination_details.return_value = {
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
+            "adapterKindKey": "VMWARE",
+            "resourceKindKey": "VirtualMachine",
+            "waitCycles": 1,
+            "cancelCycles": 1,
+            "state": {"severity": "CRITICAL",
+                      "condition": {
+                          "type": "CONDITION_HT",
+                          "key": "cpu|usage_average", "operator": "GT", "value": "0.0",
+                          "valueType": "NUMERIC", "instanced": False,
+                          "thresholdType": "STATIC"}
+                      }
+        }
+
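+        # Mocked PUT response: vROps reports HTTP 500 because the symptom definition does not exist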
+        m_put.return_value.status_code = 500
+        m_put.return_value.content = '{\
+            "message": "Internal Server error, cause unknown.",\
+            "moreInformation": [\
+                {"name": "errorMessage",\
+                 "value": "Symptom Definition CPU_Utilization_Above_Thr-e14b203c-\
+                 6bf2-4e2f-a91c-8c19d240eda4 does not exist and hence cannot be updated."},\
+                {"name": "localizedMessage",\
+                 "value": "Symptom Definition CPU_Utilization_Above_Thr-e14b203c-\
+                 6bf2-4e2f-a91c-8c19d240eda4 does not exist and hence cannot be updated.;"}],\
+            "httpStatusCode": 500,"apiErrorCode": 500}'
+
+        # Call update_symptom_defination method under test
+        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,
+                                                                 new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(symptom_uuid, None)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_symptom_defination_details')
+    def test_update_symptom_defination_failed_to_get_symptom_defination(self,
+                                                                        m_get_symptom_defination_details,
+                                                                        m_put):
+        """Test update_symptom_defination: if fails to get symptom_defination returns None"""
+
+        # Expected symptom to be updated
+        symptom_defination_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+        new_alarm_config = {'severity': "CRITICAL",
+                            'operation': 'GT',
+                            'threshold_value': 5,
+                            'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+                            }
+
+        # Set mocked function's return values
+        m_get_symptom_defination_details.return_value = None
+
+        # Call update_symptom_defination method under test
+        symptom_uuid = self.mon_plugin.update_symptom_defination(symptom_defination_id,
+                                                                 new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_get_symptom_defination_details.assert_called_with(symptom_defination_id)
+        m_put.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(symptom_uuid, None)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_symptom_defination_details_valid_req_response(self, m_get):
+        """Test update_symptom_defination: With valid REST response symptom is created"""
+
+        # Symptom definition to be fetched
+        symptom_uuid = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{\
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",\
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "adapterKindKey": "VMWARE","resourceKindKey": "VirtualMachine","waitCycles": 1,\
+            "cancelCycles": 1,"state": {"severity": "CRITICAL","condition": {"type": "CONDITION_HT",\
+            "key": "cpu|usage_average","operator": "GT","value": "6.0","valueType": "NUMERIC",\
+            "instanced": false,"thresholdType": "STATIC"}}}'
+        expected_symptom_details = {
+            "id": "SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278",
+            "name": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",
+            "adapterKindKey": "VMWARE", "resourceKindKey": "VirtualMachine", "waitCycles": 1,
+            "cancelCycles": 1, "state": {"severity": "CRITICAL", "condition": {"type": "CONDITION_HT",
+                                                                               "key": "cpu|usage_average",
+                                                                               "operator": "GT", "value": "6.0",
+                                                                               "valueType": "NUMERIC",
+                                                                               "instanced": False,
+                                                                               "thresholdType": "STATIC"}}}
+
+        # Call get_symptom_defination_details method under test
+        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_symptom_details, symptom_details)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_symptom_defination_details_invalid_req_response(self, m_get):
+        """Test update_symptom_defination: if invalid REST response received return None"""
+
+        # Symptom definition to be fetched
+        symptom_uuid = 'SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'
+
+        # Set mocked function's return values
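+        # vROps replies 404 when the requested symptom definition does not exist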
+        m_get.return_value.status_code = 404
+        m_get.return_value.content = '{"message": "No such SymptomDefinition\
+        - SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278.",\
+        "httpStatusCode": 404,"apiErrorCode": 404}'
+
+        expected_symptom_details = None
+
+        # Call get_symptom_defination_details method under test
+        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_symptom_details, symptom_details)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_symptom_defination_details_symptom_uuid_not_provided(self, m_get):
+        """Test update_symptom_defination: if required symptom uuid is not provided"""
+
+        # No symptom uuid is provided
+        symptom_uuid = None
+        expected_symptom_details = None
+
+        # Call get_symptom_defination_details method under test
+        symptom_details = self.mon_plugin.get_symptom_defination_details(symptom_uuid)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_symptom_details, symptom_details)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_reconfigure_alarm_valid_req_response(self, m_put):
+        """Test reconfigure_alarm: for valid REST response"""
+
+        # Set input parameters to reconfigure_alarm
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'description': 'CPU_Utilization_Above_Threshold', 'adapterKindKey': 'VMWARE',
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'}, 'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
+                            'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL',
+                            'symptomSetOperator': 'AND'}}],
+            'type': 16, 'cancelCycles': 1, 'resourceKindKey': 'VirtualMachine', 'subType': 19,
+            'waitCycles': 1}
+
+        new_alarm_config = {'severity': 'WARNING',
+                            'description': 'CPU_Utilization_Above_Threshold_Warning'}
+
+        # Set mocked function's return values
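+        # Mocked PUT response: the alert definition now carries the WARNING severity and updated description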
+        m_put.return_value.status_code = 200
+        m_put.return_value.content = '{"id":"AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d",\
+            "name":"CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+            "description":"CPU_Utilization_Above_Threshold_Warning","adapterKindKey":"VMWARE",\
+            "resourceKindKey":"VirtualMachine","waitCycles":1,"cancelCycles":1,"type":16,\
+            "subType":19,"states":[{"severity":"WARNING","base-symptom-set":{"type":"SYMPTOM_SET",\
+            "relation":"SELF","aggregation":"ALL","symptomSetOperator":"AND",\
+            "symptomDefinitionIds":["SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278"]},\
+            "impact":{"impactType":"BADGE","detail":"risk"}}]}'
+
+        # Expected alarm_def_uuid to be returned
+        expected_alarm_def_uuid = '9a6d8a14-9f25-4d81-bf91-4d773497444d'
+
+        # Call reconfigure_alarm method under test
+        alarm_def_uuid = self.mon_plugin.reconfigure_alarm(alarm_details_json, new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_def_uuid, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_reconfigure_alarm_invalid_req_response(self, m_put):
+        """Test reconfigure_alarm: for invalid REST response, return None"""
+
+        # Set input parameters to reconfigure_alarm
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-bf91-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+            'description': 'CPU_Utilization_Above_Threshold', 'adapterKindKey': 'VMWARE',
+            'states': [{'impact': {'impactType': 'BADGE', 'detail': 'risk'}, 'severity': 'CRITICAL',
+                        'base-symptom-set': {
+                            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-bcd3-9b5884973278'],
+                            'relation': 'SELF', 'type': 'SYMPTOM_SET', 'aggregation': 'ALL',
+                            'symptomSetOperator': 'AND'}}],
+            'type': 16, 'cancelCycles': 1, 'resourceKindKey': 'VirtualMachine', 'subType': 19,
+            'waitCycles': 1}
+
+        new_alarm_config = {'severity': 'WARNING',
+                            'description': 'CPU_Utilization_Above_Threshold_Warning'}
+
+        # Set mocked function's return values
+        m_put.return_value.status_code = 500
+        m_put.return_value.content = '{"message": "Internal Server error, cause unknown.",\
+            "moreInformation": [{"name": "errorMessage",\
+            "value": "Cannot update Alert Definition CPU_Utilization_Above_Thr-\
+            e14b203c-6bf2-4e2f-a91c-8c19d240eda4 since it does not exist"},\
+            {"name": "localizedMessage",\
+            "value": "Cannot update Alert Definition CPU_Utilization_Above_Thr-\
+            e14b203c-6bf2-4e2f-a91c-8c19d240eda4 since it does not exist;"}],\
+            "httpStatusCode": 500,"apiErrorCode": 500}'
+
+        # Expected alarm_def_uuid to be returned
+        expected_alarm_def_uuid = None
+
+        # Call reconfigure_alarm method under test
+        alarm_def_uuid = self.mon_plugin.reconfigure_alarm(alarm_details_json, new_alarm_config)
+
+        # Verify that mocked method is called with required parameters
+        m_put.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(expected_alarm_def_uuid, alarm_def_uuid)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_successful_alarm_deletion(self,
+                                                                  m_get_alarm_defination_details,
+                                                                  m_delete_notification_rule,
+                                                                  m_delete_alarm_defination,
+                                                                  m_delete_symptom_definition):
+        """Test delete_alarm_configuration: for successful alarm deletion, return alarm uuid"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
+        m_delete_alarm_defination.return_value = alarm_details['alarm_id']
+        m_delete_symptom_definition.return_value = alarm_details['symptom_definition_id']
+
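+        # A successful deletion is expected to remove the notification rule, the alert
+        # definition and the symptom definition, in that order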
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
+        m_delete_symptom_definition.assert_called_with(alarm_details['symptom_definition_id'])
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, delete_alarm_req_dict['alarm_uuid'])
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_get_alarm_defination(self,
+                                                                       m_get_alarm_defination_details,
+                                                                       m_delete_notification_rule,
+                                                                       m_delete_alarm_defination,
+                                                                       m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to get alarm definition, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = None
+        alarm_details = None
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_not_called()
+        m_delete_alarm_defination.assert_not_called()
+        m_delete_symptom_definition.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_delete_notification_rule(self,
+                                                                           m_get_alarm_defination_details,
+                                                                           m_delete_notification_rule,
+                                                                           m_delete_alarm_defination,
+                                                                           m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to delete notification rule, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = None
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_not_called()
+        m_delete_symptom_definition.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_delete_alarm_defination(self,
+                                                                          m_get_alarm_defination_details,
+                                                                          m_delete_notification_rule,
+                                                                          m_delete_alarm_defination,
+                                                                          m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to delete alarm definition, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
+        m_delete_alarm_defination.return_value = None
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
+        m_delete_symptom_definition.assert_not_called()
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_symptom_definition')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_alarm_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'delete_notification_rule')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_delete_alarm_configuration_failed_to_delete_symptom_definition(self,
+                                                                            m_get_alarm_defination_details,
+                                                                            m_delete_notification_rule,
+                                                                            m_delete_alarm_defination,
+                                                                            m_delete_symptom_definition):
+        """Test delete_alarm_configuration: if failed to delete symptom definition, return None"""
+
+        # Set input parameters to delete_alarm_configuration
+        delete_alarm_req_dict = {'alarm_uuid': '9a6d8a14-9f25-4d81-bf91-4d773497444d'}
+
+        # Set mocked function's return values
+        alarm_details_json = {
+            'id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'symptomDefinitionIds': ['SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278']}
+        alarm_details = {
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4',
+            'alarm_id': 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d',
+            'symptom_definition_id': 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'}
+
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_delete_notification_rule.return_value = '989e7293-d78d-4405-92e30ec4f247'
+        m_delete_alarm_defination.return_value = alarm_details['alarm_id']
+        m_delete_symptom_definition.return_value = None
+
+        # Call delete_alarm_configuration method under test
+        alarm_uuid = self.mon_plugin.delete_alarm_configuration(delete_alarm_req_dict)
+
+        # Verify that mocked method is called with required parameters
+        m_get_alarm_defination_details.assert_called_with(delete_alarm_req_dict['alarm_uuid'])
+        m_delete_notification_rule.assert_called_with(alarm_details['alarm_name'])
+        m_delete_alarm_defination.assert_called_with(alarm_details['alarm_id'])
+        m_delete_symptom_definition.assert_called_with(alarm_details['symptom_definition_id'])
+
+        # Verify return value with expected value
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
+    def test_delete_notification_rule_successful_deletion_req_response(self,
+                                                                       m_get_notification_rule_id_by_alarm_name,
+                                                                       m_delete):
+        """Test delete_notification_rule: Valid notification rule is deleted & returns rule_id"""
+
+        # Set input parameters to delete_notification_rule
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get_notification_rule_id_by_alarm_name.return_value = '8db86441-71d8-4830-9e1a-a90be3776d12'
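+        # vROps returns HTTP 204 (No Content) when the notification rule is deleted successfully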
+        m_delete.return_value.status_code = 204
+
+        # Call delete_notification_rule method under test
+        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(rule_id, '8db86441-71d8-4830-9e1a-a90be3776d12')
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
+    def test_delete_notification_rule_failed_to_get_notification_rule_id(self,
+                                                                         m_get_notification_rule_id_by_alarm_name,
+                                                                         m_delete):
+        """Test delete_notification_rule: if notification rule is not found, returns None"""
+
+        # Set input parameters to delete_notification_rule
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get_notification_rule_id_by_alarm_name.return_value = None
+
+        # Call delete_notification_rule method under test
+        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
+        m_delete.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(rule_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_notification_rule_id_by_alarm_name')
+    def test_delete_notification_rule_invalid_deletion_req_response(self,
+                                                                    m_get_notification_rule_id_by_alarm_name,
+                                                                    m_delete):
+        """Test delete_notification_rule: If an invalid response is received, returns None"""
+
+        # Set input parameters to delete_notification_rule
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get_notification_rule_id_by_alarm_name.return_value = '8db86441-71d8-4830-9e1a-a90be3776d12'
+        m_delete.return_value.status_code = 404
+
+        # Call delete_notification_rule method under test
+        rule_id = self.mon_plugin.delete_notification_rule(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get_notification_rule_id_by_alarm_name.assert_called_with(alarm_name)
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(rule_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_notification_rule_id_by_alarm_name_valid_req_response(self, m_get):
+        """Test get_notification_rule_id_by_alarm_name: A valid request response received,
+            returns notification_id
+        """
+
+        # Set input parameters to get_notification_rule_id_by_alarm_name
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+
+        # Set mocked function's return values
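+        # The mocked listing contains a rule named 'notify_<alarm_name>' whose id should be returned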
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{\
+        "pageInfo": {"totalCount": 0,"page": 0,"pageSize": 1000},\
+        "links": [\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "SELF","name": "current"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "first"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "last"}],\
+        "notification-rule": [{\
+        "id": "2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "name": "notify_CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+        "pluginId": "03053f51-f829-438d-993d-cc33a435d76a",\
+        "links": [{"href": "/suite-api/api/notifications/rules/2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "rel": "SELF","name": "linkToSelf"}]}]}'
+
+        # Call get_notification_rule_id_by_alarm_name method under test
+        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(notification_id, '2b86fa23-0c15-445c-a2b1-7bd725c46f59')
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_notification_rule_id_by_alarm_name_invalid_req_response(self, m_get):
+        """Test get_notification_rule_id_by_alarm_name: If an invalid response received,\
+            returns None
+        """
+
+        # Set input parameters to get_notification_rule_id_by_alarm_name
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 404
+
+        # Call get_notification_rule_id_by_alarm_name method under test
+        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(notification_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_notification_rule_id_by_alarm_name_rule_not_found(self, m_get):
+        """Test get_notification_rule_id_by_alarm_name: If a notification rule is not found,
+            returns None
+        """
+
+        # Set input parameters to get_notification_rule_id_by_alarm_name
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda'
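+        # Note: the trailing '4' is missing from the alarm name, so no notification rule should match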
+
+        # Set mocked function's return values
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{\
+        "pageInfo": {"totalCount": 0,"page": 0,"pageSize": 1000},\
+        "links": [\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "SELF","name": "current"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "first"},\
+            {"href": "/suite-api/api/notifications/rules?page=0&amp;pageSize=1000",\
+            "rel": "RELATED","name": "last"}],\
+        "notification-rule": [{\
+        "id": "2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "name": "notify_CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4",\
+        "pluginId": "03053f51-f829-438d-993d-cc33a435d76a",\
+        "links": [{"href": "/suite-api/api/notifications/rules/2b86fa23-0c15-445c-a2b1-7bd725c46f59",\
+        "rel": "SELF","name": "linkToSelf"}]}]}'
+
+        # Call get_notification_rule_id_by_alarm_name method under test
+        notification_id = self.mon_plugin.get_notification_rule_id_by_alarm_name(alarm_name)
+
+        # Verify that mocked method is called with required parameters
+        m_get.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(notification_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_alarm_defination_valid_req_response(self, m_delete):
+        """Test delete_alarm_defination: A valid request response received,
+            returns alarm_definition_id
+        """
+
+        # Set input parameters to delete_alarm_definition
+        alarm_definition_id = 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 204
+
+        # Call delete_alarm_defination method under test
+        actual_alarm_id = self.mon_plugin.delete_alarm_defination(alarm_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_alarm_id, alarm_definition_id)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_alarm_defination_invalid_req_response(self, m_delete):
+        """Test delete_alarm_defination: If an invalid request response received,
+            returns None
+        """
+
+        # Set input parameters to delete_alarm_definition
+        alarm_definition_id = 'AlertDefinition-9a6d8a14-9f25-4d81-4d773497444d'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 404
+
+        # Call delete_alarm_defination method under test
+        actual_alarm_id = self.mon_plugin.delete_alarm_defination(alarm_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_alarm_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_symptom_definition_valid_req_response(self, m_delete):
+        """Test delete_symptom_definition: A valid request response received,
+            returns symptom_id
+        """
+
+        # Set input parameters to delete_symptom_definition
+        symptom_definition_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 204
+
+        # Call delete_symptom_definition method under test
+        actual_symptom_id = self.mon_plugin.delete_symptom_definition(symptom_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_symptom_id, symptom_definition_id)
+
+    @mock.patch.object(monPlugin.requests, 'delete')
+    def test_delete_symptom_definition_invalid_req_response(self, m_delete):
+        """Test delete_symptom_definition: If an invalid request response received,
+            returns None
+        """
+
+        # Set input parameters to delete_symptom_definition
+        symptom_definition_id = 'SymptomDefinition-bcc2cb36-a67b-4deb-9b5884973278'
+
+        # Set mocked function's return values
+        m_delete.return_value.status_code = 404
+
+        # Call delete_symptom_definition method under test
+        actual_symptom_id = self.mon_plugin.delete_symptom_definition(symptom_definition_id)
+
+        # Verify that mocked method is called with required parameters
+        m_delete.assert_called()
+
+        # Verify return value with expected value
+        self.assertEqual(actual_symptom_id, None)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_configure_rest_plugin_valid_plugin_id(self, m_check_if_plugin_configured, m_post):
+        """Test configure rest plugin method-valid plugin id"""
+
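+        # When the REST plugin is already configured, its existing plugin id is reused and no POST is issued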
+        # mock return values
+        expected_return = m_check_if_plugin_configured.return_value = "mock_pluginid"
+
+        # call configure rest plugin method under test
+        actual_return = self.mon_plugin.configure_rest_plugin()
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called()
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'enable_rest_plugin')
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_configure_rest_plugin_invalid_plugin_id(self, m_check_if_plugin_configured, m_post, m_enable_rest_plugin):
+        """Test configure rest plugin method-invalid plugin id"""
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = None  # not configured
+        m_post.return_value.status_code = 201  # success
+        m_post.return_value.content = '{"pluginTypeId":"RestPlugin","pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518693747871,"enabled":false,\
+                                        "configValues":[{"name":"Url","value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},{"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0:C3:E8:26:50"},\
+                                        {"name":"ConnectionCount","value":"20"}]}'
+
+        m_enable_rest_plugin.return_value = True  # success
+        expected_return = '1ef15663-9739-49fe-8c41-022bcc9f690c'
+
+        # call configure rest plugin method under test
+        actual_return = self.mon_plugin.configure_rest_plugin()
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called()
+        m_post.assert_called()
+        m_enable_rest_plugin.assert_called_with('1ef15663-9739-49fe-8c41-022bcc9f690c', 'MON_module_REST_Plugin')
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'enable_rest_plugin')
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_configure_rest_plugin_failed_to_enable_plugin(self, m_check_if_plugin_configured, m_post,
+                                                          m_enable_rest_plugin):
+        """Test configure rest plugin method-failed to enable plugin case"""
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = None  # not configured
+        m_post.return_value.status_code = 201  # success
+        m_post.return_value.content = '{"pluginTypeId":"RestPlugin","pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518693747871,"enabled":false,\
+                                        "configValues":[{"name":"Url","value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},{"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0:C3:E8:26:50"},\
+                                        {"name":"ConnectionCount","value":"20"}]}'
+
+        m_enable_rest_plugin.return_value = False  # return failure
+        expected_return = None
+
+        # call configure rest plugin method under test
+        actual_return = self.mon_plugin.configure_rest_plugin()
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called()
+        m_post.assert_called()
+        m_enable_rest_plugin.assert_called_with('1ef15663-9739-49fe-8c41-022bcc9f690c', 'MON_module_REST_Plugin')
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_check_if_plugin_configured_valid_req_response(self, m_get):
+        """Test check if plugin configured method-valid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
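+        # The mocked listing includes 'MON_module_REST_Plugin'; its pluginId should be returned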
+        # mock return values
+        m_get.return_value.status_code = 200
+        expected_return = '1ef15663-9739-49fe-8c41-022bcc9f690c'
+        m_get.return_value.content = '{"notificationPluginInstances":\
+                                       [{"pluginTypeId":"RestPlugin",\
+                                        "pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518694966987,\
+                                        "enabled":true,"configValues":[{"name":"Url",\
+                                        "value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},\
+                                        {"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0"},\
+                                        {"name":"ConnectionCount","value":"20"}]}]}'
+
+        # call check if plugin configured method under test
+        actual_return = self.mon_plugin.check_if_plugin_configured(plugin_name)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_check_if_plugin_configured_invalid_req_response(self, m_get):
+        """Test check if plugin configured method-invalid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        # mock return values
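+        # A non-200 status (201 here) is treated as an invalid response, so None is expected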
+        m_get.return_value.status_code = 201
+        expected_return = None
+        m_get.return_value.content = '{"notificationPluginInstances":\
+                                       [{"pluginTypeId":"RestPlugin",\
+                                        "pluginId":"1ef15663-9739-49fe-8c41-022bcc9f690c",\
+                                        "name":"MON_module_REST_Plugin","version":1518694966987,\
+                                        "enabled":true,"configValues":[{"name":"Url",\
+                                        "value":"https://MON.lxd:8080/notify/"},\
+                                        {"name":"Content-type","value":"application/json"},\
+                                        {"name":"Certificate",\
+                                        "value":"AA:E7:3E:A5:34:E0:25:FB:28:84:3B:74:B2:18:74:C0"},\
+                                        {"name":"ConnectionCount","value":"20"}]}]}'
+
+        # call check if plugin configured method under test
+        actual_return = self.mon_plugin.check_if_plugin_configured(plugin_name)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_enable_rest_plugin_valid_req_response(self, m_put):
+        """Test enable rest plugin method-valid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        plugin_id = '1ef15663-9739-49fe-8c41-022bcc9f690c'
+        # mock return values
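+        # HTTP 204 indicates the plugin was enabled successfully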
+        m_put.return_value.status_code = 204
+        expected_return = True
+        m_put.return_value.content = ''
+
+        # call enable_rest_plugin method under test
+        actual_return = self.mon_plugin.enable_rest_plugin(plugin_id, plugin_name)
+
+        # verify that mocked method is called
+        m_put.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'put')
+    def test_enable_rest_plugin_invalid_req_response(self, m_put):
+        """Test enable rest plugin method-invalid request response"""
+
+        plugin_name = 'MON_module_REST_Plugin'
+        plugin_id = '08018c0f-8879-4ca1-9b92-00e22d2ff81b'  # invalid plugin id
+        # mock return values
+        m_put.return_value.status_code = 404  # api Error code
+        expected_return = False
+        m_put.return_value.content = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?><ops:\
+                                      error xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
+                                      xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:ops=\
+                                      "http://webservice.vmware.com/vRealizeOpsMgr/1.0/" \
+                                      httpStatusCode="404" apiErrorCode="404"><ops:message>\
+                                      No such Notification Plugin - 08018c0f-8879-4ca1-9b92-\
+                                      00e22d2ff81b.</ops:message></ops:error>'
+
+        # call enable_rest_plugin method under test
+        actual_return = self.mon_plugin.enable_rest_plugin(plugin_id, plugin_name)
+
+        # verify that mocked method is called
+        m_put.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_create_alarm_notification_rule_valid_req(self, m_check_if_plugin_configured, m_post):
+        """Test create alarm notification rule method valid request response"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
+        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = "8db86441-71d8-4830-9e1a-a90be3776d12"
+
+        # mock return values
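+        # A configured REST plugin id is found and the rule creation POST returns HTTP 201 with the new rule id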
+        m_check_if_plugin_configured.return_value = '03053f51-f829-438d-993d-cc33a435d76a'
+        m_post.return_value.status_code = 201
+        m_post.return_value.content = '{"id":"8db86441-71d8-4830-9e1a-a90be3776d12",\
+                                      "name":"notify_CPU_Utilization_Above_Thr-e14b203c",\
+                                      "pluginId":"03053f51-f829-438d-993d-cc33a435d76a",\
+                                      "alertControlStates":[],"alertStatuses":[],\
+                                      "resourceFilter":{"matchResourceIdOnly":true,\
+                                      "childrenResourceKindFilters":[],\
+                                      "resourceId":"ac87622f-b761-40a0-b151-00872a2a456e"},\
+                                      "alertTypeFilters":[],"alertDefinitionIdFilters":{"values":[\
+                                      "AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14"]}}'
+
+        # call create_alarm_notification_rule method under test
+        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_create_alarm_notification_rule_invalid_req(self, m_check_if_plugin_configured, m_post):
+        """Test create alarm notification rule method invalid request response"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
+        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = None  # an invalid response should return None
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = '03053f51-f829-438d-993d-cc33a435d76a'
+        m_post.return_value.status_code = 500
+        m_post.return_value.content = '{"message":"Internal Server error, cause unknown.",\
+                                        "moreInformation":[{"name":"errorMessage","value":\
+                                        "there is already a rule with the same rule name"},\
+                                       {"name":"localizedMessage","value":"there is already \
+                                        a rule with the same rule name;"}],"httpStatusCode":500,\
+                                        "apiErrorCode":500}'
+
+        # call create_alarm_notification_rule method under test
+        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
+        m_post.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'post')
+    @mock.patch.object(monPlugin.MonPlugin, 'check_if_plugin_configured')
+    def test_create_alarm_notification_rule_failed_to_get_plugin_id(self,
+                                                                    m_check_if_plugin_configured, m_post):
+        """Test create alarm notification rule method invalid plugin id"""
+
+        alarm_name = 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        alarm_id = 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14'
+        res_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = None  # missing plugin id should return None
+
+        # mock return values
+        m_check_if_plugin_configured.return_value = None
+
+        # call create_alarm_notification_rule method under test
+        actual_return = self.mon_plugin.create_alarm_notification_rule(alarm_name, alarm_id, res_id)
+
+        # verify that mocked method is called
+        m_check_if_plugin_configured.assert_called_with('MON_module_REST_Plugin')
+        m_post.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_valid_rest_req_response(self, m_get_default_Params,
+                                                      m_get_vm_moref_id,
+                                                      m_get_vm_resource_id,
+                                                      m_get):
+        """Test get metrics data of resource method valid request response"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
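+        # The mocked stat-list carries cpu|usage_average samples that get_metrics_data is expected to parse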
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 200
+        m_get.return_value.content = '{"values":[{"resourceId":"ac87622f-b761-40a0-b151-\
+                                       00872a2a456e","stat-list":{"stat":[{"timestamps":\
+                                      [1519716874297,1519717174294,1519717474295,1519717774298,\
+                                      1519718074300,1519718374299,1519718674314,1519718974325,\
+                                      1519719274304,1519719574298,1519719874298,1519720174301],\
+                                      "statKey":{"key":"cpu|usage_average"},"intervalUnit":\
+                                      {"quantifier":1},"data":[0.1120000034570694,\
+                                      0.11866666376590729,0.11599999666213989,0.11400000005960464,\
+                                      0.12066666781902313,0.11533333361148834,0.11800000071525574,\
+                                      0.11533333361148834,0.12200000137090683,0.11400000005960464,\
+                                      0.1459999978542328,0.12133333086967468]}]}}]}'
+
+        # call get_metrics_data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_get.assert_called()
+
+        # return value is not verified here (no expected_return is constructed for this test)
+        # self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_invalid_rest_req_response(self, m_get_default_Params,
+                                                        m_get_vm_moref_id,
+                                                        m_get_vm_resource_id,
+                                                        m_get):
+        """Test get metrics data of resource method invalid request response"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 400
+        m_get.return_value.content = '{"message":"Invalid request... #1 violations found.",\
+                                       "validationFailures":[{"failureMessage":"Invalid Parameter",\
+                                       "violationPath":"end"}],"httpStatusCode":400,\
+                                       "apiErrorCode":400}'
+        expected_return = {'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
+                           'schema_version': '1.0',
+                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response',
+                           'unit': '%', 'vim_uuid': '1'}
+
+        # call get_metrics_data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_metric_not_supported(self, m_get_default_Params,
+                                                   m_get_vm_moref_id,
+                                                   m_get_vm_resource_id,
+                                                   m_get):
+        """Test get metrics data of resource method invalid metric name"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'invalid_metric', 'metric_uuid': None,
+                   'schema_version': 1.0,
+                   'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {}  # returns empty dict
+
+        expected_return = {'metric_name': 'invalid_metric', 'metric_uuid': '0', 'vim_uuid': '1',
+                           'schema_version': '1.0', 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response', 'unit': None}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called/not called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_not_called()
+        m_get_vm_resource_id.assert_not_called()
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_failed_to_get_vm_moref_id(self, m_get_default_Params,
+                                                        m_get_vm_moref_id,
+                                                        m_get_vm_resource_id,
+                                                        m_get):
+        """Test get metrics data method negative scenario- invalid resource id"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'cpu_utilization', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        m_get_vm_moref_id.return_value = None
+        expected_return = {'metric_name': 'cpu_utilization', 'metric_uuid': '0',
+                           'schema_version': '1.0',
+                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response',
+                           'unit': '%', 'vim_uuid': '1'}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called/not called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_not_called()
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_get_metrics_data_failed_to_get_vm_resource_id(self, m_get_default_Params,
+                                                           m_get_vm_moref_id,
+                                                           m_get_vm_resource_id,
+                                                           m_get):
+        """Test get metrics data method negative scenario- invalid moref id"""
+
+        metrics = {'collection_period': 1, 'metric_name': 'CPU_UTILIZATION', 'metric_uuid': None,
+                   'schema_version': 1.0, 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                   'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                   'schema_type': 'read_metric_data_request', 'vim_type': 'VMware',
+                   'collection_unit': 'HR', 'vim_uuid': '1'}
+
+        # mock return value
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        m_get_vm_moref_id.return_value = 'Invalid-vm-6626'
+        m_get_vm_resource_id.return_value = None
+        expected_return = {'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
+                           'schema_version': '1.0',
+                           'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                           'correlation_id': 'e14b203c-6bf2-4e2f-a91c-8c19d2abcdef',
+                           'metrics_data': {'time_series': [], 'metrics_series': []},
+                           'schema_type': 'read_metric_data_response',
+                           'unit': '%', 'vim_uuid': '1'}
+
+        # call get metrics data method under test
+        actual_return = self.mon_plugin.get_metrics_data(metrics)
+
+        # verify that mocked method is called/not called
+        m_get_default_Params.assert_called_with(metrics['metric_name'])
+        m_get_vm_moref_id.assert_called_with(metrics['resource_uuid'])
+        m_get_vm_resource_id.assert_called()
+        m_get_vm_resource_id.assert_called_with('Invalid-vm-6626')
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_update_alarm_configuration_successful_updation(self, m_get_alarm_defination_details,
+                                                            m_update_symptom_defination,
+                                                            m_reconfigure_alarm):
+        """Test update alarm configuration method"""
+
+        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
+                        'correlation_id': 14203,
+                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
+
+        # mock return value
+        alarm_details_json = {
+            'states': [
+                {'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                 'severity': 'CRITICAL',
+                 'base-symptom-set': {
+                     'symptomDefinitionIds': ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
+                     'relation': 'SELF',
+                     'type': 'SYMPTOM_SET',
+                     'aggregation': 'ALL'}}],
+            'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
+            'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
+        alarm_details = {
+            'symptom_definition_id': 'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2',
+            'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'resource_kind': 'VirtualMachine', 'type': 16}
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_update_symptom_defination.return_value = \
+            'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'
+        expected_return = m_reconfigure_alarm.return_value = 'f1163767-6eac-438f-8e60-a7a867257e14'
+
+        # call update alarm configuration method under test
+        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
+
+        # verify that mocked method is called
+        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
+        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],
+                                                       alarm_config)
+        m_reconfigure_alarm.assert_called_with(alarm_details_json, alarm_config)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_update_alarm_configuration_failed_to_reconfigure_alarm(self,
+                                                                    m_get_alarm_defination_details,
+                                                                    m_update_symptom_defination,
+                                                                    m_reconfigure_alarm):
+        """Test update alarm configuration method- failed to reconfigure alarm"""
+
+        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
+                        'correlation_id': 14203,
+                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
+
+        # mock return value
+        alarm_details_json = {
+            'states': [
+                {'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                 'severity': 'CRITICAL',
+                 'base-symptom-set': {
+                     'symptomDefinitionIds': ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
+                     'relation': 'SELF',
+                     'type': 'SYMPTOM_SET',
+                     'aggregation': 'ALL'}}],
+            'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
+            'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
+        alarm_details = {
+            'symptom_definition_id': 'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2',
+            'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'resource_kind': 'VirtualMachine', 'type': 16}
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        m_update_symptom_defination.return_value = \
+            'SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'
+        expected_return = m_reconfigure_alarm.return_value = None  # failed to reconfigure
+
+        # call update alarm configuration method under test
+        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
+
+        # verify that mocked method is called
+        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
+        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],
+                                                       alarm_config)
+        m_reconfigure_alarm.assert_called_with(alarm_details_json, alarm_config)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'reconfigure_alarm')
+    @mock.patch.object(monPlugin.MonPlugin, 'update_symptom_defination')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_alarm_defination_details')
+    def test_update_alarm_configuration_failed_to_update_symptom(self,
+                                                                 m_get_alarm_defination_details,
+                                                                 m_update_symptom_defination,
+                                                                 m_reconfigure_alarm):
+        """Test update alarm configuration method- failed to update alarm"""
+
+        alarm_config = {'alarm_uuid': 'f1163767-6eac-438f-8e60-a7a867257e14',
+                        'correlation_id': 14203,
+                        'description': 'CPU_Utilization_Above_Threshold_L', 'operation': 'GT'}
+
+        # mock return value
+        alarm_details_json = {
+            'states': [
+                {'impact': {'impactType': 'BADGE', 'detail': 'risk'},
+                 'severity': 'CRITICAL',
+                 'base-symptom-set': {
+                     'symptomDefinitionIds': ['SymptomDefinition-47c88675-bea8-436a-bb41-8d2231428f44'],
+                     'relation': 'SELF',
+                     'type': 'SYMPTOM_SET',
+                     'aggregation': 'ALL'}}],
+            'description': 'CPU_Utilization_Above_Threshold', 'type': 16,
+            'id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2'}
+        alarm_details = {
+            'symptom_definition_id': 'Invalid-47c88675-bea8-436a-bb41-8d2231428f44',
+            'alarm_name': 'CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d2',
+            'alarm_id': 'AlertDefinition-f1163767-6eac-438f-8e60-a7a867257e14',
+            'resource_kind': 'VirtualMachine', 'type': 16}
+        m_get_alarm_defination_details.return_value = (alarm_details_json, alarm_details)
+        expected_return = m_update_symptom_defination.return_value = None
+
+        # call update alarm configuration method under test
+        actual_return = self.mon_plugin.update_alarm_configuration(alarm_config)
+
+        # verify that mocked method is called
+        m_get_alarm_defination_details.assert_called_with(alarm_config['alarm_uuid'])
+        m_update_symptom_defination.assert_called_with(alarm_details['symptom_definition_id'],
+                                                       alarm_config)
+        m_reconfigure_alarm.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_verify_metric_support_metric_supported_with_unit(self, m_get_default_Params):
+        """Test verify metric support method for supported metric"""
+
+        # mock return value
+        metric_info = {'metric_unit': '%', 'metric_name': 'cpu_utilization',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        expected_return = True  # supported metric returns True
+
+        # call verify metric support method under test
+        actual_return = self.mon_plugin.verify_metric_support(metric_info)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metric_info['metric_name'])
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_verify_metric_support_metric_not_supported(self, m_get_default_Params):
+        """Test verify metric support method for un-supported metric"""
+
+        # mock return value
+        metric_info = {'metric_unit': '%', 'metric_name': 'invalid_metric',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+        m_get_default_Params.return_value = {}
+        expected_return = False  # unsupported metric returns False
+
+        # call verify metric support method under test
+        actual_return = self.mon_plugin.verify_metric_support(metric_info)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metric_info['metric_name'])
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_default_Params')
+    def test_verify_metric_support_metric_supported_with_mismatched_unit(self,
+                                                                         m_get_default_Params):
+        """Test verify metric support method for supported metric with mismatched unit"""
+
+        # mock return value
+        metric_info = {'metric_unit': '', 'metric_name': 'invalid_metric',
+                       'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+        m_get_default_Params.return_value = {'metric_key': 'cpu|usage_average', 'unit': '%'}
+        expected_return = True  # supported metric returns True
+
+        # call verify metric support method under test
+        actual_return = self.mon_plugin.verify_metric_support(metric_info)
+
+        # verify that mocked method is called
+        m_get_default_Params.assert_called_with(metric_info['metric_name'])
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
+    def test_get_triggered_alarms_list_returns_triggered_alarms(self,
+                                                                m_get_vrops_resourceid,
+                                                                m_triggered_alarms):
+        """Test get triggered alarm list method valid input"""
+
+        # Mock list alarm input
+        list_alarm_input = {'severity': 'CRITICAL',
+                            'correlation_id': 'e14b203c',
+                            'alarm_name': 'CPU_Utilization_Above_Threshold',
+                            'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'}
+
+        resource_id = m_get_vrops_resourceid.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        expected_return = m_triggered_alarms.return_value = [{'status': 'ACTIVE',
+                                                              'update_date': '2018-01-12T08:34:05',
+                                                              'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                                                              'cancel_date': '0000-00-00T00:00:00',
+                                                              'alarm_instance_uuid': 'd9e3bc84',
+                                                              'alarm_uuid': '5714977d', 'vim_type': 'VMware',
+                                                              'start_date': '2018-01-12T08:34:05'},
+                                                             {'status': 'CANCELED',
+                                                              'update_date': '2017-12-20T09:37:57',
+                                                              'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                                                              'cancel_date': '2018-01-12T06:49:19',
+                                                              'alarm_instance_uuid': 'd3bbeef6',
+                                                              'alarm_uuid': '7ba1bf3e', 'vim_type': 'VMware',
+                                                              'start_date': '2017-12-20T09:37:57'}]
+
+        # call get triggered alarms list method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+
+        # verify that mocked method is called
+        m_get_vrops_resourceid.assert_called_with(list_alarm_input['resource_uuid'])
+        m_triggered_alarms.assert_called_with(list_alarm_input['resource_uuid'], resource_id)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
+    def test_get_triggered_alarms_list_invalid_resource_uuid(self,
+                                                             m_get_vrops_resourceid,
+                                                             m_triggered_alarms):
+        """Test get triggered alarm list method invalid resource uuid"""
+
+        # Mock list alarm input
+        list_alarm_input = {'severity': 'CRITICAL',
+                            'correlation_id': 'e14b203c',
+                            'alarm_name': 'CPU_Utilization_Above_Threshold',
+                            'resource_uuid': '12345'}  # invalid resource uuid
+
+        m_get_vrops_resourceid.return_value = None  # invalid resource uuid, no vrops resource id found
+        expected_return = []  # no alarms can be listed without a valid vrops resource id
+
+        # call get triggered alarms list method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+
+        # verify that mocked method is called
+        m_get_vrops_resourceid.assert_called_with(list_alarm_input['resource_uuid'])
+        m_triggered_alarms.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_triggered_alarms_on_resource')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vrops_resourceid_from_ro_uuid')
+    def test_get_triggered_alarms_list_resource_uuid_not_present(self,
+                                                                 m_get_vrops_resourceid,
+                                                                 m_triggered_alarms):
+        """Test get triggered alarm list method resource not present"""
+
+        # Mock list alarm input
+        list_alarm_input = {'severity': 'CRITICAL',
+                            'correlation_id': 'e14b203c',
+                            'alarm_name': 'CPU_Utilization_Above_Threshold'}
+
+        # call get triggered alarms list method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_list(list_alarm_input)
+
+        # verify that mocked method is called
+        m_get_vrops_resourceid.assert_not_called()
+        m_triggered_alarms.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual([], actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    def test_get_vrops_resourceid_from_ro_uuid(self, m_get_vm_moref_id, m_get_vm_resource_id):
+        """Test get vrops resourceid from ro uuid method"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        expected_return = m_get_vm_resource_id.return_value = 'ac87622f-b761-40a0-b151-00872a2a456e'
+
+        # call get_vrops_resourceid_from_ro_uuid method under test
+        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
+
+        # verify that mocked method is called
+        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    def test_get_vrops_resourceid_from_ro_uuid_failed_to_get_vm_resource_id(self,
+                                                                            m_get_vm_moref_id,
+                                                                            m_get_vm_resource_id):
+        """Test get vrops resourceid from ro uuid method negative scenario"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vm_moref_id = m_get_vm_moref_id.return_value = 'vm-6626'
+        expected_return = m_get_vm_resource_id.return_value = None
+
+        # call get_vrops_resourceid_from_ro_uuid method under test
+        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
+
+        # verify that mocked method is called
+        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
+        m_get_vm_resource_id.assert_called_with(vm_moref_id)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_resource_id')
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vm_moref_id')
+    def test_get_vrops_resourceid_from_ro_uuid_failed_to_get_vm_moref_id(self,
+                                                                         m_get_vm_moref_id,
+                                                                         m_get_vm_resource_id):
+        """Test get vrops resourceid from ro uuid method negative scenario"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        expected_return = vm_moref_id = m_get_vm_moref_id.return_value = None
+
+        # call get_vrops_resourceid_from_ro_uuid method under test
+        actual_return = self.mon_plugin.get_vrops_resourceid_from_ro_uuid(ro_resource_uuid)
+
+        # verify that mocked method is called
+        m_get_vm_moref_id.assert_called_with(ro_resource_uuid)
+        m_get_vm_resource_id.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_triggered_alarms_on_resource_valid_req_response(self, m_get):
+        """Test get triggered alarms on resource method for valid request"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 200
+        expected_return = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
+                            'severity': 'CRITICAL', 'start_date': '2018-01-12T08:34:05',
+                            'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                            'cancel_date': '2018-02-12T08:24:48', 'vim_type': 'VMware',
+                            'alarm_instance_uuid': 'd9e3bc84-dcb4-4905-b592-00a55f4cdaf1',
+                            'alarm_uuid': '5714977d-56f6-4222-adc7-43fa6c6e7e39'}]
+
+        m_get.return_value.content = '{"alerts": [\
+        {\
+            "alertId": "d9e3bc84-dcb4-4905-b592-00a55f4cdaf1",\
+            "resourceId": "ac87622f-b761-40a0-b151-00872a2a456e",\
+            "alertLevel": "CRITICAL",\
+            "status": "ACTIVE",\
+            "startTimeUTC": 1515746045278,\
+            "cancelTimeUTC": 1518423888708,\
+            "updateTimeUTC": 1515746045278,\
+            "alertDefinitionId": "AlertDefinition-5714977d-56f6-4222-adc7-43fa6c6e7e39",\
+            "alertDefinitionName": "CPU_Utilization_Above_Thr-e14b203c-6bf2-4e2f-a91c-8c19d240eda4"\
+        },\
+        {\
+            "alertId": "5fb5e940-e161-4253-a729-7255c6d6b1f5",\
+            "resourceId": "ac87622f-b761-40a0-b151-00872a2a456e",\
+            "alertLevel": "WARNING",\
+            "status": "CANCELED",\
+            "startTimeUTC": 1506684979154,\
+            "cancelTimeUTC": 0,\
+            "updateTimeUTC": 1520471975507,\
+            "alertDefinitionId": "AlertDefinition-9ec5a921-1a54-411d-85ec-4c1c9b26dd02",\
+            "alertDefinitionName": "VM_CPU_Usage_Alarm"\
+        }]}'
+
+        # call get_triggered_alarms_on_resource method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid,
+                                                                         vrops_resource_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_triggered_alarms_on_resource_invalid_req_response(self, m_get):
+        """Test get triggered alarms on resource method for invalid request"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 204
+        expected_return = None
+
+        # call get_triggered_alarms_on_resource method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid,
+                                                                         vrops_resource_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_triggered_alarms_on_resource_no_alarms_present(self, m_get):
+        """Test get triggered alarms on resource method for no alarms present"""
+
+        # Mock the inputs
+        ro_resource_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        vrops_resource_id = 'ac87622f-b761-40a0-b151-00872a2a456e'
+        m_get.return_value.status_code = 200
+        expected_return = []
+        m_get.return_value.content = '{"alerts": []}'
+
+        # call get_triggered_alarms_on_resource method under test
+        actual_return = self.mon_plugin.get_triggered_alarms_on_resource(ro_resource_uuid,
+                                                                         vrops_resource_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    def test_convert_date_time_valid_date_time(self):
+        """Test convert date time method valid input"""
+
+        # Mock the inputs
+        date_time = 1515746045278
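+        # vROps reports timestamps as epoch milliseconds; this value corresponds
+        # to the expected '2018-01-12T08:34:05'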
+        expected_return = '2018-01-12T08:34:05'
+
+        # call convert_date_time method under test
+        actual_return = self.mon_plugin.convert_date_time(date_time)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    def test_convert_date_time_invalid_date_time(self):
+        """Test convert date time method invalid input"""
+
+        # Mock the inputs
+        date_time = 0
+        expected_return = '0000-00-00T00:00:00'
+
+        # call convert_date_time method under test
+        actual_return = self.mon_plugin.convert_date_time(date_time)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_vm_resource_id_rest_valid_req_response(self, m_get):
+        """Test get vms resource id valid request"""
+
+        # Mock the inputs
+        vm_moref_id = 'vm-6626'
+        m_get.return_value.status_code = 200
+        expected_return = "ac87622f-b761-40a0-b151-00872a2a456e"
+        m_get.return_value.content = \
+            '{ \
+                "resourceList": [\
+                   {\
+                       "creationTime": 1497770174130,\
+                       "resourceKey": {\
+                           "name": "OCInst2.ubuntu(4337d51f-1e65-4ab0-9c08-4897778d4fda)",\
+                           "adapterKindKey": "VMWARE",\
+                           "resourceKindKey": "VirtualMachine",\
+                           "resourceIdentifiers": [\
+                               {\
+                                   "identifierType": {\
+                                   "name": "VMEntityObjectID",\
+                                   "dataType": "STRING",\
+                                   "isPartOfUniqueness": true\
+                                   },\
+                                   "value": "vm-6626"\
+                               }\
+                           ]\
+                       },\
+                       "identifier": "ac87622f-b761-40a0-b151-00872a2a456e"\
+                    }\
+                ]\
+            }'
+
+        # call get_vm_resource_id method under test
+        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_vm_resource_id_rest_invalid_req_response(self, m_get):
+        """Test get vms resource id invalid request"""
+
+        # Mock the inputs
+        vm_moref_id = 'vm-6626'
+        m_get.return_value.status_code = 406
+        expected_return = None
+        m_get.return_value.content = '406 Not Acceptable'
+
+        # call get_vm_resource_id method under test
+        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    def test_get_vm_resource_id_rest_invalid_response(self, m_get):
+        """Test get vms resource id invalid response"""
+
+        # Mock the inputs
+        vm_moref_id = 'vm-6626'
+        m_get.return_value.status_code = 200
+        expected_return = None
+        m_get.return_value.content = \
+            '{ \
+                "resourceList": \
+                   {\
+                       "creationTime": 1497770174130,\
+                       "resourceKey": {\
+                           "name": "OCInst2.ubuntu(4337d51f-1e65-4ab0-9c08-4897778d4fda)",\
+                           "adapterKindKey": "VMWARE",\
+                           "resourceKindKey": "VirtualMachine",\
+                           "resourceIdentifiers": [\
+                               {\
+                                   "identifierType": {\
+                                   "name": "VMEntityObjectID",\
+                                   "dataType": "STRING",\
+                                   "isPartOfUniqueness": true\
+                                   },\
+                                   "value": "vm-6626"\
+                               }\
+                           ]\
+                       },\
+                       "identifier": "ac87622f-b761-40a0-b151-00872a2a456e"\
+                    }\
+            }'
+
+        # call get_vm_resource_id method under test
+        actual_return = self.mon_plugin.get_vm_resource_id(vm_moref_id)
+
+        # verify that mocked method is called
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vapp_details_rest')
+    def test_get_vm_moref_id_valid_id_found(self, m_get_vapp_details_rest):
+        """Test get vm moref id valid scenario"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_get_vapp_details_rest.return_value = {'vm_vcenter_info': {'vm_moref_id': 'vm-6626'}}
+        expected_return = 'vm-6626'
+
+        # call get_vm_moref_id method under test
+        actual_return = self.mon_plugin.get_vm_moref_id(vapp_uuid)
+
+        # verify that mocked method is called
+        m_get_vapp_details_rest.assert_called_with(vapp_uuid)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.MonPlugin, 'get_vapp_details_rest')
+    def test_get_vm_moref_id_valid_id_not_found(self, m_get_vapp_details_rest):
+        """Test get vm moref id invalid scenario"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda'  # invalid uuid
+        m_get_vapp_details_rest.return_value = {}
+        expected_return = None
+
+        # call get_vm_moref_id method under test
+        actual_return = self.mon_plugin.get_vm_moref_id(vapp_uuid)
+
+        # verify that mocked method is called
+        m_get_vapp_details_rest.assert_called_with(vapp_uuid)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_valid_req_response(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for valid request response"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = self.vca
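+        # provide a mocked vCD session carrying the x-vcloud-authorization header
+        # that the plugin is assumed to reuse for its REST call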
+        self.vca._session = self.session
+        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
+        m_get.return_value.status_code = 200
+        expected_return = {'vm_vcenter_info': {'vm_moref_id': 'vm-6626'}}
+        m_get.return_value.content = '<?xml version="1.0" encoding="UTF-8"?>\
+        <VApp xmlns="http://www.vmware.com/vcloud/v1.5"  xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5" >\
+            <Children>\
+                <Vm needsCustomization="false"  type="application/vnd.vmware.vcloud.vm+xml">\
+                    <VCloudExtension required="false">\
+                        <vmext:VmVimInfo>\
+                            <vmext:VmVimObjectRef>\
+                                <vmext:VimServerRef  type="application/vnd.vmware.admin.vmwvirtualcenter+xml"/>\
+                                <vmext:MoRef>vm-6626</vmext:MoRef>\
+                                <vmext:VimObjectType>VIRTUAL_MACHINE</vmext:VimObjectType>\
+                            </vmext:VmVimObjectRef>\
+                        </vmext:VmVimInfo>\
+                    </VCloudExtension>\
+                </Vm>\
+            </Children>\
+        </VApp>'
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_invalid_req_response(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for invalid request response"""
+
+        # mock the inputs
+        vapp_uuid = 'Invalid-e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = self.vca
+        self.vca._session = self.session
+        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
+        m_get.return_value.status_code = 400
+        expected_return = {}
+        m_get.return_value.content = 'Bad Request'
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_failed_to_connect_vcd(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for failed to connect to vcd"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = None
+        expected_return = {}
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+        m_get.assert_not_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.requests, 'get')
+    @mock.patch.object(monPlugin.MonPlugin, 'connect_as_admin')
+    def test_get_vapp_details_rest_invalid_response(self, m_connect_as_admin, m_get):
+        """Test get vapp details rest method for invalid response"""
+
+        # mock the inputs
+        vapp_uuid = 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4'
+        m_connect_as_admin.return_value = self.vca
+        self.vca._session = self.session
+        self.vca._session.headers['x-vcloud-authorization'] = '2ec69b2cc6264ad0a47aaf4e3e280d16'
+        m_get.return_value.status_code = 200
+        expected_return = {}
+        m_get.return_value.content = '<?xml version="1.0" encoding="UTF-8"?>\
+        <VApp xmlns="http://www.vmware.com/vcloud/v1.5"  xmlns:vmext="http://www.vmware.com/vcloud/extension/v1.5" >\
+            <Children>\
+                <Vm needsCustomization="false"  type="application/vnd.vmware.vcloud.vm+xml">\
+                    <VCloudExtension required="false">\
+                        <vmext:VmVimInfo>\
+                            <vmext:VmVimObjectRef>\
+                                <vmext:VimServerRef  type="application/vnd.vmware.admin.vmwvirtualcenter+xml"/>\
+                                <vmext:MoRef>vm-6626</vmext:MoRef>\
+                                <vmext:VimObjectType>VIRTUAL_MACHINE</vmext:VimObjectType>\
+                        </vmext:VmVimInfo>\
+                    </VCloudExtension>\
+                </Vm>\
+            </Children>\
+        </VApp>'
+
+        # call get_vapp_details_rest method under test
+        actual_return = self.mon_plugin.get_vapp_details_rest(vapp_uuid)
+
+        # verify that mocked method is called
+        m_connect_as_admin.assert_called_with()
+        m_get.assert_called()
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+    @mock.patch.object(monPlugin.Client, 'set_credentials')
+    @mock.patch.object(monPlugin, 'Client')
+    def test_connect_as_admin(self, m_client, m_set_credentials):
+        """Test connect as admin to vCD method"""
+
+        # mock the inputs and mocked returns
+        expected_return = m_client.return_value = self.vca
+        m_set_credentials.return_value = True
+
+        # call connect_as_admin method under test
+        actual_return = self.mon_plugin.connect_as_admin()
+
+        # verify that mocked method is called
+        m_client.assert_called_with(self.m_vim_access_config['vim_url'],
+                                    verify_ssl_certs=False)
+
+        # verify return value with expected value
+        self.assertEqual(expected_return, actual_return)
+
+# For testing purpose
+# if __name__ == '__main__':
+#   unittest.main()
diff --git a/osm_mon/test/plugins/VMware/test_plugin_receiver.py b/osm_mon/test/plugins/VMware/test_plugin_receiver.py
new file mode 100644 (file)
index 0000000..cc5dea9
--- /dev/null
@@ -0,0 +1,927 @@
+# -*- coding: utf-8 -*-
+
+##
+# Copyright 2017-2018 VMware Inc.
+# This file is part of ETSI OSM
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact:  osslegalrouting@vmware.com
+##
+
+""" Mock tests for VMware vROPs plugin recevier """
+
+import json
+import logging
+import os
+import sys
+import unittest
+from io import UnsupportedOperation
+
+import mock
+
+
+log = logging.getLogger(__name__)
+
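+# make the osm_mon package under test importable when this file is run directly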
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", ".."))
+
+from osm_mon.plugins.vRealiseOps import plugin_receiver as monPluginRec
+from osm_mon.core.database import VimCredentials
+
+
+class Message(object):
+    """A class to mock a message object value for alarm and matric requests"""
+
+    def __init__(self):
+        """Initialize a mocked message instance"""
+        self.topic = "alarm_or_metric_request"
+        self.key = None
+        self.value = json.dumps({"mock_value": "mock_details"})
+        self.partition = 1
+        self.offset = 100
+
+
+class TestPluginReceiver(unittest.TestCase):
+    """Test class for Plugin Receiver class methods"""
+
+    def setUp(self):
+        """Setup the tests for plugin_receiver class methods"""
+        super(TestPluginReceiver, self).setUp()
+        self.plugin_receiver = monPluginRec.PluginReceiver()
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'create_alarm')
+    def test_consume_create_alarm_request_key(self, m_create_alarm, m_publish_create_alarm_status):
+        """Test functionality of consume for create_alarm_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "alarm_create_request": "alarm_details"}
+        m_create_alarm.return_value = "test_alarm_id"
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_alarm_requests('create_alarm_request', value, vim_uuid)
+
+        # verify if create_alarm and publish methods called with correct params
+        m_create_alarm.assert_called_with(value)
+        m_publish_create_alarm_status.assert_called_with("test_alarm_id", value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_update_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'update_alarm')
+    def test_consume_update_alarm_request_key(self, m_update_alarm,
+                                              m_publish_update_alarm_status):
+        """Test functionality of consume for update_alarm_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "alarm_update_request": "alarm_details"}
+
+        # set return value to mocked method
+        m_update_alarm.return_value = "test_alarm_id"
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_alarm_requests('update_alarm_request', value, vim_uuid)
+
+        # verify update_alarm and publish method called with correct params
+        m_update_alarm.assert_called_with(value)
+        m_publish_update_alarm_status.assert_called_with("test_alarm_id", value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_delete_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'delete_alarm')
+    def test_consume_delete_alarm_request_key(self, m_delete_alarm,
+                                              m_publish_delete_alarm_status):
+        """Test functionality of consume for delete_alarm_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "alarm_delete_request": "alarm_details"}
+        m_delete_alarm.return_value = "test_alarm_id"
+
+        # Call the consume method of plugin_receiver and check delete_alarm request
+        self.plugin_receiver.handle_alarm_requests('delete_alarm_request', value, vim_uuid)
+        m_delete_alarm.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_delete_alarm_status.assert_called_with("test_alarm_id", value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_list_alarm_response')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'list_alarms')
+    def test_consume_list_alarm_request_key(self, m_list_alarms,
+                                            m_publish_list_alarm_response):
+        """ Test functionality of list alarm request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+        value = {"vim_uuid": vim_uuid, "alarm_list_request": "alarm_details"}
+
+        test_alarm_list = [{"alarm_uuid": "alarm1_details"}, {"alarm_uuid": "alarm2_details"}]
+
+        m_list_alarms.return_value = test_alarm_list
+
+        # Call the consume method of plugin_receiver and check list_alarm request
+        self.plugin_receiver.handle_alarm_requests('list_alarm_request', value, vim_uuid)
+        m_list_alarms.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_list_alarm_response.assert_called_with(test_alarm_list, value)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_alarm_status')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'create_alarm')
+    def test_consume_invalid_alarm_request_key(self, m_create_alarm,
+                                               m_publish_create_alarm_status):
+        """Test functionality of consume for vim_access_credentials invalid request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        # Call the consume method of plugin_receiver
+        with self.assertRaises(UnsupportedOperation):
+            self.plugin_receiver.handle_alarm_requests('invalid_key', {}, vim_uuid)
+
+        # verify that create_alarm and publish_create_alarm_status methods not called
+        m_create_alarm.assert_not_called()
+        m_publish_create_alarm_status.assert_not_called()
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_metrics_data_status')
+    @mock.patch.object(monPluginRec.MonPlugin, 'get_metrics_data')
+    def test_consume_invalid_metric_request_key(self, m_get_metrics_data,
+                                                m_publish_metric_data_status):
+        """Test functionality of invalid metric key request"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        # Call the consume method of plugin_receiver
+        with self.assertRaises(UnsupportedOperation):
+            self.plugin_receiver.handle_metric_requests('invalid_key', {}, vim_uuid)
+
+        # verify that get metrics data and publish methods not called
+        m_get_metrics_data.assert_not_called()
+        m_publish_metric_data_status.assert_not_called()
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_metrics_data_status')
+    @mock.patch.object(monPluginRec.MonPlugin, 'get_metrics_data')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_consume_read_metric_data_request_key(self, m_get_vim_access_config,
+                                                  m_get_metrics_data,
+                                                  m_publish_metric_data_status):
+        """Test functionality of consume for read_metric_data_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "metric_name": "metric_details"}
+        m_get_metrics_data.return_value = {"metrics_data": "metrics_details"}
+
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('read_metric_data_request', value, vim_uuid)
+        m_get_metrics_data.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_metric_data_status.assert_called_with({"metrics_data": "metrics_details"})
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_create_metric_response')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'verify_metric')
+    def test_consume_create_metric_request_key(self, m_verify_metric,
+                                               m_publish_create_metric_response):
+        """Test functionality of consume for create_metric_request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+        value = {"vim_uuid": vim_uuid, "metric_create": "metric_details"}
+
+        # set the return value
+        m_verify_metric.return_value = True
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('create_metric_request', value, vim_uuid)
+        m_verify_metric.assert_called_with(value)
+
+        # Check if publish method called with correct parameters
+        m_publish_create_metric_response.assert_called_with(value, True)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_update_metric_response')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'verify_metric')
+    def test_consume_update_metric_request_key(self, m_verify_metric,
+                                               m_publish_update_metric_response):
+        """Test functionality of update metric request key"""
+
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "metric_create": "metric_details"}
+
+        # set the return value
+        m_verify_metric.return_value = True
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('update_metric_request', value, vim_uuid)
+
+        # verify mocked methods called with correct parameters
+        m_verify_metric.assert_called_with(value)
+        m_publish_update_metric_response.assert_called_with(value, True)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'publish_delete_metric_response')
+    def test_consume_delete_metric_request_key(self, m_publish_delete_metric_response):
+        """Test functionality of consume for delete_metric_request key"""
+
+        # Note: vROPS doesn't support deleting metric data
+        vim_uuid = "f85fc39e-723d-4172-979b-de28b36465bb"
+
+        value = {"vim_uuid": vim_uuid, "metric_name": "metric_details"}
+
+        # Call the consume method of plugin_receiver
+        self.plugin_receiver.handle_metric_requests('delete_metric_request', value, vim_uuid)
+
+        # Check if publish method called with correct parameters
+        m_publish_delete_metric_response.assert_called_with(value)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_alarm')
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_rest_plugin')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_create_alarm_successful(self, m_get_vim_access_config,
+                                     m_configure_rest_plugin,
+                                     m_configure_alarm):
+        """ Test functionality of create alarm method-positive case"""
+
+        # Mock config_alarm_info
+        config_alarm_info = {"schema_version": 1.0,
+                             "schema_type": "create_alarm_request",
+                             "vim_type": "VMware",
+                             "vim_uuid": "1",
+                             "alarm_create_request": {"correlation_id": 1,
+                                                      "alarm_name": "CPU_Utilize_Threshold",
+                                                      "metric_name": "CPU_UTILIZATION",
+                                                      "tenant_uuid": "tenant_uuid",
+                                                      "resource_uuid": "resource_uuid",
+                                                      "description": "test_create_alarm",
+                                                      "severity": "CRITICAL",
+                                                      "operation": "GT",
+                                                      "threshold_value": 10,
+                                                      "unit": "%",
+                                                      "statistic": "AVERAGE"}}
+
+        # set return value to plugin uuid
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        m_configure_rest_plugin.return_value = "plugin_uuid"
+        m_configure_alarm.return_value = "alarm_uuid"
+
+        # call create alarm method under test
+        self.plugin_receiver.create_alarm(config_alarm_info)
+
+        # verify mocked methods get called with correct params
+        m_get_vim_access_config.assert_called_with(config_alarm_info['vim_uuid'])
+        m_configure_rest_plugin.assert_called_with()
+        m_configure_alarm.assert_called_with(config_alarm_info["alarm_create_request"])
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_alarm')
+    @mock.patch.object(monPluginRec.MonPlugin, 'configure_rest_plugin')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_create_alarm_failed(self, m_get_vim_access_config,
+                                 m_configure_rest_plugin,
+                                 m_configure_alarm):
+        """ Test functionality of create alarm method negative case"""
+
+        # Mock config_alarm_info
+        config_alarm_info = {"schema_version": 1.0,
+                             "schema_type": "create_alarm_request",
+                             "vim_type": "VMware",
+                             "vim_uuid": "1",
+                             "alarm_create_request": {"correlation_id": 1,
+                                                      "alarm_name": "CPU_Utilize_Threshold",
+                                                      "metric_name": "CPU_UTILIZATION",
+                                                      "tenant_uuid": "tenant_uuid",
+                                                      "resource_uuid": "resource_uuid",
+                                                      "description": "test_create_alarm",
+                                                      "severity": "CRITICAL",
+                                                      "operation": "GT",
+                                                      "threshold_value": 10,
+                                                      "unit": "%",
+                                                      "statistic": "AVERAGE"}}
+
+        # set return value to plugin uuid and alarm_uuid to None
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+        m_configure_rest_plugin.return_value = "plugin_uuid"
+        m_configure_alarm.return_value = None
+
+        # call create alarm method under test
+        alarm_uuid = self.plugin_receiver.create_alarm(config_alarm_info)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(config_alarm_info['vim_uuid'])
+        m_configure_rest_plugin.assert_called_with()
+        m_configure_alarm.assert_called_with(config_alarm_info["alarm_create_request"])
+
+        # verify create alarm method returns None when failed
+        self.assertEqual(alarm_uuid, None)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'update_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_update_alarm_successful(self, m_get_vim_access_config, m_update_alarm_configuration):
+        """ Test functionality of update alarm method-positive case"""
+
+        # Mock update_alarm_info
+        update_alarm_info = {"schema_version": 1.0, "schema_type": "update_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_update_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_update_alarm_configuration.return_value = "alarm_uuid"
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call update alarm method under test and capture the return value
+        ret_value = self.plugin_receiver.update_alarm(update_alarm_info)
+
+        # check mocked methods are called with correct params
+        m_get_vim_access_config.assert_called_with(update_alarm_info['vim_uuid'])
+        m_update_alarm_configuration.assert_called_with(update_alarm_info["alarm_update_request"])
+
+        # check return value and passed values are correct
+        self.assertEqual(ret_value, "alarm_uuid")
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'update_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_update_alarm_failed(self, m_get_vim_access_config, m_update_alarm_configuration):
+        """ Test functionality of update alarm method negative case"""
+
+        # Mock update_alarm_info
+        update_alarm_info = {"schema_version": 1.0, "schema_type": "update_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_update_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_update_alarm_configuration.return_value = None
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call update alarm method under test and capture the return value
+        ret_value = self.plugin_receiver.update_alarm(update_alarm_info)
+
+        # check mocked methods are called with correct params
+        m_get_vim_access_config.assert_called_with(update_alarm_info['vim_uuid'])
+        m_update_alarm_configuration.assert_called_with(update_alarm_info["alarm_update_request"])
+
+        # check return value and passed values are correct
+        self.assertEqual(ret_value, None)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'delete_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_delete_alarm_successful(self, m_get_vim_access_config, m_delete_alarm_configuration):
+        """ Test functionality of delete alarm method-positive case"""
+
+        # Mock delete_alarm_info
+        delete_alarm_info = {"schema_version": 1.0, "schema_type": "delete_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_delete_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_delete_alarm_configuration.return_value = "alarm_uuid"
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call delete alarm method under test and capture the return value
+        ret_value = self.plugin_receiver.delete_alarm(delete_alarm_info)
+
+        # check mocked methods are called with correct params
+        m_get_vim_access_config.assert_called_with(delete_alarm_info['vim_uuid'])
+        m_delete_alarm_configuration.assert_called_with(delete_alarm_info["alarm_delete_request"])
+
+        # check return value and passed values are correct
+        self.assertEqual(ret_value, "alarm_uuid")
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'delete_alarm_configuration')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_delete_alarm_failed(self, m_get_vim_access_config, m_delete_alarm_configuration):
+        """ Test functionality of delete alarm method-negative case"""
+
+        # Mock delete_alarm_info
+        delete_alarm_info = {"schema_version": 1.0, "schema_type": "delete_alarm_request",
+                             "vim_type": "VMware", "vim_uuid": "1",
+                             "alarm_delete_request": {'alarm_uuid': 'abc', 'correlation_id': 14203}}
+
+        # set return value to mocked method
+        m_delete_alarm_configuration.return_value = None
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call delete alarm method under test and capture the return value
+        ret_value = self.plugin_receiver.delete_alarm(delete_alarm_info)
+
+        # check mocked methods are called with correct params
+        m_get_vim_access_config.assert_called_with(delete_alarm_info['vim_uuid'])
+        m_delete_alarm_configuration.assert_called_with(delete_alarm_info["alarm_delete_request"])
+
+        # check the return value reflects the failed status
+        self.assertEqual(ret_value, None)
+
+    def test_publish_create_alarm_status(self):
+        """ Test functionality of publish create alarm status method"""
+
+        # Mock config_alarm_info
+        config_alarm_info = {'vim_type': 'VMware', "vim_uuid": "1",
+                             'alarm_create_request': {
+                                 'threshold_value': 0,
+                                 'severity': 'CRITICAL',
+                                 'alarm_name': 'CPU_Utilization_Above_Threshold',
+                                 'resource_uuid': 'e14b203c-6bf2-4e2f-a91c-8c19d240eda4',
+                                 'correlation_id': 1234,
+                                 'statistic': 'AVERAGE',
+                                 'metric_name': 'CPU_UTILIZATION'}
+                             }
+
+        alarm_uuid = "xyz"
+
+        # call publish create status method under test
+        self.plugin_receiver.publish_create_alarm_status(alarm_uuid, config_alarm_info)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
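+        # A possible shape for that TODO, kept as a comment because the exact response
+        # schema is not asserted in this commit (the keys below are hypothetical, not
+        # taken verbatim from the create_alarm_resp model):
+        #   response = self.plugin_receiver.publish_create_alarm_status(alarm_uuid, config_alarm_info)
+        #   self.assertEqual(response['alarm_create_response']['alarm_uuid'], alarm_uuid)
+        #   self.assertEqual(response['alarm_create_response']['status'], True)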
+
+    def test_publish_update_alarm_status(self):
+        """ Test functionality of publish update alarm status method"""
+
+        # Mock update_alarm_info
+        update_alarm_info = {'vim_type': 'VMware',
+                             'vim_uuid': '1',
+                             'schema_type': 'update_alarm_request',
+                             'alarm_update_request': {'alarm_uuid': '6486e69',
+                                                      'correlation_id': 14203,
+                                                      'operation': 'GT'
+                                                      }
+                             }
+
+        alarm_uuid = "xyz"
+
+        # call publish update alarm status method under test
+        self.plugin_receiver.publish_update_alarm_status(alarm_uuid, update_alarm_info)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_delete_alarm_status(self):
+        """ Test functionality of publish delete alarm status method"""
+
+        # Mock delete_alarm_info
+        delete_alarm_info = {'vim_type': 'VMware',
+                             "vim_uuid": "1",
+                             'schema_type': 'delete_alarm_request',
+                             'alarm_delete_request': {'alarm_uuid': '6486e69',
+                                                      'correlation_id': 14203,
+                                                      'operation': 'GT'
+                                                      }
+                             }
+
+        alarm_uuid = "xyz"
+
+        # call publish delete alarm status method under test
+        self.plugin_receiver.publish_delete_alarm_status(alarm_uuid, delete_alarm_info)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_metrics_data_status(self):
+        """ Test functionality of publish metrics data status method"""
+
+        # Mock metrics data
+        metrics_data = {
+            'vim_uuid': '1',
+            'metric_name': 'CPU_UTILIZATION', 'metric_uuid': '0',
+            'resource_uuid': 'e14b20', 'correlation_id': 14203,
+            'metrics_data': {'time_series': [15162011, 15162044],
+                             'metrics_series': [0.1166666671, 0.1266666650]},
+            'tenant_uuid': 123, 'unit': '%'
+        }
+
+        # call publish metrics data status method under test
+        self.plugin_receiver.publish_metrics_data_status(metrics_data)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'verify_metric_support')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_verify_metric_supported_metric(self, m_get_vim_access_config,
+                                            m_verify_metric_support):
+        """ Test functionality of verify metric method"""
+
+        # mock metric_info
+        metric_info = {'vim_uuid': '1',
+                       'metric_create_request': {'metric_unit': '%',
+                                                 'metric_name': 'CPU_UTILIZATION',
+                                                 'resource_uuid': 'e14b203'}}
+
+        # set mocked function return value to true
+        m_verify_metric_support.return_value = True
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call verify_metric method under test
+        ret_value = self.plugin_receiver.verify_metric(metric_info)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(metric_info['vim_uuid'])
+        m_verify_metric_support.assert_called_with(metric_info['metric_create_request'])
+
+        # verify the return value
+        self.assertEqual(ret_value, True)
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'verify_metric_support')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_verify_metric_unsupported_metric(self, m_get_vim_access_config,
+                                              m_verify_metric_support):
+        """ Test functionality of verify metric method-negative case"""
+
+        # mock metric_info with unsupported metrics name
+        metric_info = {'vim_uuid': '1',
+                       'metric_create_request': {'metric_unit': '%',
+                                                 'metric_name': 'Invalid',
+                                                 'resource_uuid': 'e14b203'}}
+
+        # set mocked function return value to false
+        m_verify_metric_support.return_value = False
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call verify_metric method under test
+        ret_value = self.plugin_receiver.verify_metric(metric_info)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(metric_info['vim_uuid'])
+        m_verify_metric_support.assert_called_with(metric_info['metric_create_request'])
+
+        # verify the return value
+        self.assertEqual(ret_value, False)
+
+    def test_publish_create_metric_response(self):
+        """ Test functionality of publish create metric response method"""
+
+        # Mock metric_info
+        metric_info = {
+            'vim_uuid': '1',
+            'vim_type': 'VMware',
+            'correlation_id': 14203,
+            'schema_type': 'create_metric_request',
+            'metric_create_request': {
+                'resource_uuid': '6486e69',
+                'metric_name': 'CPU_UTILIZATION',
+                'metric_unit': '%'
+            }
+        }
+
+        metric_status = True
+
+        # call publish create metric method under test
+        self.plugin_receiver.publish_create_metric_response(metric_info, metric_status)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_update_metric_response(self):
+        """ Test functionality of publish update metric response method"""
+
+        # Mock metric_info
+        metric_info = {
+            'vim_uuid': '1',
+            'vim_type': 'VMware',
+            'correlation_id': 14203,
+            'schema_type': 'update_metric_request',
+            'metric_create': {
+                'resource_uuid': '6486e69',
+                'metric_name': 'CPU_UTILIZATION',
+                'metric_unit': '%'
+            }
+        }
+
+        metric_status = True
+
+        # call publish update metric method under test
+        self.plugin_receiver.publish_update_metric_response(metric_info, metric_status)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    def test_publish_delete_metric_response(self):
+        """ Test functionality of publish delete metric response method"""
+
+        # Mock metric_info
+        metric_info = {'vim_uuid': '1', 'vim_type': 'VMware', 'correlation_id': 14203,
+                       'metric_uuid': 'e14b203c', 'resource_uuid': '6486e69',
+                       'metric_name': 'CPU_UTILIZATION',
+                       'schema_type': 'delete_metric_request'}
+
+        # call publish delete metric method under test; vROPS doesn't support
+        # deleting metrics, so it just returns a success response
+        self.plugin_receiver.publish_delete_metric_response(metric_info)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    @mock.patch.object(monPluginRec.MonPlugin, 'get_triggered_alarms_list')
+    @mock.patch.object(monPluginRec.PluginReceiver, 'get_vim_access_config')
+    def test_list_alarms(self, m_get_vim_access_config, m_get_triggered_alarms_list):
+        """ Test functionality of list alarms method"""
+
+        # Mock list alarm input
+        list_alarm_input = {
+            'vim_uuid': '1',
+            'vim_type': 'VMware',
+            'alarm_list_request': {
+                'severity': 'CRITICAL',
+                'correlation_id': 14203,
+                'alarm_name': 'CPU_Utilization_Above_Threshold',
+                'resource_uuid': 'd14b203c'}}
+
+        # set return value to mocked method
+        m_return = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
+                     'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                     'cancel_date': '0000-00-00T00:00:00', 'alarm_instance_uuid': 'd9e3bc84',
+                     'alarm_uuid': '5714977d', 'vim_type': 'VMware',
+                     'start_date': '2018-01-12T08:34:05'},
+                    {'status': 'CANCELED', 'update_date': '2017-12-20T09:37:57',
+                     'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                     'cancel_date': '2018-01-12T06:49:19', 'alarm_instance_uuid': 'd3bbeef6',
+                     'alarm_uuid': '7ba1bf3e', 'vim_type': 'VMware',
+                     'start_date': '2017-12-20T09:37:57'}]
+        m_get_triggered_alarms_list.return_value = m_return
+
+        m_get_vim_access_config.return_value = {'vrops_site': 'abc',
+                                                'vrops_user': 'user',
+                                                'vrops_password': 'passwd',
+                                                'vim_url': 'vcd_url',
+                                                'admin_username': 'admin',
+                                                'admin_password': 'admin_passwd',
+                                                'vim_uuid': '1',
+                                                'tenant_id': 'org_vdc_1'}
+
+        # call list alarms method under test
+        return_value = self.plugin_receiver.list_alarms(list_alarm_input)
+
+        # verify mocked method called with correct params
+        m_get_vim_access_config.assert_called_with(list_alarm_input['vim_uuid'])
+        m_get_triggered_alarms_list.assert_called_with(list_alarm_input['alarm_list_request'])
+
+        # verify list alarm method returns correct list
+        self.assertEqual(return_value, m_return)
+
+    def test_publish_list_alarm_response(self):
+        """ Test functionality of publish list alarm response method"""
+
+        # Mock list alarm input
+        msg_key = 'list_alarm_response'
+        topic = 'alarm_response'
+        list_alarm_input = {'vim_uuid': '1',
+                            'vim_type': 'VMware',
+                            'alarm_list_request': {
+                                'severity': 'CRITICAL',
+                                'correlation_id': 14203,
+                                'alarm_name': 'CPU_Utilization_Above_Threshold',
+                                'resource_uuid': 'd14b203c'}}
+
+        triggered_alarm_list = [{'status': 'ACTIVE', 'update_date': '2018-01-12T08:34:05',
+                                 'severity': 'CRITICAL', 'resource_uuid': 'e14b203c',
+                                 'cancel_date': '0000-00-00T00:00:00',
+                                 'start_date': '2018-01-12T08:34:05',
+                                 'alarm_instance_uuid': 'd9e3bc84',
+                                 'alarm_uuid': '5714977d',
+                                 'vim_type': 'VMware'
+                                 }]
+
+        # call publish list alarm response method under test
+        response = self.plugin_receiver.publish_list_alarm_response(triggered_alarm_list, list_alarm_input)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
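+        # Hypothetical assertions for that TODO, assuming the returned message wraps the
+        # triggered alarm list (key names are assumptions, not confirmed against the
+        # list_alarm_resp model):
+        #   self.assertEqual(response['alarm_list_response']['correlation_id'], 14203)
+        #   self.assertEqual(response['alarm_list_response']['list_alarm_response'], triggered_alarm_list)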
+
+    def test_publish_access_update_response(self):
+        """ Test functionality of publish access update response method"""
+
+        # Mock required inputs
+        access_update_status = True
+        access_info_req = {'vim_type': 'VMware',
+                           'vim_uuid': '1',
+                           'access_config': {'vrops_password': 'vmware',
+                                             'vcloud-site': 'https://192.169.241.105',
+                                             'vrops_user': 'Admin', 'correlation_id': 14203,
+                                             'tenant_id': 'Org2'}
+                           }
+
+        # call publish access update response method under test
+        response = self.plugin_receiver.publish_access_update_response(access_update_status, access_info_req)
+
+        # no mocked methods to verify here; response payload validation is still pending
+        # TODO(diazb): Validate payload generation (self.assertEquals(response, expected_message))
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
+    def test_update_access_credentials_successful(self, m_write_access_config):
+        """ Test functionality of update access credentials-positive case"""
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443',
+                       'vcenter_user': 'admin', 'vcenter_password': 'vmware',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # Mock return values
+        expected_status = m_write_access_config.return_value = True
+
+        # call publish update access credentials method under test
+        actual_status = self.plugin_receiver.update_access_credentials(access_info)
+
+        # check write_access_config called with correct params
+        m_write_access_config.assert_called_with(access_info)
+
+        # verify update access credentials returns correct status
+        self.assertEqual(expected_status, actual_status)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
+    def test_update_access_credentials_less_config_params(self, m_write_access_config):
+        """ Test functionality of update access credentials-negative case"""
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443', 'vcenter_user': 'admin',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # Mock return values
+        expected_status = m_write_access_config.return_value = False
+
+        # call publish update access credentials method under test
+        actual_status = self.plugin_receiver.update_access_credentials(access_info)
+
+        # check if mocked method not called
+        m_write_access_config.assert_not_called()
+
+        # verify update access credentials returns correct status
+        self.assertEqual(expected_status, actual_status)
+
+    @mock.patch.object(monPluginRec.PluginReceiver, 'write_access_config')
+    def test_update_access_credentials_failed(self, m_write_access_config):
+        """ Test functionality of update access credentials-failed case """
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443',
+                       'vcenter_user': 'admin', 'vcenter_password': 'vmware',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # Mock return values
+        expected_status = m_write_access_config.return_value = False
+
+        # call publish update access credentials method under test
+        actual_status = self.plugin_receiver.update_access_credentials(access_info)
+
+        # check write_access_config called with correct params
+        m_write_access_config.assert_called_with(access_info)
+
+        # verify update access credentials returns correct status
+        self.assertEqual(expected_status, actual_status)
+
+    def test_write_access_config_successful(self):
+        """ Test functionality of write access config method-positive case"""
+
+        # Mock access_info
+        access_info = {'vrops_site': 'https://192.169.241.13', 'vrops_user': 'admin',
+                       'vrops_password': 'vmware', 'vcloud-site': 'https://192.169.241.15',
+                       'admin_username': 'admin', 'admin_password': 'vmware',
+                       'vcenter_ip': '192.169.241.13', 'vcenter_port': '443',
+                       'vcenter_user': 'admin', 'vcenter_password': 'vmware',
+                       'vim_tenant_name': 'Org2', 'orgname': 'Org2', 'tenant_id': 'Org2'}
+
+        # call write access config method under test
+        actual_status = self.plugin_receiver.write_access_config(access_info)
+
+        # verify write access config returns correct status
+        self.assertEqual(True, actual_status)
+
+    def test_write_access_config_failed(self):
+        """ Test functionality of write access config method-negative case"""
+
+        # Mock access_info
+        access_info = []  # provided incorrect info to generate error
+
+        # call write access config method under test
+        actual_status = self.plugin_receiver.write_access_config(access_info)
+
+        # verify write access config returns correct status
+        self.assertEqual(False, actual_status)
+
+    @mock.patch.object(monPluginRec.AuthManager, 'get_credentials')
+    def test_get_vim_access_config(self, m_get_credentials):
+        """ Test functionality of get_vim_access_config method-positive case"""
+
+        # Mock vim_uuid & access_info
+        vim_uuid = '1'
+        vim_details = VimCredentials()
+        vim_details.name = "vrops_vcd82"
+        vim_details.password = "passwd"
+        vim_details.tenant_name = "MANO-VDC"
+        vim_details.type = "VMware"
+        vim_details.url = "https://10.10.1.1"
+        vim_details.user = "admin"
+        vim_details.uuid = "1"
+        vim_details.config = '{"orgname": "MANO-Org", "tenant_id": "MANO-VDC",\
+                        "admin_username": "administrator","admin_password": "vcd_pwd",\
+                        "vrops_user": "admin", "vrops_password": "vrops_pwd",\
+                        "vrops_site": "https://10.10.1.2","nsx_user": "admin",\
+                        "nsx_manager": "https://10.10.1.3", "nsx_password":"nsx_pwd",\
+                        "sdn_controller": "None", "sdn_port_mapping": "None",\
+                        "vcenter_ip": "10.10.1.4", "vcenter_user": "admin@vsphere.local",\
+                        "vcenter_password": "vcenter_pwd", "vcenter_port": "443"}'
+        m_get_credentials.return_value = vim_details
+        expected_config = {'vrops_password': 'vrops_pwd', 'vcenter_password': 'vcenter_pwd',
+                           'name': 'vrops_vcd82', 'org_user': 'admin',
+                           'org_password': 'passwd', 'nsx_user': 'admin', 'vim_tenant_name': 'MANO-VDC',
+                           'admin_username': 'administrator', 'vcenter_port': '443',
+                           'vim_url': 'https://10.10.1.1', 'orgname': 'MANO-Org',
+                           'admin_password': 'vcd_pwd', 'vrops_user': 'admin', 'vcenter_ip': '10.10.1.4',
+                           'vrops_site': 'https://10.10.1.2', 'nsx_manager': 'https://10.10.1.3',
+                           'nsx_password': 'nsx_pwd', 'vim_type': 'VMware', 'vim_uuid': '1',
+                           'vcenter_user': 'admin@vsphere.local'}
+
+        # call get_vim_access_config method under test
+        actual_config = self.plugin_receiver.get_vim_access_config('1')
+
+        # verify that mocked method is called
+        m_get_credentials.assert_called_with(vim_uuid)
+
+        # Verify return value with expected value
+        self.assertEqual(expected_config, actual_config)
+
+# For testing purpose
+# if __name__ == '__main__':
+#     unittest.main()
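+# A possible way to run this suite locally (a sketch, assuming the repository's
+# osm_mon/test package layout and that test dependencies such as mock are installed):
+#     python3 -m unittest discover -t . -s osm_mon/test -p 'test_*.py' -v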
diff --git a/osm_mon/test/plugins/__init__.py b/osm_mon/test/plugins/__init__.py
new file mode 100644 (file)
index 0000000..2d39b96
--- /dev/null
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2018 Whitestack, LLC
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Whitestack, LLC
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+#         http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: bdiaz@whitestack.com or glavado@whitestack.com
+##
\ No newline at end of file
index fd9c838..f9b4407 100644 (file)
@@ -18,7 +18,7 @@
 
 # For those usages not covered by the Apache License, Version 2.0 please
 # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
-kafka==1.3.*
+kafka-python==1.4.*
 requests==2.18.*
 cherrypy==14.0.*
 jsmin==2.2.*
@@ -33,4 +33,6 @@ six==1.11.*
 bottle==0.12.*
 peewee==3.1.*
 pyyaml==3.*
-git+https://osm.etsi.org/gerrit/osm/common.git#egg=osm-common
\ No newline at end of file
+prometheus_client==0.4.*
+git+https://osm.etsi.org/gerrit/osm/common.git#egg=osm-common
+git+https://osm.etsi.org/gerrit/osm/N2VC.git#egg=n2vc
\ No newline at end of file
index 4f2250b..1015de4 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -28,8 +28,8 @@ def parse_requirements(requirements):
 
 
 _name = 'osm_mon'
-_version_command = ('git describe --match v* --tags --long --dirty', 'pep440-git')
-_description = 'OSM Policy Module'
+_version_command = ('git describe --match v* --tags --long --dirty', 'pep440-git-full')
+_description = 'OSM Monitoring Module'
 _author = "Benjamín Díaz"
 _author_email = 'bdiaz@whitestack.com'
 _maintainer = 'Gianpietro Lavado'
@@ -52,8 +52,31 @@ setup(
     package_dir={_name: _name},
     scripts=['osm_mon/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice',
              'osm_mon/core/message_bus/common_consumer.py'],
-    install_requires=parse_requirements('requirements.txt'),
+    install_requires=[
+        "kafka-python==1.4.*",
+        "requests==2.18.*",
+        "cherrypy==14.0.*",
+        "jsmin==2.2.*",
+        "jsonschema==2.6.*",
+        "python-keystoneclient==3.15.*",
+        "boto==2.48",
+        "python-cloudwatchlogs-logging==0.0.3",
+        "py-cloudwatch==0.0.1",
+        "pyvcloud==19.1.1",
+        "pyopenssl==17.5.*",
+        "six==1.11.*",
+        "bottle==0.12.*",
+        "peewee==3.1.*",
+        "pyyaml==3.*",
+        "osm-common",
+        "n2vc"
+    ],
     include_package_data=True,
+    entry_points={
+        "console_scripts": [
+            "osm-mon-prometheus-exporter = osm_mon.cmd.mon_prometheus_exporter:main",
+        ]
+    },
     dependency_links=[
         'git+https://osm.etsi.org/gerrit/osm/common.git#egg=osm-common'
     ],
diff --git a/stdeb.cfg b/stdeb.cfg
new file mode 100644 (file)
index 0000000..bc035de
--- /dev/null
+++ b/stdeb.cfg
@@ -0,0 +1,3 @@
+[DEFAULT]
+X-Python3-Version : >= 3.4
+Depends3 : libmysqlclient-dev, libssl-dev, libffi-dev, libxml2-dev, libxslt-dev, python3-pip, python3-osm-common
\ No newline at end of file