From b726161f9fe605ceb418d9b5df12d295bff28c3a Mon Sep 17 00:00:00 2001 From: Benjamin Diaz Date: Fri, 11 May 2018 18:00:16 -0300 Subject: [PATCH] Adds support for vdu_name, ns_id and vnf_member_index Signed-off-by: Benjamin Diaz Change-Id: I194753f7d581287dc85850378391dcf1585e78c3 --- Dockerfile | 19 +- MANIFEST.in | 5 +- Makefile | 83 ------- devops-stages/stage-build.sh | 6 +- devops-stages/stage-test.sh | 2 +- osm_mon/core/message_bus/common_consumer.py | 221 +++++++++++------- osm_mon/core/models/.gitkeep | 1 - osm_mon/core/models/acknowledge_alarm.json | 11 +- ...reate_alarm.json => create_alarm_req.json} | 15 +- osm_mon/core/models/create_metric_req.json | 17 +- osm_mon/core/models/create_metric_resp.json | 5 +- osm_mon/core/models/delete_alarm_req.json | 11 +- osm_mon/core/models/delete_metric_req.json | 10 +- osm_mon/core/models/list_alarm_req.json | 7 +- osm_mon/core/models/list_alarm_resp.json | 2 +- osm_mon/core/models/list_metric_req.json | 9 +- osm_mon/core/models/notify_alarm.json | 43 ++-- osm_mon/core/models/read_metric_data_req.json | 8 +- osm_mon/core/models/update_alarm_req.json | 14 +- osm_mon/core/models/update_metric_req.json | 13 +- osm_mon/core/settings.py | 3 +- osm_mon/plugins/CloudWatch/metric_alarms.py | 2 +- osm_mon/plugins/CloudWatch/plugin_alarm.py | 10 +- osm_mon/plugins/CloudWatch/plugin_metric.py | 8 +- osm_mon/plugins/OpenStack/Aodh/alarming.py | 30 +-- osm_mon/plugins/OpenStack/Gnocchi/metrics.py | 20 +- osm_mon/plugins/OpenStack/response.py | 2 +- .../plugins/vRealiseOps/mon_plugin_vrops.py | 145 ++++++------ .../plugins/vRealiseOps/plugin_receiver.py | 20 +- osm_mon/plugins/vRealiseOps/vrops_config.xml | 46 +--- .../integration/test_alarm_integration.py | 35 +-- .../integration/test_metric_integration.py | 32 +-- osm_mon/test/OpenStack/unit/test_alarm_req.py | 38 +-- osm_mon/test/OpenStack/unit/test_alarming.py | 45 ++-- .../test/OpenStack/unit/test_metric_calls.py | 8 +- .../test/OpenStack/unit/test_metric_req.py | 45 ++-- osm_mon/test/OpenStack/unit/test_notifier.py | 4 +- osm_mon/test/VMware/test_mon_plugin_vrops.py | 1 + osm_mon/test/VMware/test_plugin_receiver.py | 8 +- osm_mon/test/core/test_common_consumer.py | 63 +++++ requirements.txt | 1 - setup.py | 46 ++-- test-requirements.txt | 13 -- tox.ini | 33 +-- 44 files changed, 550 insertions(+), 610 deletions(-) delete mode 100644 Makefile delete mode 100644 osm_mon/core/models/.gitkeep rename osm_mon/core/models/{create_alarm.json => create_alarm_req.json} (84%) create mode 100644 osm_mon/test/core/test_common_consumer.py diff --git a/Dockerfile b/Dockerfile index bb419b4..3fa3cfd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,20 +24,7 @@ FROM ubuntu:16.04 RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get --yes install git tox make python python-pip debhelper && \ - DEBIAN_FRONTEND=noninteractive apt-get --yes install wget python-dev python-software-properties python-stdeb&& \ - DEBIAN_FRONTEND=noninteractive pip install -U pip && \ - DEBIAN_FRONTENT=noninteractive pip install -U requests logutils jsonschema lxml kafka mock && \ - DEBIAN_FRONTEND=noninteractive pip install -U setuptools setuptools-version-command stdeb jsmin && \ - DEBIAN_FRONTEND=noninteractive pip install -U six pyvcloud==19.1.1 bottle cherrypy pyopenssl && \ + DEBIAN_FRONTEND=noninteractive apt-get --yes install git tox make python python-pip python3 python3-pip debhelper && \ + DEBIAN_FRONTEND=noninteractive apt-get --yes install wget python-dev python-software-properties python-stdeb && \ 
DEBIAN_FRONTEND=noninteractive apt-get --yes install default-jre libmysqlclient-dev && \ - DEBIAN_FRONTEND=noninteractive apt-get --yes install libmysqlclient-dev libxml2 && \ - DEBIAN_FRONTEND=noninteractive pip install -U MySQL-python \ - python-openstackclient \ - python-keystoneclient \ - aodhclient \ - gnocchiclient \ - boto==2.48 \ - python-cloudwatchlogs-logging \ - py-cloudwatch \ - peewee==3.1.* + DEBIAN_FRONTEND=noninteractive apt-get --yes install libmysqlclient-dev libxml2 python3-all diff --git a/MANIFEST.in b/MANIFEST.in index 79f2b1c..0887e73 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -21,7 +21,6 @@ include requirements.txt include README.rst -include kafkad -recursive-include osm_mon * +recursive-include osm_mon *.py recursive-include devops-stages * -recursive-include test * +recursive-include test *.py diff --git a/Makefile b/Makefile deleted file mode 100644 index a881380..0000000 --- a/Makefile +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2017 Intel Research and Development Ireland Limited -# ************************************************************* - -# This file is part of OSM Monitoring module -# All Rights Reserved to Intel Corporation - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# For those usages not covered by the Apache License, Version 2.0 please -# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com - -#__author__ = "Prithiv Mohan" -#__date__ = "14/Sep/2017" - -SHELL := /bin/bash -all: package install - -clean_deb: - rm -rf .build - -clean: - rm -rf build - rm -rf .build - find . 
-name '*.pyc' -delete - -prepare: - #apt-get --yes install python-stdeb python-pip libmysqlclient-dev debhelper - #pip install --upgrade setuptools - mkdir -p build/ - cp tox.ini build/ - cp MANIFEST.in build/ - cp requirements.txt build/ - cp test-requirements.txt build/ - cp README.rst build/ - cp setup.py build/ - cp kafkad build/ - cp -r osm_mon build/ - cp -r devops-stages build/ - cp -r scripts build/ - #pip install -r requirements.txt - #pip install -r test-requirements.txt - -build: clean openstack_plugins prepare - python -m py_compile build/osm_mon/plugins/OpenStack/*.py - -build: clean vrops_plugins prepare - python -m py_compile build/osm_mon/plugins/vRealiseOps/*.py - -build: clean cloudwatch_plugins prepare - python -m py_compile build/osm_mon/plugins/CloudWatch/*.py - -build: clean core prepare - python -m py_compile build/osm_mon/core/message_bus/*.py - -pip: prepare - cd build ./setup.py sdist - -package: clean clean_deb prepare - cd build && python setup.py --command-packages=stdeb.command sdist_dsc --with-python2=True --with-python3=False bdist_deb - mkdir -p .build - cp build/deb_dist/python-*.deb .build/ - -develop: prepare - cd build && ./setup.py develop - -install: - DEBIAN_FRONTEND=noninteractive apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install --yes python-pip && \ - pip install --upgrade pip - dpkg -i build/deb_dist/*.deb - -build-docker-from-source: - docker build -t osm:MON -f docker/Dockerfile diff --git a/devops-stages/stage-build.sh b/devops-stages/stage-build.sh index e78acc0..4251b1c 100755 --- a/devops-stages/stage-build.sh +++ b/devops-stages/stage-build.sh @@ -23,5 +23,7 @@ #__date__ = "14/Sep/2017" #!/bin/bash -make clean all BRANCH=master -make package +rm -rf deb_dist +rm -rf dist +rm -rf osm_mon.egg-info +tox -e build diff --git a/devops-stages/stage-test.sh b/devops-stages/stage-test.sh index c647565..d588666 100755 --- a/devops-stages/stage-test.sh +++ b/devops-stages/stage-test.sh @@ -23,4 +23,4 @@ #__date__ = "14/Sep/2017" #!/bin/bash -echo "UNITTEST" +tox diff --git a/osm_mon/core/message_bus/common_consumer.py b/osm_mon/core/message_bus/common_consumer.py index 10cd3d5..69abee5 100755 --- a/osm_mon/core/message_bus/common_consumer.py +++ b/osm_mon/core/message_bus/common_consumer.py @@ -21,23 +21,14 @@ import json import logging -import os import sys +import six import yaml -from osm_mon.core.settings import Config - -logging.basicConfig(stream=sys.stdout, - format='%(asctime)s %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', - level=logging.INFO) -log = logging.getLogger(__name__) - -sys.path.append(os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))) - from kafka import KafkaConsumer +from osm_mon.core.settings import Config from osm_mon.plugins.OpenStack.Aodh import alarming from osm_mon.plugins.OpenStack.Gnocchi import metrics @@ -51,90 +42,144 @@ from osm_mon.plugins.vRealiseOps import plugin_receiver from osm_mon.core.auth import AuthManager from osm_mon.core.database import DatabaseManager -cfg = Config.instance() -cfg.read_environ() +from osm_common import dbmongo -# Initialize consumers for alarms and metrics -common_consumer = KafkaConsumer(bootstrap_servers=cfg.BROKER_URI, - key_deserializer=bytes.decode, - value_deserializer=bytes.decode, - group_id="mon-consumer") +logging.basicConfig(stream=sys.stdout, + format='%(asctime)s %(message)s', + datefmt='%m/%d/%Y %I:%M:%S %p', + level=logging.INFO) +log = logging.getLogger(__name__) -auth_manager = AuthManager() -database_manager = 
DatabaseManager() -database_manager.create_tables() -# Create OpenStack alarming and metric instances -openstack_metrics = metrics.Metrics() -openstack_alarms = alarming.Alarming() +def get_vim_type(db_manager, vim_uuid): + """Get the vim type that is required by the message.""" + credentials = db_manager.get_credentials(vim_uuid) + return credentials.type -# Create CloudWatch alarm and metric instances -cloudwatch_alarms = plugin_alarms() -cloudwatch_metrics = plugin_metrics() -aws_connection = Connection() -aws_access_credentials = AccessCredentials() -# Create vROps plugin_receiver class instance -vrops_rcvr = plugin_receiver.PluginReceiver() +def get_vdur(common_db, nsr_id, member_index, vdu_name): + vnfr = get_vnfr(common_db, nsr_id, member_index) + for vdur in vnfr['vdur']: + if vdur['vdu-id-ref'] == vdu_name: + return vdur + raise ValueError('vdur not found for nsr-id %s, member_index %s and vdu_name %s', nsr_id, member_index, vdu_name) -def get_vim_type(vim_uuid): - """Get the vim type that is required by the message.""" - try: - credentials = database_manager.get_credentials(vim_uuid) - return credentials.type - except Exception: - log.exception("Error getting vim_type: ") - return None - - -# Define subscribe the consumer for the plugins -topics = ['metric_request', 'alarm_request', 'access_credentials', 'vim_account'] -# TODO: Remove access_credentials -common_consumer.subscribe(topics) - -log.info("Listening for alarm_request and metric_request messages") -for message in common_consumer: - log.info("Message arrived: %s", message) - try: +def get_vnfr(common_db, nsr_id, member_index): + vnfr = common_db.get_one(table="vnfrs", filter={"nsr-id-ref": nsr_id, "member-vnf-index-ref": str(member_index)}) + return vnfr + + +def main(): + cfg = Config.instance() + cfg.read_environ() + + auth_manager = AuthManager() + database_manager = DatabaseManager() + database_manager.create_tables() + + # Create OpenStack alarming and metric instances + openstack_metrics = metrics.Metrics() + openstack_alarms = alarming.Alarming() + + # Create CloudWatch alarm and metric instances + cloudwatch_alarms = plugin_alarms() + cloudwatch_metrics = plugin_metrics() + aws_connection = Connection() + aws_access_credentials = AccessCredentials() + + # Create vROps plugin_receiver class instance + vrops_rcvr = plugin_receiver.PluginReceiver() + + common_db = dbmongo.DbMongo() + common_db_uri = cfg.MONGO_URI.split(':') + common_db.db_connect({'host': common_db_uri[0], 'port': int(common_db_uri[1]), 'name': 'osm'}) + + # Initialize consumers for alarms and metrics + common_consumer = KafkaConsumer(bootstrap_servers=cfg.BROKER_URI, + key_deserializer=bytes.decode, + value_deserializer=bytes.decode, + group_id="mon-consumer") + + # Define subscribe the consumer for the plugins + topics = ['metric_request', 'alarm_request', 'access_credentials', 'vim_account'] + # TODO: Remove access_credentials + common_consumer.subscribe(topics) + + log.info("Listening for alarm_request and metric_request messages") + for message in common_consumer: + log.info("Message arrived: %s", message) try: - values = json.loads(message.value) - except ValueError: - values = yaml.safe_load(message.value) - - if message.topic == "vim_account": - if message.key == "create" or message.key == "edit": - auth_manager.store_auth_credentials(values) - if message.key == "delete": - auth_manager.delete_auth_credentials(values) - - else: - # Check the vim desired by the message - vim_type = get_vim_type(values['vim_uuid']) - if vim_type == 
"openstack": - log.info("This message is for the OpenStack plugin.") - if message.topic == "metric_request": - openstack_metrics.metric_calls(message) - if message.topic == "alarm_request": - openstack_alarms.alarming(message) - - elif vim_type == "aws": - log.info("This message is for the CloudWatch plugin.") - aws_conn = aws_connection.setEnvironment() - if message.topic == "metric_request": - cloudwatch_metrics.metric_calls(message, aws_conn) - if message.topic == "alarm_request": - cloudwatch_alarms.alarm_calls(message, aws_conn) - if message.topic == "access_credentials": - aws_access_credentials.access_credential_calls(message) - - elif vim_type == "vmware": - log.info("This metric_request message is for the vROPs plugin.") - vrops_rcvr.consume(message) + try: + values = json.loads(message.value) + except ValueError: + values = yaml.safe_load(message.value) - else: - log.debug("vim_type is misconfigured or unsupported; %s", - vim_type) + if message.topic == "vim_account": + if message.key == "create" or message.key == "edit": + auth_manager.store_auth_credentials(values) + if message.key == "delete": + auth_manager.delete_auth_credentials(values) - except Exception: - log.exception("Exception processing message: ") + else: + # Get ns_id from message + # TODO: Standardize all message models to avoid the need of figuring out where are certain fields + contains_list = False + list_index = None + ns_id = None + for k, v in six.iteritems(values): + if isinstance(v, dict): + if 'ns_id' in v: + ns_id = v['ns_id'] + contains_list = True + list_index = k + if not contains_list and 'ns_id' in values: + ns_id = values['ns_id'] + + vnf_index = values[list_index]['vnf_member_index'] if contains_list else values['vnf_member_index'] + + # Check the vim desired by the message + vnfr = get_vnfr(common_db, ns_id, vnf_index) + vim_uuid = vnfr['vim-account-id'] + vim_type = get_vim_type(database_manager, vim_uuid) + + if (contains_list and 'vdu_name' in values[list_index]) or 'vdu_name' in values: + vdu_name = values[list_index]['vdu_name'] if contains_list else values['vdu_name'] + vdur = get_vdur(common_db, ns_id, vnf_index, vdu_name) + if contains_list: + values[list_index]['resource_uuid'] = vdur['vim-id'] + else: + values['resource_uuid'] = vdur['vim-id'] + message = message._replace(value=json.dumps(values)) + + if vim_type == "openstack": + log.info("This message is for the OpenStack plugin.") + if message.topic == "metric_request": + openstack_metrics.metric_calls(message, vim_uuid) + if message.topic == "alarm_request": + openstack_alarms.alarming(message, vim_uuid) + + elif vim_type == "aws": + log.info("This message is for the CloudWatch plugin.") + aws_conn = aws_connection.setEnvironment() + if message.topic == "metric_request": + cloudwatch_metrics.metric_calls(message, aws_conn) + if message.topic == "alarm_request": + cloudwatch_alarms.alarm_calls(message, aws_conn) + if message.topic == "access_credentials": + aws_access_credentials.access_credential_calls(message) + + elif vim_type == "vmware": + log.info("This metric_request message is for the vROPs plugin.") + vrops_rcvr.consume(message) + + else: + log.debug("vim_type is misconfigured or unsupported; %s", + vim_type) + + except Exception: + log.exception("Exception processing message: ") + + +if __name__ == '__main__': + main() diff --git a/osm_mon/core/models/.gitkeep b/osm_mon/core/models/.gitkeep deleted file mode 100644 index 2272ebb..0000000 --- a/osm_mon/core/models/.gitkeep +++ /dev/null @@ -1 +0,0 @@ -#gitkeep file to keep 
the initial empty directory structure. diff --git a/osm_mon/core/models/acknowledge_alarm.json b/osm_mon/core/models/acknowledge_alarm.json index a3734ee..6a05f07 100644 --- a/osm_mon/core/models/acknowledge_alarm.json +++ b/osm_mon/core/models/acknowledge_alarm.json @@ -28,14 +28,13 @@ "ack_details": { "alarm_uuid": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "tenant_uuid": { "type": "string" } + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"} }, "required": [ "schema_version", "schema_type", - "vim_type", - "vim_uuid", "alarm_uuid", - "resource_uuid", - "tenant_uuid" ] + "ns_id", + "vnf_member_index" ] } diff --git a/osm_mon/core/models/create_alarm.json b/osm_mon/core/models/create_alarm_req.json similarity index 84% rename from osm_mon/core/models/create_alarm.json rename to osm_mon/core/models/create_alarm_req.json index a25ea7b..2b98017 100644 --- a/osm_mon/core/models/create_alarm.json +++ b/osm_mon/core/models/create_alarm_req.json @@ -22,33 +22,30 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "vim_type": { "type": "string "}, - "vim_uuid": { "type": "string" }, "alarm_create_request": { "correlation_id": { "type": "integer" }, "alarm_name": { "type": "string" }, "metric_name": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"}, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"}, "description": { "type": "string" }, "severity": { "type": "string" }, "operation": { "type": "string" }, "threshold_value": { "type": "integer" }, - "unit": { "type": "string" }, "statistic": { "type": "string" } }, "required": [ "schema_version", "schema_type", - "vim_type", - "vim_uuid", "correlation_id", "alarm_name", "metric_name", - "resource_uuid", + "ns_id", + "vnf_member_index", + "vdu_name", "severity", "operation", "threshold_value", - "unit", "statistic" ] } diff --git a/osm_mon/core/models/create_metric_req.json b/osm_mon/core/models/create_metric_req.json index 6fa0972..d419824 100644 --- a/osm_mon/core/models/create_metric_req.json +++ b/osm_mon/core/models/create_metric_req.json @@ -23,22 +23,21 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "correlation_id": { "type": "integer" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, - "metric_create": + "metric_create_request": { + "correlation_id": { "type": "integer" }, "metric_name": { "type" : "string" }, "metric_unit": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"} + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"} }, "required": [ "schema_version", "schema_type", "correlation_id", - "vim_type", - "vim_uuid", "metric_name", "metric_unit", - "resource_uuid" ] + "ns_id", + "vnf_member_index", + "vdu_name" ] } diff --git a/osm_mon/core/models/create_metric_resp.json b/osm_mon/core/models/create_metric_resp.json index 8e7df4e..bbe36ea 100644 --- a/osm_mon/core/models/create_metric_resp.json +++ b/osm_mon/core/models/create_metric_resp.json @@ -22,12 +22,9 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "correlation_id": { "type": "integer" }, - "vim_uuid": { "type": "string" }, "metric_create_response": { - "metric_uuid": { "type": "string" }, - "resource_uuid": { "type": "string" }, + "correlation_id": { "type": "integer" }, 
"status": { "type": "boolean" } }, "required": [ "schema_type", diff --git a/osm_mon/core/models/delete_alarm_req.json b/osm_mon/core/models/delete_alarm_req.json index a5b99e6..8c3d88b 100644 --- a/osm_mon/core/models/delete_alarm_req.json +++ b/osm_mon/core/models/delete_alarm_req.json @@ -22,19 +22,18 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, "alarm_delete_request": { - "alarm_uuid": { "type": "string" }, "correlation_id": { "type": "integer" }, - "vdu_id": { "type": "string"} + "alarm_uuid": { "type": "string" }, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"} }, "required": [ "schema_version", "schema_type", - "vim_type", - "vim_uuid", "alarm_uuid", + "ns_id", + "vnf_member_index", "correlation_id" ] } diff --git a/osm_mon/core/models/delete_metric_req.json b/osm_mon/core/models/delete_metric_req.json index c5788cd..077646e 100644 --- a/osm_mon/core/models/delete_metric_req.json +++ b/osm_mon/core/models/delete_metric_req.json @@ -22,13 +22,11 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "metric_name": { "type": "string" }, - "metric_uuid": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"}, "correlation_id": { "type": "integer" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, + "metric_name": { "type": "string" }, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"}, "required": [ "schema_version", "schema_type", "metric_name", diff --git a/osm_mon/core/models/list_alarm_req.json b/osm_mon/core/models/list_alarm_req.json index 7dd9785..7ecc127 100644 --- a/osm_mon/core/models/list_alarm_req.json +++ b/osm_mon/core/models/list_alarm_req.json @@ -22,13 +22,12 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, "alarm_list_request": { "correlation_id": { "type": "integer" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"}, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"}, "alarm_name": { "type": "string" }, "severity": { "type" : "string" } }, diff --git a/osm_mon/core/models/list_alarm_resp.json b/osm_mon/core/models/list_alarm_resp.json index cec2ef7..077e24c 100644 --- a/osm_mon/core/models/list_alarm_resp.json +++ b/osm_mon/core/models/list_alarm_resp.json @@ -24,5 +24,5 @@ "schema_type": { "type": "string" }, "vim_type": { "type": "string" }, "vim_uuid": { "type": "string" }, - "list_alarm_resp": { "$ref": "definitions.json#/notify_details" } + "list_alarm_response": { "$ref": "definitions.json#/notify_details" } } diff --git a/osm_mon/core/models/list_metric_req.json b/osm_mon/core/models/list_metric_req.json index c684208..9612e44 100644 --- a/osm_mon/core/models/list_metric_req.json +++ b/osm_mon/core/models/list_metric_req.json @@ -22,14 +22,13 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, "metrics_list_request": { - "metric_name": { "type": "string" }, "correlation_id": { "type": "integer" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"} + "metric_name": { "type": "string" }, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + 
"vdu_name": { "type": "string"} }, "required": [ "schema_version", "schema_type", diff --git a/osm_mon/core/models/notify_alarm.json b/osm_mon/core/models/notify_alarm.json index 0430d6a..1fcd18e 100644 --- a/osm_mon/core/models/notify_alarm.json +++ b/osm_mon/core/models/notify_alarm.json @@ -22,30 +22,23 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "definitions": + "notify_details": { - "notify_details": - { - "alarm_uuid": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"}, - "description": { "type": "string" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, - "severity": { "type" : "string" }, - "status": { "type": "string" }, - "start_date": { "type": "string" }, - "update_date": { "type": "string" }, - "cancel_date": { "type": "string" } - }, - "required": [ "schema_version", - "schema_type", - "alarm_uuid", - "resource_uuid", - "vim_type", - "vim_uuid", - "severity", - "status", - "start_date" ] - } + "alarm_uuid": { "type": "string" }, + "description": { "type": "string" }, + "severity": { "type" : "string" }, + "status": { "type": "string" }, + "start_date": { "type": "string" }, + "update_date": { "type": "string" }, + "cancel_date": { "type": "string" } + }, + "required": [ "schema_version", + "schema_type", + "alarm_uuid", + "resource_uuid", + "vim_type", + "vim_uuid", + "severity", + "status", + "start_date" ] } diff --git a/osm_mon/core/models/read_metric_data_req.json b/osm_mon/core/models/read_metric_data_req.json index 2554be8..7964163 100644 --- a/osm_mon/core/models/read_metric_data_req.json +++ b/osm_mon/core/models/read_metric_data_req.json @@ -23,12 +23,10 @@ "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, "metric_name": { "type": "string" }, - "metric_uuid": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"}, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"}, "correlation_id": { "type": "integer" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, "collection_period": { "type": "integer" }, "collection_unit": { "type": "string" }, "required": ["schema_version", diff --git a/osm_mon/core/models/update_alarm_req.json b/osm_mon/core/models/update_alarm_req.json index a8a0f82..f71766f 100644 --- a/osm_mon/core/models/update_alarm_req.json +++ b/osm_mon/core/models/update_alarm_req.json @@ -22,23 +22,23 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "vim_type": { "type": "string" }, - "vim_uuid": { "type": "string" }, "alarm_update_request": - { +{ "correlation_id": { "type": "integer" }, + "vim_uuid": { "type": "string" }, "alarm_uuid": { "type": "string" }, - "metric_uuid": { "type": "string" }, - "vdu_id": { "type": "string"}, + "metric_name": { "type": "string" }, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"}, "description": { "type": "string" }, "severity": { "type": "string" }, "operation": { "type": "string" }, "threshold_value": { "type": "string" }, - "unit": { "type": "string" }, "statistic": { "type": "string" } }, "required": [ "schema_version", - "scema_type", + "schema_type", "vim_type", "vim_uuid", "correlation_id", diff --git a/osm_mon/core/models/update_metric_req.json b/osm_mon/core/models/update_metric_req.json index 4b1c157..2a76722 100644 --- a/osm_mon/core/models/update_metric_req.json +++ 
b/osm_mon/core/models/update_metric_req.json @@ -22,15 +22,16 @@ { "schema_version": { "type": "string" }, "schema_type": { "type": "string" }, - "correlation_id": { "type": "integer" }, "vim_type": { "type": "string" }, "vim_uuid": { "type": "string" }, - "metric_create": + "metric_update_request": { - "metric_name": { "type": "string" }, - "metric_unit": { "type": "string" }, - "resource_uuid": { "type": "string" }, - "vdu_id": { "type": "string"} + "correlation_id": { "type": "integer" }, + "metric_name": { "type": "string" }, + "metric_unit": { "type": "string" }, + "ns_id": { "type": "string"}, + "vnf_member_index": { "type": "integer"}, + "vdu_name": { "type": "string"} }, "required": [ "schema_version", "schema_type", diff --git a/osm_mon/core/settings.py b/osm_mon/core/settings.py index ae717e0..db78b4a 100644 --- a/osm_mon/core/settings.py +++ b/osm_mon/core/settings.py @@ -60,6 +60,7 @@ class Config(object): _configuration = [ CfgParam('BROKER_URI', "localhost:9092", six.text_type), + CfgParam('MONGO_URI', "mongo:27017", six.text_type), CfgParam('DATABASE', "sqlite:///mon_sqlite.db", six.text_type), CfgParam('OS_NOTIFIER_URI', "http://localhost:8662", six.text_type), CfgParam('OS_DEFAULT_GRANULARITY', "300", six.text_type), @@ -80,5 +81,5 @@ class Config(object): val = str(os.environ[key]) setattr(self, key, val) except KeyError as exc: - log.warning("Environment variable not present: %s", exc) + log.debug("Environment variable not present: %s", exc) return diff --git a/osm_mon/plugins/CloudWatch/metric_alarms.py b/osm_mon/plugins/CloudWatch/metric_alarms.py index 7b03f73..8e5b6fe 100644 --- a/osm_mon/plugins/CloudWatch/metric_alarms.py +++ b/osm_mon/plugins/CloudWatch/metric_alarms.py @@ -244,7 +244,7 @@ class MetricAlarm(): alarm_info['schema_version'] = str(list_info['schema_version']) alarm_info['schema_type'] = 'list_alarm_response' - alarm_info['list_alarm_resp'] = alarm_list + alarm_info['list_alarm_response'] = alarm_list return alarm_info except Exception as e: diff --git a/osm_mon/plugins/CloudWatch/plugin_alarm.py b/osm_mon/plugins/CloudWatch/plugin_alarm.py index c2ac6a7..dea2b06 100644 --- a/osm_mon/plugins/CloudWatch/plugin_alarm.py +++ b/osm_mon/plugins/CloudWatch/plugin_alarm.py @@ -107,7 +107,7 @@ class plugin_alarms(): ack_details = self.get_ack_details(alarm_info) payload = json.dumps(ack_details) file = open('../../core/models/notify_alarm.json','wb').write((payload)) - self.producer.notify_alarm(key='notify_alarm',message=payload,topic = 'alarm_response') + self.producer.notify_alarm(key='notify_alarm',message=payload) log.info("Acknowledge sent: %s", ack_details) else: @@ -127,13 +127,13 @@ class plugin_alarms(): if update_resp == None: payload = json.dumps(update_resp) file = open('../../core/models/update_alarm_resp.json','wb').write((payload)) - self.producer.update_alarm_response(key='update_alarm_response',message=payload,topic = 'alarm_response') + self.producer.update_alarm_response(key='update_alarm_response',message=payload) log.debug("Alarm Already exists") else: payload = json.dumps(update_resp) file = open('../../core/models/update_alarm_resp.json','wb').write((payload)) - self.producer.update_alarm_response(key='update_alarm_response',message=payload,topic = 'alarm_response') + self.producer.update_alarm_response(key='update_alarm_response',message=payload) log.info("Alarm Updated with alarm info: %s", update_resp) else: @@ -146,7 +146,7 @@ class plugin_alarms(): del_resp = self.delete_alarm(del_info) payload = json.dumps(del_resp) file = 
open('../../core/models/delete_alarm_resp.json','wb').write((payload)) - self.producer.delete_alarm_response(key='delete_alarm_response',message=payload,topic = 'alarm_response') + self.producer.delete_alarm_response(key='delete_alarm_response',message=payload) log.info("Alarm Deleted with alarm info: %s", del_resp) @@ -158,7 +158,7 @@ class plugin_alarms(): list_resp = self.get_alarms_list(alarm_info)#['alarm_names'] payload = json.dumps(list_resp) file = open('../../core/models/list_alarm_resp.json','wb').write((payload)) - self.producer.list_alarm_response(key='list_alarm_response',message=payload,topic = 'alarm_response') + self.producer.list_alarm_response(key='list_alarm_response',message=payload) else: log.error("Resource ID is Incorrect") diff --git a/osm_mon/plugins/CloudWatch/plugin_metric.py b/osm_mon/plugins/CloudWatch/plugin_metric.py index 3b7029f..dc687db 100644 --- a/osm_mon/plugins/CloudWatch/plugin_metric.py +++ b/osm_mon/plugins/CloudWatch/plugin_metric.py @@ -84,8 +84,8 @@ class plugin_metrics(): log.info("Action required against: %s" % (message.topic)) if message.key == "create_metric_request": - if self.check_resource(metric_info['metric_create']['resource_uuid']) == True: - metric_resp = self.create_metric_request(metric_info['metric_create']) #alarm_info = message.value + if self.check_resource(metric_info['metric_create_request']['resource_uuid']) == True: + metric_resp = self.create_metric_request(metric_info['metric_create_request']) #alarm_info = message.value metric_response['schema_version'] = metric_info['schema_version'] metric_response['schema_type'] = "create_metric_response" metric_response['metric_create_response'] = metric_resp @@ -97,8 +97,8 @@ class plugin_metrics(): return metric_response elif message.key == "update_metric_request": - if self.check_resource(metric_info['metric_create']['resource_uuid']) == True: - update_resp = self.update_metric_request(metric_info['metric_create']) + if self.check_resource(metric_info['metric_create_request']['resource_uuid']) == True: + update_resp = self.update_metric_request(metric_info['metric_create_request']) metric_response['schema_version'] = metric_info['schema_version'] metric_response['schema_type'] = "update_metric_response" metric_response['metric_update_response'] = update_resp diff --git a/osm_mon/plugins/OpenStack/Aodh/alarming.py b/osm_mon/plugins/OpenStack/Aodh/alarming.py index 2c145ee..7dd5d4b 100644 --- a/osm_mon/plugins/OpenStack/Aodh/alarming.py +++ b/osm_mon/plugins/OpenStack/Aodh/alarming.py @@ -36,17 +36,6 @@ from osm_mon.plugins.OpenStack.response import OpenStack_Response log = logging.getLogger(__name__) -ALARM_NAMES = { - "average_memory_usage_above_threshold": "average_memory_utilization", - "disk_read_ops": "disk_read_ops", - "disk_write_ops": "disk_write_ops", - "disk_read_bytes": "disk_read_bytes", - "disk_write_bytes": "disk_write_bytes", - "net_packets_dropped": "packets_dropped", - "packets_in_above_threshold": "packets_received", - "packets_out_above_threshold": "packets_sent", - "cpu_utilization_above_threshold": "cpu_utilization"} - METRIC_MAPPINGS = { "average_memory_utilization": "memory.percent", "disk_read_ops": "disk.read.requests", @@ -126,7 +115,7 @@ class Alarming(object): log.warning("Failed to create the alarm: %s", exc) return None, False - def alarming(self, message): + def alarming(self, message, vim_uuid): """Consume info from the message bus to manage alarms.""" try: values = json.loads(message.value) @@ -134,7 +123,6 @@ class Alarming(object): values = 
yaml.safe_load(message.value) log.info("OpenStack alarm action required.") - vim_uuid = values['vim_uuid'] auth_token = Common.get_auth_token(vim_uuid) @@ -152,11 +140,10 @@ class Alarming(object): alarm_endpoint, metric_endpoint, auth_token, alarm_details, vim_config) # Generate a valid response message, send via producer + if alarm_status is True: + log.info("Alarm successfully created") + self._database_manager.save_alarm(alarm_id, vim_uuid) try: - if alarm_status is True: - log.info("Alarm successfully created") - self._database_manager.save_alarm(alarm_id, vim_uuid) - resp_message = self._response.generate_response( 'create_alarm_response', status=alarm_status, alarm_id=alarm_id, @@ -276,9 +263,6 @@ class Alarming(object): # Checking what fields are specified for a list request try: name = list_details['alarm_name'].lower() - if name not in ALARM_NAMES.keys(): - log.warning("This alarm is not supported, won't be used!") - name = None except KeyError as exc: log.info("Alarm name isn't specified.") name = None @@ -369,8 +353,7 @@ class Alarming(object): resource_id = rule['resource_id'] metric_name = [key for key, value in six.iteritems(METRIC_MAPPINGS) if value == rule['metric']][0] except Exception as exc: - log.warning("Failed to retrieve existing alarm info: %s.\ - Can only update OSM alarms.", exc) + log.exception("Failed to retrieve existing alarm info. Can only update OSM alarms.") return None, False # Generates and check payload configuration for alarm update @@ -387,8 +370,7 @@ class Alarming(object): return json.loads(update_alarm.text)['alarm_id'], True except Exception as exc: - log.warning("Alarm update could not be performed: %s", exc) - return None, False + log.exception("Alarm update could not be performed: ") return None, False def check_payload(self, values, metric_name, resource_id, diff --git a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py index 9e69ee7..bb396e5 100644 --- a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py +++ b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py @@ -75,7 +75,7 @@ class Metrics(object): # Initializer a producer to send responses back to SO self._producer = KafkaProducer("metric_response") - def metric_calls(self, message): + def metric_calls(self, message, vim_uuid): """Consume info from the message bus to manage metric requests.""" try: values = json.loads(message.value) @@ -83,16 +83,16 @@ class Metrics(object): values = yaml.safe_load(message.value) log.info("OpenStack metric action required.") - auth_token = Common.get_auth_token(values['vim_uuid']) + auth_token = Common.get_auth_token(vim_uuid) - endpoint = Common.get_endpoint("metric", values['vim_uuid']) + endpoint = Common.get_endpoint("metric", vim_uuid) if 'metric_name' in values and values['metric_name'] not in METRIC_MAPPINGS.keys(): raise ValueError('Metric ' + values['metric_name'] + ' is not supported.') if message.key == "create_metric_request": # Configure metric - metric_details = values['metric_create'] + metric_details = values['metric_create_request'] metric_id, resource_id, status = self.configure_metric( endpoint, auth_token, metric_details) @@ -100,7 +100,7 @@ class Metrics(object): try: resp_message = self._response.generate_response( 'create_metric_response', status=status, - cor_id=values['correlation_id'], + cor_id=metric_details['correlation_id'], metric_id=metric_id, r_id=resource_id) log.info("Response messages: %s", resp_message) self._producer.create_metrics_resp( @@ -156,7 +156,7 @@ class Metrics(object): # Log and 
send a response back to this effect log.warning("Gnocchi doesn't support metric configuration\ updates.") - req_details = values['metric_create'] + req_details = values['metric_create_request'] metric_name = req_details['metric_name'] resource_id = req_details['resource_uuid'] metric_id = self.get_metric_id( @@ -166,13 +166,13 @@ class Metrics(object): try: resp_message = self._response.generate_response( 'update_metric_response', status=False, - cor_id=values['correlation_id'], + cor_id=req_details['correlation_id'], r_id=resource_id, m_id=metric_id) log.info("Response message: %s", resp_message) self._producer.update_metric_response( 'update_metric_response', resp_message) except Exception as exc: - log.warning("Failed to send an update response:%s", exc) + log.exception("Failed to send an update response:") elif message.key == "list_metric_request": list_details = values['metrics_list_request'] @@ -223,7 +223,7 @@ class Metrics(object): 'unit': values['metric_unit']}} result = Common.perform_request( res_url, auth_token, req_type="post", - payload=json.dumps(payload)) + payload=json.dumps(payload, sort_keys=True)) # Get id of newly created metric for row in json.loads(result.text): if row['name'] == metric_name: @@ -243,7 +243,7 @@ class Metrics(object): resource_payload = json.dumps({'id': resource_id, 'metrics': { - metric_name: metric}}) + metric_name: metric}}, sort_keys=True) resource = Common.perform_request( url, auth_token, req_type="post", diff --git a/osm_mon/plugins/OpenStack/response.py b/osm_mon/plugins/OpenStack/response.py index bd1133e..c41f772 100644 --- a/osm_mon/plugins/OpenStack/response.py +++ b/osm_mon/plugins/OpenStack/response.py @@ -68,7 +68,7 @@ class OpenStack_Response(object): alarm_list_resp = {"schema_version": schema_version, "schema_type": "list_alarm_response", "correlation_id": kwargs['cor_id'], - "list_alarm_resp": kwargs['alarm_list']} + "list_alarm_response": kwargs['alarm_list']} return json.dumps(alarm_list_resp) def create_alarm_response(self, **kwargs): diff --git a/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py b/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py index 17f4fbd..bd86a50 100644 --- a/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py +++ b/osm_mon/plugins/vRealiseOps/mon_plugin_vrops.py @@ -28,6 +28,7 @@ Monitoring metrics & creating Alarm definitions in vROPs import requests import logging +import six from pyvcloud.vcd.client import BasicLoginCredentials from pyvcloud.vcd.client import Client API_VERSION = '5.9' @@ -41,8 +42,8 @@ import os import datetime from socket import getfqdn -from requests.packages.urllib3.exceptions import InsecureRequestWarning -requests.packages.urllib3.disable_warnings(InsecureRequestWarning) +import urllib3 +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) OPERATION_MAPPING = {'GE':'GT_EQ', 'LE':'LT_EQ', 'GT':'GT', 'LT':'LT', 'EQ':'EQ'} severity_mano2vrops = {'WARNING':'WARNING', 'MINOR':'WARNING', 'MAJOR':"IMMEDIATE",\ @@ -129,18 +130,18 @@ class MonPlugin(): #1) get alarm & metrics parameters from plugin specific file def_a_params = self.get_default_Params(config_dict['alarm_name']) if not def_a_params: - self.logger.warn("Alarm not supported: {}".format(config_dict['alarm_name'])) + self.logger.warning("Alarm not supported: {}".format(config_dict['alarm_name'])) return None metric_key_params = self.get_default_Params(config_dict['metric_name']) if not metric_key_params: - self.logger.warn("Metric not supported: {}".format(config_dict['metric_name'])) + self.logger.warning("Metric not 
supported: {}".format(config_dict['metric_name'])) return None #1.2) Check if alarm definition already exists vrops_alarm_name = def_a_params['vrops_alarm']+ '-' + config_dict['resource_uuid'] alert_def_list = self.get_alarm_defination_by_name(vrops_alarm_name) if alert_def_list: - self.logger.warn("Alarm already exists: {}. Try updating by update_alarm_request"\ + self.logger.warning("Alarm already exists: {}. Try updating by update_alarm_request"\ .format(vrops_alarm_name)) return None @@ -158,13 +159,13 @@ class MonPlugin(): if symptom_uuid is not None: self.logger.info("Symptom defined: {} with ID: {}".format(symptom_params['symptom_name'],symptom_uuid)) else: - self.logger.warn("Failed to create Symptom: {}".format(symptom_params['symptom_name'])) + self.logger.warning("Failed to create Symptom: {}".format(symptom_params['symptom_name'])) return None #3) create alert definition #To Do - Get type & subtypes for all 5 alarms alarm_params = {'name':vrops_alarm_name, 'description':config_dict['description']\ - if config_dict.has_key('description') and config_dict['description'] is not None else config_dict['alarm_name'], + if 'description' in config_dict and config_dict['description'] is not None else config_dict['alarm_name'], 'adapterKindKey':def_a_params['adapter_kind'], 'resourceKindKey':def_a_params['resource_kind'], 'waitCycles':1, 'cancelCycles':1, @@ -175,7 +176,7 @@ class MonPlugin(): alarm_def = self.create_alarm_definition(alarm_params) if alarm_def is None: - self.logger.warn("Failed to create Alert: {}".format(alarm_params['name'])) + self.logger.warning("Failed to create Alert: {}".format(alarm_params['name'])) return None self.logger.info("Alarm defined: {} with ID: {}".format(alarm_params['name'],alarm_def)) @@ -183,13 +184,13 @@ class MonPlugin(): #4) Find vm_moref_id from vApp uuid in vCD vm_moref_id = self.get_vm_moref_id(config_dict['resource_uuid']) if vm_moref_id is None: - self.logger.warn("Failed to find vm morefid for vApp in vCD: {}".format(config_dict['resource_uuid'])) + self.logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(config_dict['resource_uuid'])) return None #5) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification resource_id = self.get_vm_resource_id(vm_moref_id) if resource_id is None: - self.logger.warn("Failed to find resource in vROPs: {}".format(config_dict['resource_uuid'])) + self.logger.warning("Failed to find resource in vROPs: {}".format(config_dict['resource_uuid'])) return None #6) Configure alarm notification for a particular VM using it's resource_id @@ -285,7 +286,7 @@ class MonPlugin(): data=json.dumps(data)) if resp.status_code != 201: - self.logger.warn("Failed to create Symptom definition: {}, response {}"\ + self.logger.warning("Failed to create Symptom definition: {}, response {}"\ .format(symptom_params['symptom_name'], resp.content)) return None @@ -296,7 +297,7 @@ class MonPlugin(): return symptom_id except Exception as exp: - self.logger.warn("Error creating symptom definition : {}\n{}"\ + self.logger.warning("Error creating symptom definition : {}\n{}"\ .format(exp, traceback.format_exc())) @@ -359,7 +360,7 @@ class MonPlugin(): data=json.dumps(data)) if resp.status_code != 201: - self.logger.warn("Failed to create Alarm definition: {}, response {}"\ + self.logger.warning("Failed to create Alarm definition: {}, response {}"\ .format(alarm_params['name'], resp.content)) return None @@ -370,7 +371,7 @@ class MonPlugin(): return alarm_uuid except Exception as exp: - 
self.logger.warn("Error creating alarm definition : {}\n{}".format(exp, traceback.format_exc())) + self.logger.warning("Error creating alarm definition : {}\n{}".format(exp, traceback.format_exc())) def configure_rest_plugin(self): @@ -427,7 +428,7 @@ class MonPlugin(): data=json.dumps(data)) if resp.status_code is not 201: - self.logger.warn("Failed to create REST Plugin: {} for url: {}, \nresponse code: {},"\ + self.logger.warning("Failed to create REST Plugin: {} for url: {}, \nresponse code: {},"\ "\nresponse content: {}".format(plugin_name, webhook_url,\ resp.status_code, resp.content)) return None @@ -437,13 +438,13 @@ class MonPlugin(): plugin_id = resp_data['pluginId'] if plugin_id is None: - self.logger.warn("Failed to get REST Plugin ID for {}, url: {}".format(plugin_name, webhook_url)) + self.logger.warning("Failed to get REST Plugin ID for {}, url: {}".format(plugin_name, webhook_url)) return None else: self.logger.info("Created REST Plugin: {} with ID : {} for url: {}".format(plugin_name, plugin_id, webhook_url)) status = self.enable_rest_plugin(plugin_id, plugin_name) if status is False: - self.logger.warn("Failed to enable created REST Plugin: {} for url: {}".format(plugin_name, webhook_url)) + self.logger.warning("Failed to enable created REST Plugin: {} for url: {}".format(plugin_name, webhook_url)) return None else: self.logger.info("Enabled REST Plugin: {} for url: {}".format(plugin_name, webhook_url)) @@ -463,7 +464,7 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Failed to REST GET Alarm plugin details \nResponse code: {}\nResponse content: {}"\ + self.logger.warning("Failed to REST GET Alarm plugin details \nResponse code: {}\nResponse content: {}"\ .format(resp.status_code, resp.content)) return None @@ -475,7 +476,7 @@ class MonPlugin(): plugin_id = notify_plugin.get('pluginId') if plugin_id is None: - self.logger.warn("REST plugin {} not found".format(plugin_name)) + self.logger.warning("REST plugin {} not found".format(plugin_name)) return None else: self.logger.info("Found REST Plugin: {}".format(plugin_name)) @@ -502,7 +503,7 @@ class MonPlugin(): verify = False) if resp.status_code is not 204: - self.logger.warn("Failed to enable REST plugin {}. \nResponse code {}\nResponse Content: {}"\ + self.logger.warning("Failed to enable REST plugin {}. 
\nResponse code {}\nResponse Content: {}"\ .format(plugin_name, resp.status_code, resp.content)) return False @@ -510,7 +511,7 @@ class MonPlugin(): return True except Exception as exp: - self.logger.warn("Error enabling REST plugin for {} plugin: Exception: {}\n{}"\ + self.logger.warning("Error enabling REST plugin for {} plugin: Exception: {}\n{}"\ .format(plugin_name, exp, traceback.format_exc())) def create_alarm_notification_rule(self, alarm_name, alarm_id, resource_id): @@ -531,7 +532,7 @@ class MonPlugin(): #1) Find the REST Plugin id details for - MON_module_REST_Plugin plugin_id = self.check_if_plugin_configured(plugin_name) if plugin_id is None: - self.logger.warn("Failed to get REST plugin_id for : {}".format('MON_module_REST_Plugin')) + self.logger.warning("Failed to get REST plugin_id for : {}".format('MON_module_REST_Plugin')) return None #2) Create Alarm notification rule @@ -556,7 +557,7 @@ class MonPlugin(): data=json.dumps(data)) if resp.status_code is not 201: - self.logger.warn("Failed to create Alarm notification rule {} for {} alarm."\ + self.logger.warning("Failed to create Alarm notification rule {} for {} alarm."\ "\nResponse code: {}\nResponse content: {}"\ .format(notification_name, alarm_name, resp.status_code, resp.content)) return None @@ -583,7 +584,7 @@ class MonPlugin(): return vm_moref_id except Exception as exp: - self.logger.warn("Error occurred while getting VM moref ID for VM : {}\n{}"\ + self.logger.warning("Error occurred while getting VM moref ID for VM : {}\n{}"\ .format(exp, traceback.format_exc())) @@ -606,7 +607,7 @@ class MonPlugin(): vca = self.connect_as_admin() if not vca: - self.logger.warn("Failed to connect to vCD") + self.logger.warning("Failed to connect to vCD") return parsed_respond url_list = [self.vcloud_site, '/api/vApp/vapp-', vapp_uuid] @@ -620,7 +621,7 @@ class MonPlugin(): verify=False) if response.status_code != 200: - self.logger.warn("REST API call {} failed. Return status code {}"\ + self.logger.warning("REST API call {} failed. 
Return status code {}"\ .format(get_vapp_restcall, response.content)) return parsed_respond @@ -645,7 +646,7 @@ class MonPlugin(): parsed_respond["vm_vcenter_info"]= vm_vcenter_info except Exception as exp : - self.logger.warn("Error occurred calling rest api for getting vApp details: {}\n{}"\ + self.logger.warning("Error occurred calling rest api for getting vApp details: {}\n{}"\ .format(exp, traceback.format_exc())) return parsed_respond @@ -669,7 +670,7 @@ class MonPlugin(): client_as_admin.set_credentials(BasicLoginCredentials(self.admin_username, org,\ self.admin_password)) except Exception as e: - self.logger.warn("Can't connect to a vCloud director as: {} with exception {}"\ + self.logger.warning("Can't connect to a vCloud director as: {} with exception {}"\ .format(self.admin_username, e)) return client_as_admin @@ -689,7 +690,7 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Failed to get resource details from vROPs for {}"\ + self.logger.warning("Failed to get resource details from vROPs for {}"\ "\nResponse code:{}\nResponse Content: {}"\ .format(vm_moref_id, resp.status_code, resp.content)) return None @@ -713,7 +714,7 @@ class MonPlugin(): .format(vm_resource_id, vm_moref_id)) except Exception as exp: - self.logger.warn("get_vm_resource_id: Error in parsing {}\n{}"\ + self.logger.warning("get_vm_resource_id: Error in parsing {}\n{}"\ .format(exp, traceback.format_exc())) return vm_resource_id @@ -753,13 +754,13 @@ class MonPlugin(): return_data['tenant_uuid'] = None return_data['unit'] = None #return_data['tenant_id'] = self.tenant_id - #self.logger.warn("return_data: {}".format(return_data)) + #self.logger.warning("return_data: {}".format(return_data)) #1) Get metric details from plugin specific file & format it into vROPs metrics metric_key_params = self.get_default_Params(metric['metric_name']) if not metric_key_params: - self.logger.warn("Metric not supported: {}".format(metric['metric_name'])) + self.logger.warning("Metric not supported: {}".format(metric['metric_name'])) #To Do: Return message return return_data @@ -769,12 +770,12 @@ class MonPlugin(): #2.a) Find vm_moref_id from vApp uuid in vCD vm_moref_id = self.get_vm_moref_id(metric['resource_uuid']) if vm_moref_id is None: - self.logger.warn("Failed to find vm morefid for vApp in vCD: {}".format(metric['resource_uuid'])) + self.logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(metric['resource_uuid'])) return return_data #2.b) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification resource_id = self.get_vm_resource_id(vm_moref_id) if resource_id is None: - self.logger.warn("Failed to find resource in vROPs: {}".format(metric['resource_uuid'])) + self.logger.warning("Failed to find resource in vROPs: {}".format(metric['resource_uuid'])) return return_data #3) Calculate begin & end time for period & period unit @@ -799,21 +800,21 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Failed to retrive Metric data from vROPs for {}\nResponse code:{}\nResponse Content: {}"\ + self.logger.warning("Failed to retrive Metric data from vROPs for {}\nResponse code:{}\nResponse Content: {}"\ .format(metric['metric_name'], resp.status_code, resp.content)) return return_data #5) Convert to required format metrics_data = {} json_data = json.loads(resp.content) - for resp_key,resp_val in json_data.iteritems(): + for resp_key,resp_val in six.iteritems(json_data): if 
resp_key == 'values': data = json_data['values'][0] - for data_k,data_v in data.iteritems(): + for data_k,data_v in six.iteritems(data): if data_k == 'stat-list': stat_list = data_v - for stat_list_k,stat_list_v in stat_list.iteritems(): - for stat_keys,stat_vals in stat_list_v[0].iteritems(): + for stat_list_k,stat_list_v in six.iteritems(stat_list): + for stat_keys,stat_vals in six.iteritems(stat_list_v[0]): if stat_keys == 'timestamps': metrics_data['time_series'] = stat_list_v[0]['timestamps'] if stat_keys == 'data': @@ -827,7 +828,7 @@ class MonPlugin(): """Update alarm configuration (i.e. Symptom & alarm) as per request """ if new_alarm_config.get('alarm_uuid') is None: - self.logger.warn("alarm_uuid is required to update an Alarm") + self.logger.warning("alarm_uuid is required to update an Alarm") return None #1) Get Alarm details from it's uuid & find the symptom defination alarm_details_json, alarm_details = self.get_alarm_defination_details(new_alarm_config['alarm_uuid']) @@ -862,7 +863,7 @@ class MonPlugin(): """Get alarm details based on alarm UUID """ if alarm_uuid is None: - self.logger.warn("get_alarm_defination_details: Alarm UUID not provided") + self.logger.warning("get_alarm_defination_details: Alarm UUID not provided") return None, None alarm_details = {} @@ -875,7 +876,7 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Alarm to be updated not found: {}\nResponse code:{}\nResponse Content: {}"\ + self.logger.warning("Alarm to be updated not found: {}\nResponse code:{}\nResponse Content: {}"\ .format(alarm_uuid, resp.status_code, resp.content)) return None, None @@ -890,7 +891,7 @@ class MonPlugin(): alarm_details['sub_type'] = json_data['subType'] alarm_details['symptom_definition_id'] = json_data['states'][0]['base-symptom-set']['symptomDefinitionIds'][0] except Exception as exp: - self.logger.warn("Exception while retriving alarm defination details: {}".format(exp)) + self.logger.warning("Exception while retriving alarm defination details: {}".format(exp)) return None, None return json_data, alarm_details @@ -903,7 +904,7 @@ class MonPlugin(): alert_match_list = [] if alarm_name is None: - self.logger.warn("get_alarm_defination_by_name: Alarm name not provided") + self.logger.warning("get_alarm_defination_by_name: Alarm name not provided") return alert_match_list json_data = {} @@ -915,7 +916,7 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("get_alarm_defination_by_name: Error in response: {}\nResponse code:{}"\ + self.logger.warning("get_alarm_defination_by_name: Error in response: {}\nResponse code:{}"\ "\nResponse Content: {}".format(alarm_name, resp.status_code, resp.content)) return alert_match_list @@ -923,14 +924,14 @@ class MonPlugin(): json_data = json.loads(resp.content) if json_data['alertDefinitions'] is not None: alerts_list = json_data['alertDefinitions'] - alert_match_list = filter(lambda alert: alert['name'] == alarm_name, alerts_list) + alert_match_list = list(filter(lambda alert: alert['name'] == alarm_name, alerts_list)) status = False if not alert_match_list else True #self.logger.debug("Found alert_match_list: {}for larm_name: {},\nstatus: {}".format(alert_match_list, alarm_name,status)) return alert_match_list except Exception as exp: - self.logger.warn("Exception while searching alarm defination: {}".format(exp)) + self.logger.warning("Exception while searching alarm defination: {}".format(exp)) return alert_match_list @@ 
-943,18 +944,18 @@ class MonPlugin(): if symptom_details is None: return None - if new_alarm_config.has_key('severity') and new_alarm_config['severity'] is not None: + if 'severity' in new_alarm_config and new_alarm_config['severity'] is not None: symptom_details['state']['severity'] = severity_mano2vrops[new_alarm_config['severity']] - if new_alarm_config.has_key('operation') and new_alarm_config['operation'] is not None: + if 'operation' in new_alarm_config and new_alarm_config['operation'] is not None: symptom_details['state']['condition']['operator'] = OPERATION_MAPPING[new_alarm_config['operation']] - if new_alarm_config.has_key('threshold_value') and new_alarm_config['threshold_value'] is not None: + if 'threshold_value' in new_alarm_config and new_alarm_config['threshold_value'] is not None: symptom_details['state']['condition']['value'] = new_alarm_config['threshold_value'] #Find vrops metric key from metric_name, if required """ - if new_alarm_config.has_key('metric_name') and new_alarm_config['metric_name'] is not None: + if 'metric_name' in new_alarm_config and new_alarm_config['metric_name'] is not None: metric_key_params = self.get_default_Params(new_alarm_config['metric_name']) if not metric_key_params: - self.logger.warn("Metric not supported: {}".format(config_dict['metric_name'])) + self.logger.warning("Metric not supported: {}".format(config_dict['metric_name'])) return None symptom_details['state']['condition']['key'] = metric_key_params['metric_key'] """ @@ -970,7 +971,7 @@ class MonPlugin(): data=data) if resp.status_code != 200: - self.logger.warn("Failed to update Symptom definition: {}, response {}"\ + self.logger.warning("Failed to update Symptom definition: {}, response {}"\ .format(symptom_uuid, resp.content)) return None @@ -980,7 +981,7 @@ class MonPlugin(): .format(symptom_uuid, new_alarm_config['alarm_uuid'])) return symptom_uuid else: - self.logger.warn("Failed to update Symptom Defination {} for : {}"\ + self.logger.warning("Failed to update Symptom Defination {} for : {}"\ .format(symptom_uuid, new_alarm_config['alarm_uuid'])) return None @@ -990,7 +991,7 @@ class MonPlugin(): """ symptom_details = {} if symptom_uuid is None: - self.logger.warn("get_symptom_defination_details: Symptom UUID not provided") + self.logger.warning("get_symptom_defination_details: Symptom UUID not provided") return None api_url = '/suite-api/api/symptomdefinitions/' @@ -1001,7 +1002,7 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Symptom defination not found {} \nResponse code:{}\nResponse Content: {}"\ + self.logger.warning("Symptom defination not found {} \nResponse code:{}\nResponse Content: {}"\ .format(symptom_uuid, resp.status_code, resp.content)) return None @@ -1013,9 +1014,9 @@ class MonPlugin(): def reconfigure_alarm(self, alarm_details_json, new_alarm_config): """Reconfigure alarm defination as per input """ - if new_alarm_config.has_key('severity') and new_alarm_config['severity'] is not None: + if 'severity' in new_alarm_config and new_alarm_config['severity'] is not None: alarm_details_json['states'][0]['severity'] = new_alarm_config['severity'] - if new_alarm_config.has_key('description') and new_alarm_config['description'] is not None: + if 'description' in new_alarm_config and new_alarm_config['description'] is not None: alarm_details_json['description'] = new_alarm_config['description'] api_url = '/suite-api/api/alertdefinitions' @@ -1028,7 +1029,7 @@ class MonPlugin(): data=data) if 
resp.status_code != 200: - self.logger.warn("Failed to update Alarm definition: {}, response code {}, response content: {}"\ + self.logger.warning("Failed to update Alarm definition: {}, response code {}, response content: {}"\ .format(alarm_details_json['id'], resp.status_code, resp.content)) return None else: @@ -1080,7 +1081,7 @@ class MonPlugin(): auth=(self.vrops_user, self.vrops_password), verify = False, headers = headers) if resp.status_code is not 204: - self.logger.warn("Failed to delete notification rules for {}".format(alarm_name)) + self.logger.warning("Failed to delete notification rules for {}".format(alarm_name)) return None else: self.logger.info("Deleted notification rules for {}".format(alarm_name)) @@ -1097,12 +1098,12 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Failed to get notification rules details for {}"\ + self.logger.warning("Failed to get notification rules details for {}"\ .format(alarm_name)) return None notifications = json.loads(resp.content) - if notifications is not None and notifications.has_key('notification-rule'): + if notifications is not None and 'notification-rule' in notifications: notifications_list = notifications['notification-rule'] for dict in notifications_list: if dict['name'] is not None and dict['name'] == alarm_notify_id: @@ -1111,7 +1112,7 @@ class MonPlugin(): .format(notification_id, alarm_name)) return notification_id - self.logger.warn("Notification id to be deleted not found for {}"\ + self.logger.warning("Notification id to be deleted not found for {}"\ .format(alarm_name)) return None @@ -1124,7 +1125,7 @@ class MonPlugin(): auth=(self.vrops_user, self.vrops_password), verify = False, headers = headers) if resp.status_code is not 204: - self.logger.warn("Failed to delete alarm definition {}".format(alarm_id)) + self.logger.warning("Failed to delete alarm definition {}".format(alarm_id)) return None else: self.logger.info("Deleted alarm definition {}".format(alarm_id)) @@ -1139,7 +1140,7 @@ class MonPlugin(): auth=(self.vrops_user, self.vrops_password), verify = False, headers = headers) if resp.status_code is not 204: - self.logger.warn("Failed to delete symptom definition {}".format(symptom_id)) + self.logger.warning("Failed to delete symptom definition {}".format(symptom_id)) return None else: self.logger.info("Deleted symptom definition {}".format(symptom_id)) @@ -1157,7 +1158,7 @@ class MonPlugin(): return status metric_key_params = self.get_default_Params(metric_info['metric_name']) if not metric_key_params: - self.logger.warn("Metric not supported: {}".format(metric_info['metric_name'])) + self.logger.warning("Metric not supported: {}".format(metric_info['metric_name'])) return status else: #If Metric is supported, verify optional metric unit & return status @@ -1179,7 +1180,7 @@ class MonPlugin(): triggered_alarms_list = [] if list_alarm_input.get('resource_uuid') is None: - self.logger.warn("Resource UUID is required to get triggered alarms list") + self.logger.warning("Resource UUID is required to get triggered alarms list") return triggered_alarms_list #1)Find vROPs resource ID using RO resource UUID @@ -1197,13 +1198,13 @@ class MonPlugin(): #1) Find vm_moref_id from vApp uuid in vCD vm_moref_id = self.get_vm_moref_id(ro_resource_uuid) if vm_moref_id is None: - self.logger.warn("Failed to find vm morefid for vApp in vCD: {}".format(ro_resource_uuid)) + self.logger.warning("Failed to find vm morefid for vApp in vCD: {}".format(ro_resource_uuid)) return 
None #2) Based on vm_moref_id, find VM's corresponding resource_id in vROPs to set notification vrops_resource_id = self.get_vm_resource_id(vm_moref_id) if vrops_resource_id is None: - self.logger.warn("Failed to find resource in vROPs: {}".format(ro_resource_uuid)) + self.logger.warning("Failed to find resource in vROPs: {}".format(ro_resource_uuid)) return None return vrops_resource_id @@ -1219,12 +1220,12 @@ class MonPlugin(): verify = False, headers = headers) if resp.status_code is not 200: - self.logger.warn("Failed to get triggered alarms for {}"\ + self.logger.warning("Failed to get triggered alarms for {}"\ .format(ro_resource_uuid)) return None all_alerts = json.loads(resp.content) - if all_alerts.has_key('alerts'): + if 'alerts' in all_alerts: if not all_alerts['alerts']: self.logger.info("No alarms present on resource {}".format(ro_resource_uuid)) return resource_alarms @@ -1241,7 +1242,7 @@ class MonPlugin(): alarm_instance['vim_type'] = 'VMware' #find severity of alarm severity = None - for key,value in severity_mano2vrops.iteritems(): + for key,value in six.iteritems(severity_mano2vrops): if value == alarm['alertLevel']: severity = key if severity is None: diff --git a/osm_mon/plugins/vRealiseOps/plugin_receiver.py b/osm_mon/plugins/vRealiseOps/plugin_receiver.py index 9526e64..193ea16 100644 --- a/osm_mon/plugins/vRealiseOps/plugin_receiver.py +++ b/osm_mon/plugins/vRealiseOps/plugin_receiver.py @@ -34,6 +34,8 @@ import traceback #Core producer +import six + from osm_mon.plugins.vRealiseOps.mon_plugin_vrops import MonPlugin sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..')) @@ -122,18 +124,18 @@ class PluginReceiver(): self.publish_metrics_data_status(metrics_data) elif message.key == "create_metric_request": metric_info = json.loads(message.value) - metric_status = self.verify_metric(metric_info['metric_create']) + metric_status = self.verify_metric(metric_info['metric_create_request']) #Publish message using producer self.publish_create_metric_response(metric_info, metric_status) elif message.key == "update_metric_request": metric_info = json.loads(message.value) - metric_status = self.verify_metric(metric_info['metric_create']) + metric_status = self.verify_metric(metric_info['metric_create_request']) #Publish message using producer self.publish_update_metric_response(metric_info, metric_status) elif message.key == "delete_metric_request": metric_info = json.loads(message.value) #Deleting Metric Data is not allowed. 
Publish status as False - self.logger.warn("Deleting Metric is not allowed: {}".format(metric_info['metric_name'])) + self.logger.warning("Deleting Metric is not allowed: {}".format(metric_info['metric_name'])) #Publish message using producer self.publish_delete_metric_response(metric_info) elif message.topic == 'access_credentials': @@ -253,7 +255,7 @@ class PluginReceiver(): "metric_create_response": { "metric_uuid":'0', - "resource_uuid":metric_info['metric_create']['resource_uuid'], + "resource_uuid":metric_info['metric_create_request']['resource_uuid'], "status":metric_status } } @@ -273,7 +275,7 @@ class PluginReceiver(): "metric_update_response": { "metric_uuid":'0', - "resource_uuid":metric_info['metric_create']['resource_uuid'], + "resource_uuid":metric_info['metric_create_request']['resource_uuid'], "status":metric_status } } @@ -287,7 +289,7 @@ class PluginReceiver(): """ topic = 'metric_response' msg_key = 'delete_metric_response' - if metric_info.has_key('tenant_uuid') and metric_info['tenant_uuid'] is not None: + if 'tenant_uuid' in metric_info and metric_info['tenant_uuid'] is not None: tenant_uuid = metric_info['tenant_uuid'] else: tenant_uuid = None @@ -322,7 +324,7 @@ class PluginReceiver(): response_msg = {"schema_version":schema_version, "schema_type":"list_alarm_response", "correlation_id":list_alarm_input['alarm_list_request']['correlation_id'], - "list_alarm_resp":triggered_alarm_list + "list_alarm_response":triggered_alarm_list } self.logger.info("Publishing response:\nTopic={}\nKey={}\nValue={}"\ .format(topic, msg_key, response_msg)) @@ -357,7 +359,7 @@ class PluginReceiver(): for config in root: if config.tag == 'Access_Config': for param in config: - for key,val in access_info.iteritems(): + for key,val in six.iteritems(access_info): if param.tag == key: #print param.tag, val param.text = val @@ -365,7 +367,7 @@ class PluginReceiver(): tree.write(CONFIG_FILE_PATH) wr_status = True except Exception as exp: - self.logger.warn("Failed to update Access Config Parameters: {}".format(exp)) + self.logger.warning("Failed to update Access Config Parameters: {}".format(exp)) return wr_status diff --git a/osm_mon/plugins/vRealiseOps/vrops_config.xml b/osm_mon/plugins/vRealiseOps/vrops_config.xml index 67774e4..cb4ab7e 100644 --- a/osm_mon/plugins/vRealiseOps/vrops_config.xml +++ b/osm_mon/plugins/vRealiseOps/vrops_config.xml @@ -1,25 +1,3 @@ - Avg_Mem_Usage_Above_Thr @@ -147,22 +125,20 @@ https://192.169.241.123 - Admin - VMware1! - https://mano-vcd-1.corp.local - administrator - VMware1! + admin + vmware + https://192.169.241.15 + admin + vmware https://192.169.241.104 admin VMware1! - 192.169.241.103 + 192.169.241.13 443 - administrator@vsphere.local - VMware1! 
- Org2-VDC-PVDC1 + admin + vmware + Org2 Org2 - Org2-VDC-PVDC1 + Org2 - - - + \ No newline at end of file diff --git a/osm_mon/test/OpenStack/integration/test_alarm_integration.py b/osm_mon/test/OpenStack/integration/test_alarm_integration.py index b0cfd32..b04019b 100644 --- a/osm_mon/test/OpenStack/integration/test_alarm_integration.py +++ b/osm_mon/test/OpenStack/integration/test_alarm_integration.py @@ -72,9 +72,7 @@ class AlarmIntegrationTest(unittest.TestCase): def test_update_alarm_req(self, resp, update_alarm, update_resp, get_creds): """Test Aodh update alarm request message from KafkaProducer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "test_id", - "alarm_update_request": + payload = {"alarm_update_request": {"correlation_id": 123, "alarm_uuid": "alarm_id", "metric_uuid": "metric_id"}} @@ -85,11 +83,10 @@ class AlarmIntegrationTest(unittest.TestCase): value=json.dumps(payload)) for message in self.req_consumer: - # Check the vim desired by the message if message.key == "update_alarm_request": # Mock a valid alarm update update_alarm.return_value = "alarm_id", True - self.alarms.alarming(message) + self.alarms.alarming(message, 'test_id') # A response message is generated and sent via MON's producer resp.assert_called_with( @@ -111,9 +108,7 @@ class AlarmIntegrationTest(unittest.TestCase): def test_create_alarm_req(self, resp, config_alarm, create_resp, get_creds): """Test Aodh create alarm request message from KafkaProducer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "test_id", - "alarm_create_request": + payload = {"alarm_create_request": {"correlation_id": 123, "alarm_name": "my_alarm", "metric_name": "my_metric", @@ -126,11 +121,10 @@ class AlarmIntegrationTest(unittest.TestCase): value=json.dumps(payload)) for message in self.req_consumer: - # Check the vim desired by the message if message.key == "create_alarm_request": # Mock a valid alarm creation config_alarm.return_value = "alarm_id", True - self.alarms.alarming(message) + self.alarms.alarming(message, 'test_id') # A response message is generated and sent via MON's produce resp.assert_called_with( @@ -151,9 +145,7 @@ class AlarmIntegrationTest(unittest.TestCase): def test_list_alarm_req(self, resp, list_alarm, list_resp, get_creds): """Test Aodh list alarm request message from KafkaProducer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "test_id", - "alarm_list_request": + payload = {"alarm_list_request": {"correlation_id": 123, "resource_uuid": "resource_id", }} @@ -163,11 +155,10 @@ class AlarmIntegrationTest(unittest.TestCase): get_creds.return_value = mock_creds for message in self.req_consumer: - # Check the vim desired by the message if message.key == "list_alarm_request": # Mock an empty list generated by the request list_alarm.return_value = [] - self.alarms.alarming(message) + self.alarms.alarming(message, 'test_id') # Response message is generated resp.assert_called_with( @@ -189,9 +180,7 @@ class AlarmIntegrationTest(unittest.TestCase): def test_delete_alarm_req(self, resp, del_resp, del_alarm, get_creds): """Test Aodh delete alarm request message from KafkaProducer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "test_id", - "alarm_delete_request": + payload = {"alarm_delete_request": {"correlation_id": 123, "alarm_uuid": "alarm_id", }} @@ -201,9 +190,8 @@ class 
AlarmIntegrationTest(unittest.TestCase): get_creds.return_value = mock_creds for message in self.req_consumer: - # Check the vim desired by the message if message.key == "delete_alarm_request": - self.alarms.alarming(message) + self.alarms.alarming(message, 'test_id') # Response message is generated and sent by MON's producer resp.assert_called_with( @@ -222,9 +210,7 @@ class AlarmIntegrationTest(unittest.TestCase): def test_ack_alarm_req(self, ack_alarm, get_creds): """Test Aodh acknowledge alarm request message from KafkaProducer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "test_id", - "ack_details": + payload = {"ack_details": {"alarm_uuid": "alarm_id", }} self.producer.send('alarm_request', key="acknowledge_alarm", @@ -233,9 +219,8 @@ class AlarmIntegrationTest(unittest.TestCase): get_creds.return_value = mock_creds for message in self.req_consumer: - # Check the vim desired by the message if message.key == "acknowledge_alarm": - self.alarms.alarming(message) + self.alarms.alarming(message, 'test_id') return self.fail("No message received in consumer") diff --git a/osm_mon/test/OpenStack/integration/test_metric_integration.py b/osm_mon/test/OpenStack/integration/test_metric_integration.py index c130973..45a34d3 100644 --- a/osm_mon/test/OpenStack/integration/test_metric_integration.py +++ b/osm_mon/test/OpenStack/integration/test_metric_integration.py @@ -73,23 +73,18 @@ class MetricIntegrationTest(unittest.TestCase): def test_create_metric_req(self, resp, create_resp, config_metric): """Test Gnocchi create metric request message from producer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "1", - "correlation_id": 123, - "metric_create": - {"metric_name": "cpu_utilization", - "resource_uuid": "resource_id"}} + payload = {"metric_create_request": {"correlation_id": 123, + "metric_name": "cpu_utilization", + "resource_uuid": "resource_id"}} self.producer.send('metric_request', key="create_metric_request", value=json.dumps(payload)) for message in self.req_consumer: - # Check the vim desired by the message - vim_type = json.loads(message.value)["vim_type"].lower() - if vim_type == "openstack": + if message.key == "create_metric_request": # A valid metric is created config_metric.return_value = "metric_id", "resource_id", True - self.metric_req.metric_calls(message) + self.metric_req.metric_calls(message, 'test_id') # A response message is generated and sent by MON's producer resp.assert_called_with( @@ -122,7 +117,7 @@ class MetricIntegrationTest(unittest.TestCase): if message.key == "delete_metric_request": # Metric has been deleted del_metric.return_value = True - self.metric_req.metric_calls(message) + self.metric_req.metric_calls(message, 'test_id') # A response message is generated and sent by MON's producer resp.assert_called_with( @@ -157,7 +152,7 @@ class MetricIntegrationTest(unittest.TestCase): if message.key == "read_metric_data_request": # Mock empty lists generated by the request message read_data.return_value = [], [] - self.metric_req.metric_calls(message) + self.metric_req.metric_calls(message, 'test_id') # A response message is generated and sent by MON's producer resp.assert_called_with( @@ -191,7 +186,7 @@ class MetricIntegrationTest(unittest.TestCase): if message.key == "list_metric_request": # Mock an empty list generated by the request list_metrics.return_value = [] - self.metric_req.metric_calls(message) + self.metric_req.metric_calls(message, 
'test_id') # A response message is generated and sent by MON's producer resp.assert_called_with( @@ -210,12 +205,9 @@ class MetricIntegrationTest(unittest.TestCase): def test_update_metrics_req(self, resp, update_resp, get_id): """Test Gnocchi update metric request message from KafkaProducer.""" # Set-up message, producer and consumer for tests - payload = {"vim_type": "OpenSTACK", - "vim_uuid": "test_id", - "correlation_id": 123, - "metric_create": - {"metric_name": "my_metric", - "resource_uuid": "resource_id", }} + payload = {"metric_create_request": {"metric_name": "my_metric", + "correlation_id": 123, + "resource_uuid": "resource_id", }} self.producer.send('metric_request', key="update_metric_request", value=json.dumps(payload)) @@ -225,7 +217,7 @@ class MetricIntegrationTest(unittest.TestCase): if message.key == "update_metric_request": # Gnocchi doesn't support metric updates get_id.return_value = "metric_id" - self.metric_req.metric_calls(message) + self.metric_req.metric_calls(message, 'test_id') # Response message is generated and sent via MON's producer # No metric update has taken place diff --git a/osm_mon/test/OpenStack/unit/test_alarm_req.py b/osm_mon/test/OpenStack/unit/test_alarm_req.py index 15cf63b..41b6836 100644 --- a/osm_mon/test/OpenStack/unit/test_alarm_req.py +++ b/osm_mon/test/OpenStack/unit/test_alarm_req.py @@ -30,7 +30,8 @@ import unittest import mock from osm_mon.core.auth import AuthManager -from osm_mon.core.database import VimCredentials +from osm_mon.core.database import VimCredentials, DatabaseManager +from osm_mon.core.message_bus.producer import KafkaProducer from osm_mon.plugins.OpenStack.Aodh import alarming as alarm_req from osm_mon.plugins.OpenStack.common import Common @@ -47,9 +48,10 @@ class Message(object): """Initialize a mocked message instance.""" self.topic = 'alarm_request' self.key = None - self.value = json.dumps({'vim_uuid': 'test_id', 'mock_value': 'mock_details'}) + self.value = json.dumps({'mock_value': 'mock_details'}) +@mock.patch.object(KafkaProducer, 'publish', mock.Mock()) class TestAlarmKeys(unittest.TestCase): """Integration test for alarm request keys.""" @@ -69,7 +71,7 @@ class TestAlarmKeys(unittest.TestCase): get_creds.return_value = mock_creds - self.alarming.alarming(message) + self.alarming.alarming(message, 'test_id') get_token.assert_called_with('test_id') get_endpoint.assert_any_call('alarming', 'test_id') @@ -83,14 +85,16 @@ class TestAlarmKeys(unittest.TestCase): # Mock a message value and key message = Message() message.key = 'delete_alarm_request' - message.value = json.dumps({'vim_uuid': 'test_id', - 'alarm_delete_request': - {'alarm_uuid': 'my_alarm_id'}}) + message.value = json.dumps({'alarm_delete_request': { + 'correlation_id': 1, + 'alarm_uuid': 'my_alarm_id' + }}) get_creds.return_value = mock_creds + del_alarm.return_value = {} # Call the alarming functionality and check delete request - self.alarming.alarming(message) + self.alarming.alarming(message, 'test_id') del_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id') @mock.patch.object(Common, 'get_endpoint', mock.Mock()) @@ -102,13 +106,15 @@ class TestAlarmKeys(unittest.TestCase): # Mock a message with list alarm key and value message = Message() message.key = 'list_alarm_request' - message.value = json.dumps({'vim_uuid': 'test_id', 'alarm_list_request': 'my_alarm_details'}) + message.value = json.dumps({'alarm_list_request': {'correlation_id': 1}}) get_creds.return_value = mock_creds + list_alarm.return_value = [] + # Call the alarming 
functionality and check list functionality - self.alarming.alarming(message) - list_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_details') + self.alarming.alarming(message, 'test_id') + list_alarm.assert_called_with(mock.ANY, mock.ANY, {'correlation_id': 1}) @mock.patch.object(Common, 'get_auth_token', mock.Mock()) @mock.patch.object(Common, 'get_endpoint', mock.Mock()) @@ -119,18 +125,18 @@ class TestAlarmKeys(unittest.TestCase): # Mock a message with acknowledge alarm key and value message = Message() message.key = 'acknowledge_alarm' - message.value = json.dumps({'vim_uuid': 'test_id', - 'ack_details': + message.value = json.dumps({'ack_details': {'alarm_uuid': 'my_alarm_id'}}) get_creds.return_value = mock_creds # Call alarming functionality and check acknowledge functionality - self.alarming.alarming(message) + self.alarming.alarming(message, 'test_id') ack_alarm.assert_called_with(mock.ANY, mock.ANY, 'my_alarm_id') @mock.patch.object(Common, 'get_auth_token', mock.Mock()) @mock.patch.object(Common, 'get_endpoint', mock.Mock()) + @mock.patch.object(DatabaseManager, 'save_alarm', mock.Mock()) @mock.patch.object(AuthManager, 'get_credentials') @mock.patch.object(alarm_req.Alarming, 'configure_alarm') def test_config_alarm_key(self, config_alarm, get_creds): @@ -138,11 +144,11 @@ class TestAlarmKeys(unittest.TestCase): # Mock a message with config alarm key and value message = Message() message.key = 'create_alarm_request' - message.value = json.dumps({'vim_uuid': 'test_id', 'alarm_create_request': 'alarm_details'}) + message.value = json.dumps({'alarm_create_request': {'correlation_id': 1}}) get_creds.return_value = mock_creds # Call alarming functionality and check config alarm call config_alarm.return_value = 'my_alarm_id', True - self.alarming.alarming(message) - config_alarm.assert_called_with(mock.ANY, mock.ANY, mock.ANY, 'alarm_details', {}) + self.alarming.alarming(message, 'test_id') + config_alarm.assert_called_with(mock.ANY, mock.ANY, mock.ANY, {'correlation_id': 1}, {}) diff --git a/osm_mon/test/OpenStack/unit/test_alarming.py b/osm_mon/test/OpenStack/unit/test_alarming.py index 5726f69..c45c052 100644 --- a/osm_mon/test/OpenStack/unit/test_alarming.py +++ b/osm_mon/test/OpenStack/unit/test_alarming.py @@ -95,6 +95,8 @@ class TestAlarming(unittest.TestCase): check_metric.return_value = "my_metric_id" check_pay.return_value = "my_payload" + perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'}) + self.alarming.configure_alarm(alarm_endpoint, metric_endpoint, auth_token, values, {}) perf_req.assert_called_with( "alarm_endpoint/v2/alarms/", auth_token, @@ -152,6 +154,8 @@ class TestAlarming(unittest.TestCase): """Test update alarm with invalid get response.""" values = {"alarm_uuid": "my_alarm_id"} + perf_req.return_value = type('obj', (object,), {'invalid_prop': 'Invalid response'}) + self.alarming.update_alarm(alarm_endpoint, auth_token, values, {}) perf_req.assert_called_with(mock.ANY, auth_token, req_type="get") @@ -164,8 +168,8 @@ class TestAlarming(unittest.TestCase): resp = Response({"name": "my_alarm", "state": "alarm", "gnocchi_resources_threshold_rule": - {"resource_id": "my_resource_id", - "metric": "my_metric"}}) + {"resource_id": "my_resource_id", + "metric": "my_metric"}}) perf_req.return_value = resp check_pay.return_value = None values = {"alarm_uuid": "my_alarm_id"} @@ -179,11 +183,12 @@ class TestAlarming(unittest.TestCase): @mock.patch.object(Common, "perform_request") def test_update_alarm_valid(self, perf_req, check_pay): 
"""Test valid update alarm request.""" - resp = Response({"name": "my_alarm", + resp = Response({"alarm_id": "1", + "name": "my_alarm", "state": "alarm", "gnocchi_resources_threshold_rule": - {"resource_id": "my_resource_id", - "metric": "disk.write.requests"}}) + {"resource_id": "my_resource_id", + "metric": "disk.write.requests"}}) perf_req.return_value = resp values = {"alarm_uuid": "my_alarm_id"} @@ -214,13 +219,13 @@ class TestAlarming(unittest.TestCase): self.assertDictEqual( json.loads(payload), {"name": "alarm_name", "gnocchi_resources_threshold_rule": - {"resource_id": "r_id", - "metric": "disk.write.requests", - "comparison_operator": "gt", - "aggregation_method": "count", - "threshold": 12, - "granularity": 300, - "resource_type": "generic"}, + {"resource_id": "r_id", + "metric": "disk.write.requests", + "comparison_operator": "gt", + "aggregation_method": "count", + "threshold": 12, + "granularity": 300, + "resource_type": "generic"}, "severity": "low", "state": "ok", "type": "gnocchi_resources_threshold", @@ -243,13 +248,13 @@ class TestAlarming(unittest.TestCase): self.assertEqual( json.loads(payload), {"name": "alarm_name", "gnocchi_resources_threshold_rule": - {"resource_id": "r_id", - "metric": "disk.write.requests", - "comparison_operator": "gt", - "aggregation_method": "count", - "threshold": 12, - "granularity": 300, - "resource_type": "generic"}, + {"resource_id": "r_id", + "metric": "disk.write.requests", + "comparison_operator": "gt", + "aggregation_method": "count", + "threshold": 12, + "granularity": 300, + "resource_type": "generic"}, "severity": "low", "state": "alarm", "type": "gnocchi_resources_threshold", @@ -266,6 +271,8 @@ class TestAlarming(unittest.TestCase): @mock.patch.object(Common, "perform_request") def test_get_alarm_state(self, perf_req): """Test the get alarm state function.""" + perf_req.return_value = type('obj', (object,), {'text': '{"alarm_id":"1"}'}) + self.alarming.get_alarm_state(alarm_endpoint, auth_token, "alarm_id") perf_req.assert_called_with( diff --git a/osm_mon/test/OpenStack/unit/test_metric_calls.py b/osm_mon/test/OpenStack/unit/test_metric_calls.py index 51282b1..8785534 100644 --- a/osm_mon/test/OpenStack/unit/test_metric_calls.py +++ b/osm_mon/test/OpenStack/unit/test_metric_calls.py @@ -131,11 +131,13 @@ class TestMetricCalls(unittest.TestCase): "name": "metric_name", "unit": "units"}}} + perf_req.return_value = type('obj', (object,), {'text': '{"id":"1"}'}) + self.metrics.configure_metric(endpoint, auth_token, values) perf_req.assert_called_with( "/v1/resource/generic", auth_token, req_type="post", - payload=json.dumps(payload)) + payload=json.dumps(payload, sort_keys=True)) @mock.patch.object(Common, "perform_request") def test_delete_metric_req(self, perf_req): @@ -148,7 +150,7 @@ class TestMetricCalls(unittest.TestCase): @mock.patch.object(Common, "perform_request") def test_delete_metric_invalid_status(self, perf_req): """Test invalid response for delete request.""" - perf_req.return_value = "404" + perf_req.return_value = type('obj', (object,), {"status_code": "404"}) status = self.metrics.delete_metric(endpoint, auth_token, "metric_id") @@ -247,6 +249,8 @@ class TestMetricCalls(unittest.TestCase): "collection_unit": "DAY", "collection_period": 1} + perf_req.return_value = type('obj', (object,), {'text': '{"metric_data":"[]"}'}) + get_metric.return_value = "metric_id" self.metrics.read_metric_data(endpoint, auth_token, values) diff --git a/osm_mon/test/OpenStack/unit/test_metric_req.py 
b/osm_mon/test/OpenStack/unit/test_metric_req.py index 0869b56..de39ebb 100644 --- a/osm_mon/test/OpenStack/unit/test_metric_req.py +++ b/osm_mon/test/OpenStack/unit/test_metric_req.py @@ -29,6 +29,7 @@ import unittest import mock +from osm_mon.core.message_bus.producer import KafkaProducer from osm_mon.plugins.OpenStack.Gnocchi import metrics as metric_req from osm_mon.plugins.OpenStack.common import Common @@ -43,9 +44,10 @@ class Message(object): """Initialize a mocked message instance.""" self.topic = "metric_request" self.key = None - self.value = json.dumps({"vim_uuid": "test_id", "mock_message": "message_details"}) + self.value = json.dumps({"mock_message": "message_details"}) +@mock.patch.object(KafkaProducer, 'publish', mock.Mock()) class TestMetricReq(unittest.TestCase): """Integration test for metric request keys.""" @@ -54,17 +56,6 @@ class TestMetricReq(unittest.TestCase): super(TestMetricReq, self).setUp() self.metrics = metric_req.Metrics() - @mock.patch.object(Common, 'get_endpoint') - @mock.patch.object(Common, "get_auth_token") - def test_access_cred_metric_auth(self, get_token, get_endpoint): - """Test authentication with access credentials.""" - message = Message() - - self.metrics.metric_calls(message) - - get_token.assert_called_with('test_id') - get_endpoint.assert_any_call('metric', 'test_id') - @mock.patch.object(Common, "get_auth_token", mock.Mock()) @mock.patch.object(Common, 'get_endpoint', mock.Mock()) @mock.patch.object(metric_req.Metrics, "delete_metric") @@ -74,11 +65,13 @@ class TestMetricReq(unittest.TestCase): # Mock a message value and key message = Message() message.key = "delete_metric_request" - message.value = json.dumps({"vim_uuid": "test_id", "metric_name": "disk_write_ops", "resource_uuid": "my_r_id"}) + message.value = json.dumps({"metric_name": "disk_write_ops", "resource_uuid": "my_r_id", "correlation_id": 1}) + + del_metric.return_value = True # Call the metric functionality and check delete request get_metric_id.return_value = "my_metric_id" - self.metrics.metric_calls(message) + self.metrics.metric_calls(message, 'test_id') del_metric.assert_called_with(mock.ANY, mock.ANY, "my_metric_id") @mock.patch.object(Common, "get_auth_token", mock.Mock()) @@ -89,11 +82,13 @@ class TestMetricReq(unittest.TestCase): # Mock a message with list metric key and value message = Message() message.key = "list_metric_request" - message.value = json.dumps({"vim_uuid": "test_id", "metrics_list_request": "metric_details"}) + message.value = json.dumps({"metrics_list_request": {"correlation_id": 1}}) + + list_metrics.return_value = [] # Call the metric functionality and check list functionality - self.metrics.metric_calls(message) - list_metrics.assert_called_with(mock.ANY, mock.ANY, "metric_details") + self.metrics.metric_calls(message, 'test_id') + list_metrics.assert_called_with(mock.ANY, mock.ANY, {"correlation_id": 1}) @mock.patch.object(Common, "get_auth_token", mock.Mock()) @mock.patch.object(Common, 'get_endpoint', mock.Mock()) @@ -107,14 +102,14 @@ class TestMetricReq(unittest.TestCase): # Mock a message with update metric key and value message = Message() message.key = "update_metric_request" - message.value = json.dumps({"vim_uuid": "test_id", - "metric_create": - {"metric_name": "my_metric", + message.value = json.dumps({"metric_create_request": + {"correlation_id": 1, + "metric_name": "my_metric", "resource_uuid": "my_r_id"}}) # Call metric functionality and confirm no function is called # Gnocchi does not support updating a metric configuration - 
self.metrics.metric_calls(message) + self.metrics.metric_calls(message, 'test_id') config_metric.assert_not_called() list_metrics.assert_not_called() delete_metric.assert_not_called() @@ -128,11 +123,11 @@ class TestMetricReq(unittest.TestCase): # Mock a message with create metric key and value message = Message() message.key = "create_metric_request" - message.value = json.dumps({"vim_uuid": "test_id", "metric_create": "metric_details"}) + message.value = json.dumps({"metric_create_request": "metric_details"}) # Call metric functionality and check config metric config_metric.return_value = "metric_id", "resource_id", True - self.metrics.metric_calls(message) + self.metrics.metric_calls(message, 'test_id') config_metric.assert_called_with(mock.ANY, mock.ANY, "metric_details") @mock.patch.object(Common, "get_auth_token", mock.Mock()) @@ -143,10 +138,10 @@ class TestMetricReq(unittest.TestCase): # Mock a message with a read data key and value message = Message() message.key = "read_metric_data_request" - message.value = json.dumps({"vim_uuid": "test_id", "alarm_uuid": "alarm_id"}) + message.value = json.dumps({"alarm_uuid": "alarm_id"}) # Call metric functionality and check read data metrics read_data.return_value = "time_stamps", "data_values" - self.metrics.metric_calls(message) + self.metrics.metric_calls(message, 'test_id') read_data.assert_called_with( mock.ANY, mock.ANY, json.loads(message.value)) diff --git a/osm_mon/test/OpenStack/unit/test_notifier.py b/osm_mon/test/OpenStack/unit/test_notifier.py index a1ce1c6..4841013 100644 --- a/osm_mon/test/OpenStack/unit/test_notifier.py +++ b/osm_mon/test/OpenStack/unit/test_notifier.py @@ -148,7 +148,7 @@ class NotifierHandler(BaseHTTPRequestHandler): sev=values['severity'], date=a_date, state=values['current'], vim_type="OpenStack") self._producer.notify_alarm( - 'notify_alarm', resp_message, 'alarm_response') + 'notify_alarm', resp_message) except Exception: pass @@ -278,4 +278,4 @@ class TestNotifier(unittest.TestCase): self.handler.notify_alarm(json.loads(post_data)) notify.assert_called_with( - "notify_alarm", valid_notify_resp, "alarm_response") + "notify_alarm", valid_notify_resp) diff --git a/osm_mon/test/VMware/test_mon_plugin_vrops.py b/osm_mon/test/VMware/test_mon_plugin_vrops.py index 59affc2..40f9417 100644 --- a/osm_mon/test/VMware/test_mon_plugin_vrops.py +++ b/osm_mon/test/VMware/test_mon_plugin_vrops.py @@ -1036,6 +1036,7 @@ class TestMonPlugin(unittest.TestCase): @mock.patch.object(monPlugin.requests, 'get') + # @unittest.skip("NEEDS FIX") def test_get_alarm_defination_by_name_no_valid_alarm_found(self, m_get): """Test get_alarm_defination_by_name: With no valid alarm found in returned list""" diff --git a/osm_mon/test/VMware/test_plugin_receiver.py b/osm_mon/test/VMware/test_plugin_receiver.py index 1f1e38b..b32c7f6 100644 --- a/osm_mon/test/VMware/test_plugin_receiver.py +++ b/osm_mon/test/VMware/test_plugin_receiver.py @@ -256,7 +256,7 @@ class TestPluginReceiver(unittest.TestCase): msg.topic = "metric_request" msg.key = "create_metric_request" - msg.value = json.dumps({"metric_create":"metric_details"}) + msg.value = json.dumps({"metric_create_request":"metric_details"}) # set the return value m_verify_metric.return_value = True @@ -281,7 +281,7 @@ class TestPluginReceiver(unittest.TestCase): msg.topic = "metric_request" msg.key = "update_metric_request" - msg.value = json.dumps({"metric_create":"metric_details"}) + msg.value = json.dumps({"metric_create_request":"metric_details"}) # set the return value 
m_verify_metric.return_value = True @@ -594,7 +594,7 @@ class TestPluginReceiver(unittest.TestCase): # Mock metric_info metric_info = {'vim_type' : 'VMware','correlation_id': 'e14b203c', - 'metric_create':{ + 'metric_create_request':{ 'resource_uuid': '6486e69', 'metric_name': 'CPU_UTILIZATION', 'metric_unit': '%' @@ -617,7 +617,7 @@ class TestPluginReceiver(unittest.TestCase): # Mock metric_info metric_info = {'vim_type' : 'VMware','correlation_id': 'e14b203c', - 'metric_create':{ + 'metric_create_request':{ 'resource_uuid': '6486e69', 'metric_name': 'CPU_UTILIZATION', 'metric_unit': '%' diff --git a/osm_mon/test/core/test_common_consumer.py b/osm_mon/test/core/test_common_consumer.py new file mode 100644 index 0000000..56ac492 --- /dev/null +++ b/osm_mon/test/core/test_common_consumer.py @@ -0,0 +1,63 @@ +import unittest + +import mock + +from osm_mon.core.database import VimCredentials +from osm_mon.core.message_bus.common_consumer import * + + +class CommonConsumerTest(unittest.TestCase): + @mock.patch.object(DatabaseManager, "get_credentials") + def test_get_vim_type(self, get_creds): + mock_creds = VimCredentials() + mock_creds.id = 'test_id' + mock_creds.user = 'user' + mock_creds.url = 'url' + mock_creds.password = 'password' + mock_creds.tenant_name = 'tenant_name' + mock_creds.type = 'openstack' + + get_creds.return_value = mock_creds + + db_manager = DatabaseManager() + vim_type = get_vim_type(db_manager, 'test_id') + + self.assertEqual(vim_type, 'openstack') + + @mock.patch.object(dbmongo.DbMongo, "get_one") + def test_get_vdur(self, get_one): + get_one.return_value = {'_id': 'a314c865-aee7-4d9b-9c9d-079d7f857f01', + '_admin': { + 'projects_read': ['admin'], 'created': 1526044312.102287, + 'modified': 1526044312.102287, 'projects_write': ['admin'] + }, + 'vim-account-id': 'c1740601-7287-48c8-a2c9-bce8fee459eb', + 'nsr-id-ref': '5ec3f571-d540-4cb0-9992-971d1b08312e', + 'vdur': [ + { + 'internal-connection-point': [], + 'vdu-id-ref': 'ubuntuvnf_vnfd-VM', + 'id': 'ffd73f33-c8bb-4541-a977-44dcc3cbe28d', + 'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7' + } + ], + 'vnfd-ref': 'ubuntuvnf_vnfd', + 'member-vnf-index-ref': '1', + 'created-time': 1526044312.0999322, + 'vnfd-id': 'a314c865-aee7-4d9b-9c9d-079d7f857f01', + 'id': 'a314c865-aee7-4d9b-9c9d-079d7f857f01'} + + common_db = dbmongo.DbMongo() + vdur = get_vdur(common_db, '5ec3f571-d540-4cb0-9992-971d1b08312e', '1', 'ubuntuvnf_vnfd-VM') + expected_vdur = { + 'internal-connection-point': [], + 'vdu-id-ref': 'ubuntuvnf_vnfd-VM', + 'id': 'ffd73f33-c8bb-4541-a977-44dcc3cbe28d', + 'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7' + } + + self.assertDictEqual(vdur, expected_vdur) + + +if __name__ == '__main__': + unittest.main() diff --git a/requirements.txt b/requirements.txt index 98752ea..071104c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,7 +18,6 @@ # For those usages not covered by the Apache License, Version 2.0 please # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com -stdeb==0.8.* kafka==1.3.* lxml==4.2.* requests==2.18.* diff --git a/setup.py b/setup.py index 163d17f..20d4068 100644 --- a/setup.py +++ b/setup.py @@ -20,9 +20,15 @@ # contact: prithiv.mohan@intel.com or adrian.hoban@intel.com __author__ = "Prithiv Mohan" -__date__ = "14/Sep/2017" +__date__ = "14/Sep/2017" from setuptools import setup -from os import system + + +def parse_requirements(requirements): + with open(requirements) as f: + return [l.strip('\n') for l in f if l.strip('\n') and not l.startswith('#') and '://' not in l] + + _name = 
'osm_mon' _version = '1.0' _description = 'OSM Monitoring Module' @@ -33,23 +39,25 @@ _maintainer_email = 'adrian.hoban@intel.com' _license = 'Apache 2.0' _url = 'https://osm.etsi.org/gitweb/?p=osm/MON.git;a=tree' setup(name="osm_mon", - version = _version, - description = _description, - long_description = open('README.rst').read(), - author = _author, - author_email = _author_email, - maintainer = _maintainer, - maintainer_email = _maintainer_email, - url = _url, - license = _license, - packages = [_name], - package_dir = {_name: _name}, - package_data = {_name: ['osm_mon/core/message_bus/*.py', 'osm_mon/core/models/*.json', - 'osm_mon/plugins/OpenStack/Aodh/*.py', 'osm_mon/plugins/OpenStack/Gnocchi/*.py', - 'osm_mon/plugins/vRealiseOps/*', 'osm_mon/plugins/CloudWatch/*']}, - data_files = [('/etc/systemd/system/', ['scripts/kafka.sh']), - ], + version=_version, + description=_description, + long_description=open('README.rst').read(), + author=_author, + author_email=_author_email, + maintainer=_maintainer, + maintainer_email=_maintainer_email, + url=_url, + license=_license, + packages=[_name], + package_dir={_name: _name}, + package_data={_name: ['osm_mon/core/message_bus/*.py', 'osm_mon/core/models/*.json', + 'osm_mon/plugins/OpenStack/Aodh/*.py', 'osm_mon/plugins/OpenStack/Gnocchi/*.py', + 'osm_mon/plugins/vRealiseOps/*', 'osm_mon/plugins/CloudWatch/*']}, scripts=['osm_mon/plugins/vRealiseOps/vROPs_Webservice/vrops_webservice', - 'kafkad', 'osm_mon/core/message_bus/common_consumer.py'], + 'osm_mon/core/message_bus/common_consumer.py'], + install_requires=parse_requirements('requirements.txt'), include_package_data=True, + dependency_links=[ + 'git+https://osm.etsi.org/gerrit/osm/common.git@857731b#egg=osm-common' + ] ) diff --git a/test-requirements.txt b/test-requirements.txt index b8a1cb5..b404738 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -19,18 +19,5 @@ # For those usages not covered by the Apache License, Version 2.0 please # contact: helena.mcgough@intel.com or adrian.hoban@intel.com ## - -hacking>=0.10.0,<0.11 - flake8<3.0 mock -oslosphinx>=2.5.0 # Apache-2.0 -oslotest>=1.10.0 # Apache-2.0 -os-testr -testrepository>=0.0.18 -pylint -python-subunit>=0.0.18 -pytest -testscenarios>=0.4 -testtools>=1.4.0 -kafka diff --git a/tox.ini b/tox.ini index 722fbf6..5940b95 100644 --- a/tox.ini +++ b/tox.ini @@ -24,31 +24,34 @@ # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. - [tox] -minversion = 1.6 -envlist = py27 -skipsdist = True +envlist = py3 +toxworkdir={homedir}/.tox [testenv] -usedevelop = True -install_command = pip install -r requirements.txt -U {opts} {packages} -commands = sh tools/pretty_tox.sh '{posargs}' +basepythons = python3 +commands=python3 -m unittest discover -v +install_command = python3 -m pip install -r requirements.txt -U {opts} {packages} deps = -r{toxinidir}/test-requirements.txt -whitelist_externals = sh -setenv = - VIRTUAL_ENV={envdir} -[testenv:pep8] -commands = flake8 plugins +[testenv:flake8] +basepython = python3 +deps = flake8 +commands = + flake8 osm_mon -[pep8] -max-line-length = 80 +[testenv:build] +basepython = python3 +deps = stdeb + setuptools-version-command +commands = python3 setup.py --command-packages=stdeb.command bdist_deb [flake8] # E123, E125 skipped as they are invalid PEP-8. 
-max-line-length = 80 +max-line-length = 120 show-source = True ignore = E123,E125,E241 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,devops_stages/*,.rst + + -- 2.25.1
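
The mon_plugin_vrops.py and plugin_receiver.py hunks above apply the same Python 3 compatibility idioms throughout: dict.iteritems() becomes six.iteritems(dict), dict.has_key(k) becomes 'k' in dict, bare filter() calls are wrapped in list() because filter() returns a lazy iterator on Python 3, and the deprecated logger.warn() is replaced by logger.warning(). A minimal sketch of the combined pattern, using an illustrative helper (the function name and log messages are not taken from the plugin):

import logging

import six

logger = logging.getLogger(__name__)

severity_mano2vrops = {'WARNING': 'WARNING', 'CRITICAL': 'CRITICAL'}


def find_alerts_by_name(alerts_list, alarm_name):
    # dict.iteritems() -> six.iteritems(dict); works on both Python 2 and 3.
    for severity, vrops_level in six.iteritems(severity_mano2vrops):
        logger.debug("severity mapping %s -> %s", severity, vrops_level)
    # dict.has_key(k) -> 'k' in dict (has_key was removed in Python 3).
    if alerts_list and 'alertLevel' in alerts_list[0]:
        # logger.warn() -> logger.warning() (warn() is deprecated).
        logger.warning("alertLevel present on first alert")
    # filter() returns a lazy iterator on Python 3, so wrap it in list().
    return list(filter(lambda alert: alert['name'] == alarm_name, alerts_list))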
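
The OpenStack test changes above track an interface change: Alarming.alarming() and Metrics.metric_calls() now take the VIM UUID as a second argument instead of reading vim_uuid/vim_type from the Kafka payload, and the request payload key metric_create is renamed to metric_create_request (similarly, list_alarm_resp becomes list_alarm_response in the vROPs plugin). A minimal sketch of how a request message is built under the new convention; the Message helper mirrors the one used in the unit tests, and the concrete values are illustrative:

import json


class Message(object):
    # Minimal stand-in for a Kafka message, mirroring the Message helper in the
    # unit tests above.
    def __init__(self, topic, key, value):
        self.topic = topic
        self.key = key
        self.value = value


def build_create_metric_message():
    # Payload key renamed from "metric_create" to "metric_create_request";
    # vim_uuid/vim_type no longer travel inside the payload.
    payload = {"metric_create_request": {"correlation_id": 123,
                                         "metric_name": "cpu_utilization",
                                         "resource_uuid": "resource_id"}}
    return Message("metric_request", "create_metric_request", json.dumps(payload))


# The consumer resolves the VIM and passes its UUID explicitly, e.g.:
#     metrics.metric_calls(build_create_metric_message(), vim_uuid)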
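
Several unit tests above stub Common.perform_request with expressions such as type('obj', (object,), {'text': '{"alarm_id":"1"}'}). That creates a throwaway class whose class attribute stands in for the .text attribute of a requests response, which is all the code under test reads. A minimal sketch of the same idea against a hypothetical function, with an equivalent mock.Mock(text=...) form shown for comparison:

import json

import mock


def get_alarm_id(perform_request):
    # Hypothetical code under test: it only reads the .text attribute of the
    # response returned by perform_request().
    resp = perform_request("alarm_endpoint/v2/alarms/", "auth_token", req_type="post")
    return json.loads(resp.text)["alarm_id"]


# Anonymous-class stub, as used in the patched tests.
stub = mock.Mock(return_value=type('obj', (object,), {'text': '{"alarm_id":"1"}'}))
assert get_alarm_id(stub) == "1"

# Equivalent, and arguably clearer: a Mock whose .text is set directly.
stub = mock.Mock(return_value=mock.Mock(text='{"alarm_id":"1"}'))
assert get_alarm_id(stub) == "1"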
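
The new test_common_consumer.py test mocks DbMongo.get_one() to return a VNFR-shaped record and expects get_vdur() to return the vdur entry whose vdu-id-ref matches the requested VDU name. The common_consumer implementation itself is not reproduced in this excerpt, so the following is only a sketch of the filtering step implied by the test data (the real helper presumably also issues the DB query keyed on nsr-id-ref and member-vnf-index-ref):

def find_vdur(vnfr, vdu_name):
    # Return the VDU record whose vdu-id-ref matches vdu_name, given a VNFR
    # shaped like the mocked get_one() result in the test above.
    for vdur in vnfr.get('vdur', []):
        if vdur.get('vdu-id-ref') == vdu_name:
            return vdur
    return None


vnfr = {'vdur': [{'vdu-id-ref': 'ubuntuvnf_vnfd-VM',
                  'vim-id': '27042672-5190-4209-b844-95bbaeea7ea7'}]}
assert find_vdur(vnfr, 'ubuntuvnf_vnfd-VM')['vim-id'] == '27042672-5190-4209-b844-95bbaeea7ea7'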
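
setup.py now derives install_requires from requirements.txt through the parse_requirements() helper added above, which keeps non-empty, non-comment lines and skips URL-style entries (those are expected to come in via dependency_links). A small self-contained sketch of the same filtering applied to in-memory lines:

def parse_requirement_lines(lines):
    # Same filtering as the parse_requirements() helper in setup.py,
    # applied to an iterable of lines instead of a file path.
    return [l.strip('\n') for l in lines
            if l.strip('\n') and not l.startswith('#') and '://' not in l]


sample = ["# comment\n",
          "kafka==1.3.*\n",
          "\n",
          "git+https://osm.etsi.org/gerrit/osm/common.git@857731b#egg=osm-common\n"]
assert parse_requirement_lines(sample) == ["kafka==1.3.*"]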