- Separated the schemas to models.
- Updated message bus schemas.
- Common producer for SO, monitoring tool plugins.
Signed-off-by: prithiv <prithiv.mohan@intel.com>
+++ /dev/null
-from kafka import KafkaConsumer
-from kafka.errors import KafkaError
-import logging
-
-class KafkaConsumer(object):
- """Adds messages to a kafka topic. Topic is hardcoded as 'alarms' and group as
- 'my_group' for now.
-
- """
-
- def __init__(self, uri):
- """Init
-
- uri - kafka connection details
- """
- if not cfg.CONF.kafka.uri:
- raise Exception("Kafka URI Not Found. Check the config file for Kafka URI")
- else:
- broker = cfg.CONF.kafka.uri
- consumer = KafkaConsumer('alarms',
- group_id='my_group',
- bootstrap_servers=broker, api_version=(0,10))
- #KafkaConsumer(value_deserializer=lambda m: json.loads(m.decode('ascii')))
-
- def consume(self, topic, messages):
- for message in self._consumer:
- print ("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition, message.offset, message.key, message.value))
+++ /dev/null
-from kafka import KafkaProducer
-from kafka.errors import KafkaError
-import logging
-import json
-import os
-
-
-class KafkaProducer(object):
-
- def __init__(self, topic, message):
-
- self._topic= topic
- self._message = message
-
- if "ZOOKEEPER_URI" in os.environ:
- broker = os.getenv("ZOOKEEPER_URI")
- else:
- broker = "SO-ub.lxd:2181"
-
- '''
- If the zookeeper broker URI is not set in the env, by default,
- SO-ub.lxd container is taken as the host because an instance of
- is already running.
- '''
-
- producer = KafkaProducer(key_serializer=str.encode,
- value_serializer=lambda v: json.dumps(v).encode('ascii'),
- bootstrap_servers=broker, api_version=(0,10))
-
-
- def publish(self, key, message, topic=None):
- try:
- future = producer.send('alarms', key, payload)
- producer.flush()
- except Exception:
- log.exception("Error publishing to {} topic." .format(topic))
- raise
- try:
- record_metadata = future.get(timeout=10)
- self._log.debug("TOPIC:", record_metadata.topic)
- self._log.debug("PARTITION:", record_metadata.partition)
- self._log.debug("OFFSET:", record_metadata.offset)
- except KafkaError:
- pass
-
- def configure_alarm(self, key, message, topic):
-
- payload_configure = {
- "alarm_configuration":
- {
- "schema_version": 1.0,
- "schema_type": "configure_alarm",
- "alarm_configuration":
- {
- "metric_name": { "type": "string" },
- "tenant_uuid": { "type": "string" },
- "resource_uuid": { "type": "string" },
- "description": { "type": "string" },
- "severity": { "type": "string" },
- "operation": { "type": "string" },
- "threshold_value": { "type": "integer" },
- "unit": { "type": "string" },
- "statistic": { "type": "string" }
- },
- "required": [ "schema_version",
- "schema_type",
- "metric_name",
- "resource_uuid",
- "severity",
- "operation",
- "threshold_value",
- "unit",
- "statistic" ]
- }
- }
-
- publish(key, value=json.dumps(payload_configure), topic='alarms')
-
- def notify_alarm(self, key, message, topic):
-
- payload_notify = {
- "notify_alarm":
- {
- "schema_version": 1.0,
- "schema_type": "notify_alarm",
- "notify_details":
- {
- "alarm_uuid": { "type": "string" },
- "resource_uuid": { "type": "string" },
- "description": { "type": "string" },
- "tenant_uuid": { "type": "string" },
- "severity": { "type" : ["integer", "string"] },
- "status": { "type": "string" },
- "start_date": { "type": "date-time" },
- "update_date": { "type": "date-time" },
- "cancel_date": { "type": "date-time" }
- },
- "required": [ "schema_version",
- "schema_type",
- "alarm_uuid",
- "resource_uuid",
- "tenant_uuid",
- "severity",
- "status",
- "start_date" ]
- }
- }
-
- publish(key, value=json.dumps(payload_notify), topic='alarms')
-
- def alarm_ack(self, key, message, topic):
-
- payload_ack = {
- "alarm_ack":
- {
- "schema_version": 1.0,
- "schema_type": "alarm_ack",
- "ack_details":
- {
- "alarm_uuid": { "type": "string" },
- "tenant_uuid": { "type": "string" },
- "resource_uuid": { "type": "string" }
- },
- "required": [ "schema_version",
- "schema_type",
- "alarm_uuid",
- "tenant_uuid",
- "resource_uuid" ]
- }
- }
-
- publish(key, value.json.dumps(payload_ack), topic='alarms')
-
- def configure_metrics(self, key, message, topic):
-
- payload_configure_metrics = {
- "configure_metrics":
- {
- "schema_version": 1.0,
- "schema_type": "configure_metrics",
- "tenant_uuid": { "type": "string" },
- "metrics_configuration":
- {
- "metric_name": { "type": "string" },
- "metric_unit": { "type": "string" },
- "resource_uuid": { "type": "string" }
- },
- "required": [ "schema_version",
- "schema_type",
- "metric_name",
- "metric_unit",
- "resource_uuid" ]
- }
- }
-
- publish(key, value.json.dumps(payload_configure_metrics), topic='metrics')
-
- def metric_data_request(self, key, message, topic):
-
- payload_metric_data_request = {
- "metric_data_request":
- {
- "schema_version": 1.0,
- "schema_type": "metric_data_request",
- "metric_name": { "type": "string" },
- "resource_uuid": { "type": "string" },
- "tenant_uuid": { "type": "string" },
- "collection_period": { "type": "string" }
- },
- "required": ["schema_version",
- "schema_type",
- "tenant_uuid",
- "metric_name",
- "collection_period",
- "resource_uuid"]
- }
-
- publish(key, value.json.dumps(payload_metric_data_request), topic='metrics')
-
- def metric_data_response(self, key, message, topic):
-
- payload_metric_data_response = {
- "metric_data_response":
- {
- "schema_version": 1.0,
- "schema_type": "metric_data_response",
- "metrics_name": { "type": "string" },
- "resource_uuid": { "type": "string" },
- "tenant_uuid": { "type": "string" },
- "metrics_data":
- {
- "time_series": { "type": "array" },
- "metrics_series": { "type": "array" }
- }
- },
- "required": [ "schema_version",
- "schema_type",
- "metrics_name",
- "resource_uuid",
- "tenant_uuid",
- "time_series",
- "metrics_series" ]
- }
-
- publish(key, value.json.dumps(payload_metric_data_response), topic='metrics')
-
- def access_credentials(self, key, message, topic):
-
- payload_access_credentials = {
- "access_credentials":
- {
- "schema_version": 1.0,
- "schema_type": "vim_access_credentials",
- "vim_type": { "type": "string" },
- "required": [ "schema_version",
- "schema_type",
- "vim_type" ],
- "access_config":
- {
- "if":
- {
- "vim_type": "openstack"
- },
- "then":
- {
- "openstack-site": { "type": "string" },
- "user": { "type": "string" },
- "password": { "type": "string",
- "options": { "hidden": true }},
- "vim_tenant_name": { "type": "string" }
- },
- "required": [ "openstack_site",
- "user",
- "password",
- "vim_tenant_name" ],
- "else":
- {
- "vim_type": "aws"
- },
- "then":
- {
- "aws_site": { "type": "string" },
- "user": { "type": "string" },
- "password": { "type": "string",
- "options": { "hidden": true }},
- "vim_tenant_name": { "type": "string" }
- },
- "required": [ "aws_site",
- "user",
- "password",
- "vim_tenant_name" ],
- "else":
- {
- "vim_type": "VMWare"
- },
- "then":
- {
- "vrops_site": { "type": "string" },
- "vrops_user": { "type": "string" },
- "vrops_password": { "type": "string",
- "options": { "hidden": true }},
- "vcloud_site": { "type": "string" },
- "admin_username": { "type": "string" },
- "admin_password": { "type": "string",
- "options": { "hidden": true }},
- "nsx_manager": { "type": "string" },
- "nsx_user": { "type": "string" },
- "nsx_password": { "type": "string",
- "options": { "hidden": true }},
- "vcenter_ip": { "type": "string" },
- "vcenter_port": { "type": "string" },
- "vcenter_user": { "type": "string" },
- "vcenter_password": { "type": "string",
- "options": { "hidden": true }},
- "vim_tenant_name": { "type": "string" },
- "org_name": { "type": "string" }
- },
- "required": [ "vrops_site",
- "vrops_user",
- "vrops_password",
- "vcloud_site",
- "admin_username",
- "admin_password",
- "vcenter_ip",
- "vcenter_port",
- "vcenter_user",
- "vcenter_password",
- "vim_tenant_name",
- "orgname" ]
- }
- }
- }
-
- publish(key, value.json.dumps(payload_access_credentials), topic='access_credentials')
--- /dev/null
+
+# Copyright © 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+##
+
+'''
+This is a kafka consumer app that reads the messages from the message bus for
+alarms and metrics responses.
+
+#TODO: (Prithiv Mohan)
+ - Modify topics based on new schema definitions
+ - Include consumer logging
+'''
+
+__author__ = "Prithiv Mohan"
+__date__ = "06/Sep/2017"
+
+from kafka import KafkaConsumer
+from kafka.errors import KafkaError
+import logging
+
+class KafkaConsumer(object):
+ """Adds messages to a kafka topic. Topic is hardcoded as 'alarms' and group as
+ 'my_group' for now.
+
+ """
+
+ def __init__(self, uri):
+ """Init
+
+ uri - kafka connection details
+ """
+ if not cfg.CONF.kafka.uri:
+ raise Exception("Kafka URI Not Found. Check the config file for Kafka URI")
+ else:
+ broker = cfg.CONF.kafka.uri
+ consumer = KafkaConsumer('alarms',
+ group_id='my_group',
+ bootstrap_servers=broker, api_version=(0,10))
+ #KafkaConsumer(value_deserializer=lambda m: json.loads(m.decode('ascii')))
+
+ def consume(self, topic, messages):
+ for message in self._consumer:
+ print ("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition, message.offset, message.key, message.value))
--- /dev/null
+# Copyright © 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: prithiv.mohan@intel.com or adrian.hoban@intel.com
+##
+
+'''
+This is a kafka producer app that interacts with the SO and the plugins of the
+datacenters like OpenStack, VMWare, AWS.
+#TODO: Interfacing with the APIs of the monitoring tool plugins (Prithiv Mohan).
+'''
+
+__author__ = "Prithiv Mohan"
+__date__ = "06/Sep/2017"
+
+
+from kafka import KafkaProducer
+from kafka.errors import KafkaError
+import logging
+import json
+import os
+from os import listdir
+
+
+
+class KafkaProducer(object):
+
+ def __init__(self, topic, message):
+
+ self._topic= topic
+ self._message = message
+
+ if "ZOOKEEPER_URI" in os.environ:
+ broker = os.getenv("ZOOKEEPER_URI")
+ else:
+ broker = "localhost:2181"
+
+ '''
+ If the zookeeper broker URI is not set in the env, by default,
+        localhost is taken as the host because an instance of Zookeeper
+        is assumed to be already running there.
+ '''
+
+ producer = KafkaProducer(key_serializer=str.encode,
+ value_serializer=lambda v: json.dumps(v).encode('ascii'),
+ bootstrap_servers=broker, api_version=(0,10))
+
+
+ def publish(self, key, message, topic=None):
+ try:
+ future = producer.send('alarms', key, payload)
+ producer.flush()
+ except Exception:
+ log.exception("Error publishing to {} topic." .format(topic))
+ raise
+ try:
+ record_metadata = future.get(timeout=10)
+ self._log.debug("TOPIC:", record_metadata.topic)
+ self._log.debug("PARTITION:", record_metadata.partition)
+ self._log.debug("OFFSET:", record_metadata.offset)
+ except KafkaError:
+ pass
+
+ json_path = os.path.join(os.pardir+"/models/")
+
+ def create_alarm_request(self, key, message, topic):
+
+ #External to MON
+
+ payload_create_alarm = json.loads(open(os.path.join(json_path,
+ 'create_alarm.json')).read())
+ publish(key,
+ value=json.dumps(payload_create_alarm),
+ topic='alarm_request')
+
+ def create_alarm_response(self, key, message, topic):
+
+ #Internal to MON
+
+ payload_create_alarm_resp = json.loads(open(os.path.join(json_path,
+ 'create_alarm_resp.json')).read())
+
+ publish(key,
+ value = json.dumps(payload_create_alarm_resp),
+ topic = 'alarm_response')
+
+
+ def list_alarm_request(self, key, message, topic):
+
+ #External to MON
+
+ payload_alarm_list_req = json.loads(open(os.path.join(json_path,
+ 'list_alarm_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_alarm_list_req),
+ topic='alarm_request')
+
+ def notify_alarm(self, key, message, topic):
+
+ payload_notify_alarm = json.loads(open(os.path.join(json_path,
+ 'notify_alarm.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_notify_alarm),
+ topic='alarm_response')
+
+ def list_alarm_response(self, key, message, topic):
+
+ payload_list_alarm_resp = json.loads(open(os.path.join(json_path,
+ 'list_alarm_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_list_alarm_resp),
+ topic='alarm_response')
+
+
+ def update_alarm_request(self, key, message, topic):
+
+ # External to Mon
+
+ payload_update_alarm_req = json.loads(open(os.path.join(json_path,
+ 'update_alarm_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_update_alarm_req),
+ topic='alarm_request')
+
+
+ def update_alarm_response(self, key, message, topic):
+
+ # Internal to Mon
+
+ payload_update_alarm_resp = json.loads(open(os.path.join(json_path,
+ 'update_alarm_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_update_alarm_resp),
+ topic='alarm_response')
+
+
+ def delete_alarm_request(self, key, message, topic):
+
+ # External to Mon
+
+ payload_delete_alarm_req = json.loads(open(os.path.join(json_path,
+ 'delete_alarm_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_delete_alarm_req),
+ topic='alarm_request')
+
+ def delete_alarm_response(self, key, message, topic):
+
+ # Internal to Mon
+
+ payload_delete_alarm_resp = json.loads(open(os.path.join(json_path,
+ 'delete_alarm_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_delete_alarm_resp),
+ topic='alarm_response')
+
+
+
+ def create_metrics_request(self, key, message, topic):
+
+ # External to Mon
+
+ payload_create_metrics_req = json.loads(open(os.path.join(json_path,
+ 'create_metric_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_create_metrics_req),
+ topic='metric_request')
+
+
+ def create_metrics_resp(self, key, message, topic):
+
+ # Internal to Mon
+
+ payload_create_metrics_resp = json.loads(open(os.path.join(json_path,
+ 'create_metric_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_create_metrics_resp),
+ topic='metric_response')
+
+
+ def read_metric_data_request(self, key, message, topic):
+
+ # External to Mon
+
+ payload_read_metric_data_request = json.loads(open(os.path.join(json_path,
+ 'read_metric_data_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_read_metric_data_request),
+ topic='metric_request')
+
+
+ def read_metric_data_response(self, key, message, topic):
+
+ # Internal to Mon
+
+ payload_metric_data_response = json.loads(open(os.path.join(json_path,
+ 'read_metric_data_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_metric_data_response),
+ topic='metric_response')
+
+
+ def list_metric_request(self, key, message, topic):
+
+ #External to MON
+
+ payload_metric_list_req = json.loads(open(os.path.join(json_path,
+ 'list_metric_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_metric_list_req),
+ topic='metric_request')
+
+ def list_metric_response(self, key, message, topic):
+
+ #Internal to MON
+
+ payload_metric_list_resp = json.loads(open(os.path.join(json_path,
+ 'list_metrics_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_metric_list_resp),
+ topic='metric_response')
+
+
+ def delete_metric_request(self, key, message, topic):
+
+ # External to Mon
+
+ payload_delete_metric_req = json.loads(open(os.path.join(json_path,
+ 'delete_metric_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_delete_metric_req),
+ topic='metric_request')
+
+
+ def delete_metric_response(self, key, message, topic):
+
+ # Internal to Mon
+
+ payload_delete_metric_resp = json.loads(open(os.path.join(json_path,
+ 'delete_metric_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_delete_metric_resp),
+ topic='metric_response')
+
+
+ def update_metric_request(self, key, message, topic):
+
+ # External to Mon
+
+ payload_update_metric_req = json.loads(open(os.path.join(json_path,
+ 'update_metric_req.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_update_metric_req),
+ topic='metric_request')
+
+
+ def update_metric_response(self, key, message, topic):
+
+ # Internal to Mon
+
+ payload_update_metric_resp = json.loads(open(os.path.join(json_path,
+ 'update_metric_resp.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_update_metric_resp),
+                topic='metric_response')
+
+ def access_credentials(self, key, message, topic):
+
+ payload_access_credentials = json.loads(open(os.path.join(json_path,
+ 'access_credentials.json')).read())
+
+ publish(key,
+ value=json.dumps(payload_access_credentials),
+ topic='access_credentials')
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "ack_details":
+ {
+ "alarm_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "vim_type": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "alarm_uuid",
+ "resource_uuid",
+ "tenant_uuid",
+ "vim_type" ]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_create_request":
+ {
+ "correlation_id": { "type": "integer" },
+ "alarm_name": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "vim_type": { "type": "string" },
+ "description": { "type": "string" },
+ "severity": { "type": "string" },
+ "operation": { "type": "string" },
+ "threshold_value": { "type": "integer" },
+ "unit": { "type": "string" },
+ "statistic": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "alarm_name",
+ "resource_uuid",
+ "vim_type",
+ "severity",
+ "operation",
+ "threshold_value",
+ "unit",
+ "statistic" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_create_response":
+ {
+ "correlation_id": { "type": "integer" },
+ "alarm_uuid": { "type": "string" },
+ "status": { "type": "boolean" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "alarm_uuid",
+ "status" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "vim_type": { "type": "string" },
+ "metrics_configuration":
+ {
+ "metric_uuid": { "type": "string" },
+ "metric_unit": { "type": "string" },
+ "resource_uuid": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "vim_type",
+ "metric_name",
+ "metric_unit",
+ "resource_uuid" ]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "metrics_create_response":
+ {
+ "metric_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "status": { "type": "boolean" }
+ },
+ "required": [ "schema_type",
+ "schema_version",
+ "correlation_id",
+ "metric_uuid",
+ "resource_uuid",
+ "status" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_delete_request":
+ {
+ "alarm_uuid": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "vim_type": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "alarm_uuid",
+ "correlation_id",
+ "vim_type" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_deletion_response":
+ {
+ "correlation_id": { "type": "integer" },
+ "alarm_uuid": { "type": "string" },
+ "status": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "alarm_uuid",
+ "status" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "metric_name": { "type": "string" },
+ "metric_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "correlation_uuid": { "type": "integer" },
+ "vim_type": { "type": "string" },
+    "required": [ "schema_version",
+ "schema_type",
+ "metric_name",
+ "metric_uuid",
+ "resource_uuid",
+ "tenant_uuid",
+ "correlation_uuid",
+ "vim_type" ]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "metric_name": { "type": "string" },
+ "metric_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "required": [ "schema_version",
+ "schema_type",
+ "metric_name",
+ "metric_uuid",
+ "resource_uuid",
+ "tenant_uuid",
+ "correlation_id" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_list_request":
+ {
+ "correlation_id": { "type": "integer" },
+ "resource_uuid": { "type": "string" },
+ "alarm_name": { "type": "string" },
+ "vim_type": { "type": "string" },
+ "severity": { "type" : "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "vim_type"
+ ]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "list_alarm_resp": { "type": "array" }
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "metrics_list_request":
+ {
+ "metric_name": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "resource_uuid": { "type": "string" },
+ "vim_type": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "metric_name",
+ "correlation_id",
+ "resource_uuid",
+ "vim_type"]
+}
\ No newline at end of file
--- /dev/null
+{
+"schema_version": { "type": "string" },
+"schema_type": { "type": "string" },
+"tenant_uuid": { "type": "string" },
+"correlation_uuid": { "type": "string" },
+"vim_type": { "type": "string" },
+"metrics_list":
+ {
+ "metric_name": { "type": "string" },
+ "metric_uuid": { "type": "string" },
+ "metric_unit": { "type": "string" },
+ "resource_uuid": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "tenant_uuid",
+ "correlation_uuid",
+ "vim_type",
+ "metric_name",
+ "metric_uuid",
+ "metric_unit",
+ "resource_uuid" ]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "notify_details":
+ {
+ "alarm_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "description": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "vim_type": { "type": "string" },
+ "severity": { "type" : ["integer", "string"] },
+ "status": { "type": "string" },
+ "start_date": { "type": "date-time" },
+ "update_date": { "type": "date-time" },
+ "cancel_date": { "type": "date-time" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "alarm_uuid",
+ "resource_uuid",
+ "tenant_uuid",
+ "vim_type",
+ "severity",
+ "status",
+ "start_date" ]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "metric_name": { "type": "string" },
+    "metric_uuid": { "type": "string" },
+ "resource_uuid": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "correlation_uuid": { "type": "string" },
+ "vim_type": { "type": "string" },
+ "collection_period": { "type": "string" },
+ "required": ["schema_version",
+ "schema_type",
+ "tenant_uuid",
+ "metric_name",
+ "metric_uuid",
+ "correlation_uuid",
+ "vim_type",
+ "collection_period",
+ "resource_uuid"]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+    "schema_type": { "type": "string" },
+ "metrics_name": { "type": "string" },
+ "metric_uuid": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "resource_uuid": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "metrics_data":
+ {
+ "time_series": { "type": "array" },
+ "metrics_series": { "type": "array" },
+ "unit": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "metric_name",
+ "metric_uuid",
+ "resource_uuid",
+ "tenant_uuid",
+ "time_series",
+ "metrics_series" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_update_request":
+ {
+ "alarm_uuid": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "vim_type": { "type": "string" },
+ "description": { "type": "string" },
+ "severity": { "type": "string" },
+ "operation": { "type": "string" },
+ "threshold_value": { "type": "string" },
+ "unit": { "type": "string" },
+ "statistic": { "type": "string" }
+ },
+ "required": [ "schema_version",
+                  "schema_type",
+ "correlation_id",
+ "alarm_uuid",
+ "vim_type" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "alarm_update_response":
+ {
+ "correlation_id": { "type": "integer" },
+ "alarm_uuid": { "type": "string" },
+ "status": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "alarm_uuid",
+ "status" ]
+}
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "tenant_uuid": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "vim_type": { "type": "string" },
+ "metric_update_request":
+ {
+ "metric_name": { "type": "string" },
+ "metric_uuid": { "type": "string" },
+ "metric_unit": { "type": "string" },
+ "resource_uuid": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "tenant_uuid",
+ "correlation_id",
+ "vim_type",
+ "metric_name",
+ "metric_uuid",
+ "resource_uuid",
+ "metric_unit"]
+}
\ No newline at end of file
--- /dev/null
+{
+ "schema_version": { "type": "string" },
+ "schema_type": { "type": "string" },
+ "correlation_id": { "type": "integer" },
+ "metric_update_response":
+ {
+ "metric_uuid": { "type": "string" },
+ "status": { "type": "boolean" },
+ "resource_uuid": { "type": "string" }
+ },
+ "required": [ "schema_version",
+ "schema_type",
+ "correlation_id",
+ "metric_uuid",
+ "resource_uuid",
+ "status"]
+}
\ No newline at end of file