X-Git-Url: https://osm.etsi.org/gitweb/?a=blobdiff_plain;f=osm_mon%2Fplugins%2FOpenStack%2FGnocchi%2Fmetrics.py;h=1dc9496cf04d50c8775d3925a680abdb1cc722b9;hb=refs%2Fchanges%2F20%2F5920%2F1;hp=8e2ab4e612be52d0b79464f5576e22c285dcbb59;hpb=effeb7c52a650308e47ef3eb0ded8315f21f9cba;p=osm%2FMON.git

diff --git a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
index 8e2ab4e..1dc9496 100644
--- a/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
+++ b/osm_mon/plugins/OpenStack/Gnocchi/metrics.py
@@ -27,7 +27,10 @@
 import logging
 import time
 
+import six
+
 from osm_mon.core.message_bus.producer import KafkaProducer
+from osm_mon.plugins.OpenStack.common import Common
 from osm_mon.plugins.OpenStack.response import OpenStack_Response
 from osm_mon.plugins.OpenStack.settings import Config
 
@@ -36,14 +39,14 @@ log = logging.getLogger(__name__)
 
 METRIC_MAPPINGS = {
     "average_memory_utilization": "memory.percent",
-    "disk_read_ops": "disk.disk_ops",
-    "disk_write_ops": "disk.disk_ops",
-    "disk_read_bytes": "disk.disk_octets",
-    "disk_write_bytes": "disk.disk_octets",
+    "disk_read_ops": "disk.read.requests",
+    "disk_write_ops": "disk.write.requests",
+    "disk_read_bytes": "disk.read.bytes",
+    "disk_write_bytes": "disk.write.bytes",
     "packets_dropped": "interface.if_dropped",
     "packets_received": "interface.if_packets",
     "packets_sent": "interface.if_packets",
-    "cpu_utilization": "cpu.percent",
+    "cpu_utilization": "cpu_util",
 }
 
 PERIOD_MS = {
@@ -62,12 +65,10 @@ class Metrics(object):
         """Initialize the metric actions."""
         # Configure an instance of the OpenStack metric plugin
         config = Config.instance()
-        config.read_environ("gnocchi")
+        config.read_environ()
 
         # Initialise authentication for API requests
-        self.auth_token = None
-        self.endpoint = None
-        self._common = None
+        self._common = Common()
 
         # Use the Response class to generate valid json response messages
         self._response = OpenStack_Response()
@@ -75,32 +76,20 @@ class Metrics(object):
         # Initializer a producer to send responses back to SO
         self._producer = KafkaProducer("metric_response")
 
-    def metric_calls(self, message, common, auth_token):
+    def metric_calls(self, message):
         """Consume info from the message bus to manage metric requests."""
         values = json.loads(message.value)
-        self._common = common
         log.info("OpenStack metric action required.")
 
-        # Generate and auth_token and endpoint for request
-        if auth_token is not None:
-            if self.auth_token != auth_token:
-                log.info("Auth_token for metrics set by access_credentials.")
-                self.auth_token = auth_token
-            else:
-                log.info("Auth_token has not been updated.")
-        else:
-            log.info("Using environment variables to set Gnocchi auth_token.")
-            self.auth_token = self._common._authenticate()
+        auth_token = Common.get_auth_token(values['vim_uuid'])
 
-        if self.endpoint is None:
-            log.info("Generating a new endpoint for Gnocchi.")
-            self.endpoint = self._common.get_endpoint("metric")
+        endpoint = Common.get_endpoint("metric", values['vim_uuid'])
 
         if message.key == "create_metric_request":
             # Configure metric
             metric_details = values['metric_create']
             metric_id, resource_id, status = self.configure_metric(
-                self.endpoint, self.auth_token, metric_details)
+                endpoint, auth_token, metric_details)
 
             # Generate and send a create metric response
             try:
@@ -118,7 +107,7 @@ class Metrics(object):
         elif message.key == "read_metric_data_request":
             # Read all metric data related to a specified metric
             timestamps, metric_data = self.read_metric_data(
-                self.endpoint, self.auth_token, values)
+                endpoint, auth_token, values)
 
             # Generate and send a response message
             try:
@@ -140,7 +129,7 @@ class Metrics(object):
             # delete the specified metric in the request
             metric_id = values['metric_uuid']
             status = self.delete_metric(
-                self.endpoint, self.auth_token, metric_id)
+                endpoint, auth_token, metric_id)
 
             # Generate and send a response message
             try:
@@ -165,7 +154,7 @@ class Metrics(object):
             metric_name = req_details['metric_name']
             resource_id = req_details['resource_uuid']
             metric_id = self.get_metric_id(
-                self.endpoint, self.auth_token, metric_name, resource_id)
+                endpoint, auth_token, metric_name, resource_id)
 
             # Generate and send a response message
             try:
@@ -184,7 +173,7 @@ class Metrics(object):
 
             list_details = values['metrics_list_request']
             metric_list = self.list_metrics(
-                self.endpoint, self.auth_token, list_details)
+                endpoint, auth_token, list_details)
 
             # Generate and send a response message
             try:
@@ -212,8 +201,8 @@ class Metrics(object):
             return None, None, False
 
         # Check/Normalize metric name
-        metric_name, norm_name = self.get_metric_name(values)
-        if norm_name is None:
+        norm_name, metric_name = self.get_metric_name(values)
+        if metric_name is None:
            log.warn("This metric is not supported by this plugin.")
            return None, resource_id, False
 
@@ -228,7 +217,7 @@ class Metrics(object):
             res_url = base_url.format(endpoint) % resource_id
             payload = {metric_name: {'archive_policy_name': 'high',
                                      'unit': values['metric_unit']}}
-            result = self._common._perform_request(
+            result = Common.perform_request(
                 res_url, auth_token, req_type="post",
                 payload=json.dumps(payload))
             # Get id of newly created metric
@@ -252,7 +241,7 @@ class Metrics(object):
                 'metrics': {
                     metric_name: metric}})
 
-            resource = self._common._perform_request(
+            resource = Common.perform_request(
                 url, auth_token, req_type="post",
                 payload=resource_payload)
 
@@ -276,10 +265,10 @@ class Metrics(object):
 
     def delete_metric(self, endpoint, auth_token, metric_id):
         """Delete metric."""
-        url = "{}/v1/metric/%s".format(endpoint) % (metric_id)
+        url = "{}/v1/metric/%s".format(endpoint) % metric_id
 
         try:
-            result = self._common._perform_request(
+            result = Common.perform_request(
                 url, auth_token, req_type="delete")
             if str(result.status_code) == "404":
                 log.warn("Failed to delete the metric.")
@@ -292,7 +281,6 @@ class Metrics(object):
 
     def list_metrics(self, endpoint, auth_token, values):
         """List all metrics."""
-        url = "{}/v1/metric/".format(endpoint)
 
         # Check for a specified list
         try:
@@ -312,9 +300,23 @@ class Metrics(object):
             resource = None
 
         try:
-            result = self._common._perform_request(
+            url = "{}/v1/metric?sort=name:asc".format(endpoint)
+            result = Common.perform_request(
                 url, auth_token, req_type="get")
-            metrics = json.loads(result.text)
+            metrics = []
+            metrics_partial = json.loads(result.text)
+            for metric in metrics_partial:
+                metrics.append(metric)
+
+            while len(json.loads(result.text)) > 0:
+                last_metric_id = metrics_partial[-1]['id']
+                url = "{}/v1/metric?sort=name:asc&marker={}".format(endpoint, last_metric_id)
+                result = Common.perform_request(
+                    url, auth_token, req_type="get")
+                if len(json.loads(result.text)) > 0:
+                    metrics_partial = json.loads(result.text)
+                    for metric in metrics_partial:
+                        metrics.append(metric)
 
             if metrics is not None:
                 # Format the list response
@@ -349,7 +351,7 @@ class Metrics(object):
 
         try:
             # Try return the metric id if it exists
-            result = self._common._perform_request(
+            result = Common.perform_request(
                 url, auth_token, req_type="get")
             return json.loads(result.text)['metrics'][metric_name]
         except Exception:
@@ -358,6 +360,7 @@ class Metrics(object):
 
     def get_metric_name(self, values):
         """Check metric name configuration and normalize."""
+        metric_name = None
         try:
             # Normalize metric name
             metric_name = values['metric_name'].lower()
@@ -367,7 +370,7 @@ class Metrics(object):
             return metric_name, None
 
     def read_metric_data(self, endpoint, auth_token, values):
-        """Collectd metric measures over a specified time period."""
+        """Collect metric measures over a specified time period."""
         timestamps = []
         data = []
         try:
@@ -377,6 +380,7 @@ class Metrics(object):
             collection_period = values['collection_period']
 
             # Define the start and end time based on configurations
+            # FIXME: Local timezone may differ from timezone set in Gnocchi, causing discrepancies in measures
             stop_time = time.strftime("%Y-%m-%d") + "T" + time.strftime("%X")
             end_time = int(round(time.time() * 1000))
             if collection_unit == 'YEAR':
@@ -391,7 +395,7 @@ class Metrics(object):
                 "0": metric_id, "1": start_time, "2": stop_time}
 
             # Perform metric data request
-            metric_data = self._common._perform_request(
+            metric_data = Common.perform_request(
                 url, auth_token, req_type="get")
 
             # Generate a list of the requested timestamps and data
@@ -412,35 +416,40 @@ class Metrics(object):
         # Create required lists
         for row in metric_list:
             # Only list OSM metrics
-            if row['name'] in METRIC_MAPPINGS.keys():
-                metric = {"metric_name": row['name'],
+            name = None
+            if row['name'] in METRIC_MAPPINGS.values():
+                for k,v in six.iteritems(METRIC_MAPPINGS):
+                    if row['name'] == v:
+                        name = k
+                metric = {"metric_name": name,
                           "metric_uuid": row['id'],
                           "metric_unit": row['unit'],
                           "resource_uuid": row['resource_id']}
-                resp_list.append(str(metric))
+                resp_list.append(metric)
             # Generate metric_name specific list
-            if metric_name is not None:
-                if row['name'] == metric_name:
-                    metric = {"metric_name": row['name'],
+            if metric_name is not None and name is not None:
+                if metric_name in METRIC_MAPPINGS.keys() and row['name'] == METRIC_MAPPINGS[metric_name]:
+                    metric = {"metric_name": metric_name,
                               "metric_uuid": row['id'],
                               "metric_unit": row['unit'],
                               "resource_uuid": row['resource_id']}
-                    name_list.append(str(metric))
+                    name_list.append(metric)
             # Generate resource specific list
-            if resource is not None:
+            if resource is not None and name is not None:
                 if row['resource_id'] == resource:
-                    metric = {"metric_name": row['name'],
+                    metric = {"metric_name": name,
                               "metric_uuid": row['id'],
                               "metric_unit": row['unit'],
                               "resource_uuid": row['resource_id']}
-                    res_list.append(str(metric))
+                    res_list.append(metric)
 
         # Join required lists
         if metric_name is not None and resource is not None:
-            return list(set(res_list).intersection(name_list))
+            # Return intersection of res_list and name_list
+            return [i for i in res_list for j in name_list if i['metric_uuid'] == j['metric_uuid']]
         elif metric_name is not None:
             return name_list
         elif resource is not None:
-            return list(set(res_list).intersection(resp_list))
+            return res_list
         else:
             return resp_list