Adds support for OSMMON_DATABASE_COMMONKEY to decrypt vim passwords
[osm/MON.git] / osm_mon / plugins / OpenStack / Gnocchi / metric_handler.py
1 # Copyright 2017 Intel Research and Development Ireland Limited
2 # *************************************************************
3
4 # This file is part of OSM Monitoring module
5 # All Rights Reserved to Intel Corporation
6
7 # Licensed under the Apache License, Version 2.0 (the "License"); you may
8 # not use this file except in compliance with the License. You may obtain
9 # a copy of the License at
10
11 # http://www.apache.org/licenses/LICENSE-2.0
12
13 # Unless required by applicable law or agreed to in writing, software
14 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
15 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
16 # License for the specific language governing permissions and limitations
17 # under the License.
18
19 # For those usages not covered by the Apache License, Version 2.0 please
20 # contact: helena.mcgough@intel.com or adrian.hoban@intel.com
21 ##
22 """Carry out OpenStack metric requests via Gnocchi API."""
23
24 import datetime
25 import json
26 import logging
27 import time
28
29 import six
30
31 from osm_mon.core.auth import AuthManager
32 from osm_mon.core.settings import Config
33 from osm_mon.plugins.OpenStack.common import Common
34 from osm_mon.plugins.OpenStack.response import OpenStackResponseBuilder
35
36 log = logging.getLogger(__name__)
37
# Mapping from OSM/MON metric names (request vocabulary) to the
# corresponding OpenStack measurement names collected by Gnocchi.
# NOTE(review): "packets_received" and "packets_sent" both map to
# "interface.if_packets" — presumably disambiguated elsewhere; confirm.
METRIC_MAPPINGS = {
    "average_memory_utilization": "memory.usage",
    "disk_read_ops": "disk.read.requests",
    "disk_write_ops": "disk.write.requests",
    "disk_read_bytes": "disk.read.bytes",
    "disk_write_bytes": "disk.write.bytes",
    "packets_dropped": "interface.if_dropped",
    "packets_received": "interface.if_packets",
    "packets_sent": "interface.if_packets",
    "cpu_utilization": "cpu_util",
}

# Collection-period units expressed in milliseconds.
# MONTH is the average Gregorian month (2629746 s = 31556952 s / 12) and
# YEAR the average Gregorian year (365.2425 days).
PERIOD_MS = {
    "HR": 3600000,
    "DAY": 86400000,
    "WEEK": 604800000,
    "MONTH": 2629746000,
    "YEAR": 31556952000
}
57
58
59 class OpenstackMetricHandler(object):
60 """OpenStack metric requests performed via the Gnocchi API."""
61
62 def __init__(self):
63 """Initialize the metric actions."""
64 self._cfg = Config.instance()
65
66 # Use the Response class to generate valid json response messages
67 self._response = OpenStackResponseBuilder()
68
69 self._auth_manager = AuthManager()
70
71 def handle_request(self, key: str, values: dict, vim_uuid: str) -> dict:
72 """
73 Processes metric request message depending on it's key
74 :param key: Kafka message key
75 :param values: Dict containing metric request data. Follows models defined in core.models.
76 :param vim_uuid: UUID of the VIM to handle the metric request.
77 :return: Dict containing metric response data. Follows models defined in core.models.
78 """
79
80 log.info("OpenStack metric action required.")
81
82 if 'metric_name' in values and values['metric_name'] not in METRIC_MAPPINGS.keys():
83 raise ValueError('Metric ' + values['metric_name'] + ' is not supported.')
84
85 verify_ssl = self._auth_manager.is_verify_ssl(vim_uuid)
86
87 endpoint = Common.get_endpoint("metric", vim_uuid, verify_ssl=verify_ssl)
88
89 auth_token = Common.get_auth_token(vim_uuid, verify_ssl=verify_ssl)
90
91 if key == "create_metric_request":
92 metric_details = values['metric_create_request']
93 status = False
94 metric_id = None
95 resource_id = None
96 try:
97 # Configure metric
98 metric_id, resource_id = self.configure_metric(endpoint, auth_token, metric_details, verify_ssl)
99 log.info("Metric successfully created")
100 status = True
101 except Exception as e:
102 log.exception("Error creating metric")
103 raise e
104 finally:
105 return self._response.generate_response('create_metric_response',
106 cor_id=metric_details['correlation_id'],
107 status=status,
108 metric_id=metric_id,
109 resource_id=resource_id)
110
111 elif key == "read_metric_data_request":
112 metric_id = None
113 timestamps = []
114 metric_data = []
115 status = False
116 try:
117 metric_id = self.get_metric_id(endpoint,
118 auth_token,
119 METRIC_MAPPINGS[values['metric_name']],
120 values['resource_uuid'],
121 verify_ssl)
122 # Read all metric data related to a specified metric
123 timestamps, metric_data = self.read_metric_data(endpoint, auth_token, values, verify_ssl)
124 log.info("Metric data collected successfully")
125 status = True
126 except Exception as e:
127 log.exception("Error reading metric data")
128 raise e
129 finally:
130 return self._response.generate_response('read_metric_data_response',
131 cor_id=values['correlation_id'],
132 status=status,
133 metric_id=metric_id,
134 metric_name=values['metric_name'],
135 resource_id=values['resource_uuid'],
136 times=timestamps,
137 metrics=metric_data)
138
139 elif key == "delete_metric_request":
140 metric_id = None
141 status = False
142 try:
143 # delete the specified metric in the request
144 metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
145 values['resource_uuid'], verify_ssl)
146 self.delete_metric(
147 endpoint, auth_token, metric_id, verify_ssl)
148 log.info("Metric deleted successfully")
149 status = True
150
151 except Exception as e:
152 log.exception("Error deleting metric")
153 raise e
154 finally:
155 return self._response.generate_response('delete_metric_response',
156 cor_id=values['correlation_id'],
157 metric_id=metric_id,
158 metric_name=values['metric_name'],
159 status=status,
160 resource_id=values['resource_uuid'])
161
162 elif key == "update_metric_request":
163 # Gnocchi doesn't support configuration updates
164 # Log and send a response back to this effect
165 log.warning("Gnocchi doesn't support metric configuration updates.")
166 req_details = values['metric_update_request']
167 metric_name = req_details['metric_name']
168 resource_id = req_details['resource_uuid']
169 metric_id = self.get_metric_id(endpoint, auth_token, metric_name, resource_id, verify_ssl)
170 return self._response.generate_response('update_metric_response',
171 cor_id=req_details['correlation_id'],
172 status=False,
173 resource_id=resource_id,
174 metric_id=metric_id)
175
176 elif key == "list_metric_request":
177 list_details = values['metrics_list_request']
178 metric_list = []
179 status = False
180 try:
181 metric_list = self.list_metrics(
182 endpoint, auth_token, list_details, verify_ssl)
183 log.info("Metrics listed successfully")
184 status = True
185 except Exception as e:
186 log.exception("Error listing metrics")
187 raise e
188 finally:
189 return self._response.generate_response('list_metric_response',
190 cor_id=list_details['correlation_id'],
191 status=status,
192 metric_list=metric_list)
193
194 else:
195 raise ValueError("Unknown key {}, no action will be performed.".format(key))
196
    def configure_metric(self, endpoint, auth_token, values, verify_ssl):
        """Create the new metric in Gnocchi.

        First tries to append the metric to an existing generic resource;
        if that POST fails for any reason, falls back to creating a new
        generic resource that carries the metric.

        :param endpoint: Gnocchi API endpoint URL.
        :param auth_token: OpenStack authentication token.
        :param values: dict with 'resource_uuid', 'metric_name' and 'metric_unit'.
        :param verify_ssl: whether to verify the VIM's SSL certificate.
        :return: tuple (metric_id, resource_id) for the created metric.
        :raises ValueError: if a required field is missing, or the metric
            already exists for the resource.
        """
        required_fields = ['resource_uuid', 'metric_name']
        for field in required_fields:
            if field not in values:
                raise ValueError("Missing field: " + field)

        resource_id = values['resource_uuid']
        metric_name = values['metric_name'].lower()

        # Check for an existing metric for this resource
        # NOTE(review): get_metric_id raises KeyError when the metric is
        # absent instead of returning None, so the None-check below may be
        # unreachable in practice — confirm against get_metric_id.
        metric_id = self.get_metric_id(
            endpoint, auth_token, metric_name, resource_id, verify_ssl)

        if metric_id is None:
            # Try appending metric to existing resource
            try:
                base_url = "{}/v1/resource/generic/%s/metric"
                res_url = base_url.format(endpoint) % resource_id
                payload = {metric_name: {'archive_policy_name': 'high',
                                         'unit': values['metric_unit']}}
                result = Common.perform_request(
                    res_url,
                    auth_token,
                    req_type="post",
                    verify_ssl=verify_ssl,
                    payload=json.dumps(payload, sort_keys=True))
                # Get id of newly created metric
                for row in json.loads(result.text):
                    if row['name'] == metric_name:
                        metric_id = row['id']
                log.info("Appended metric to existing resource.")

                return metric_id, resource_id
            except Exception as exc:
                # Gnocchi version of resource does not exist creating a new one
                log.info("Failed to append metric to existing resource:%s",
                         exc)
                url = "{}/v1/resource/generic".format(endpoint)
                metric = {'name': metric_name,
                          'archive_policy_name': 'high',
                          'unit': values['metric_unit'], }

                resource_payload = json.dumps({'id': resource_id,
                                               'metrics': {
                                                   metric_name: metric}}, sort_keys=True)

                resource = Common.perform_request(
                    url,
                    auth_token,
                    req_type="post",
                    payload=resource_payload,
                    verify_ssl=verify_ssl)

                # Return the newly created resource_id for creating alarms
                new_resource_id = json.loads(resource.text)['id']
                log.info("Created new resource for metric: %s",
                         new_resource_id)

                metric_id = self.get_metric_id(
                    endpoint, auth_token, metric_name, new_resource_id, verify_ssl)

                return metric_id, new_resource_id

        else:
            raise ValueError("Metric already exists for this resource")
263
264 def delete_metric(self, endpoint, auth_token, metric_id, verify_ssl):
265 """Delete metric."""
266 url = "{}/v1/metric/%s".format(endpoint) % metric_id
267
268 result = Common.perform_request(
269 url,
270 auth_token,
271 req_type="delete",
272 verify_ssl=verify_ssl)
273 if not str(result.status_code).startswith("2"):
274 log.warning("Failed to delete the metric.")
275 raise ValueError("Error deleting metric. Aodh API responded with code " + str(result.status_code))
276
277 def list_metrics(self, endpoint, auth_token, values, verify_ssl):
278 """List all metrics."""
279
280 # Check for a specified list
281 metric_name = None
282 if 'metric_name' in values:
283 metric_name = values['metric_name'].lower()
284
285 resource = None
286 if 'resource_uuid' in values:
287 resource = values['resource_uuid']
288
289 if resource:
290 url = "{}/v1/resource/generic/{}".format(endpoint, resource)
291 result = Common.perform_request(
292 url, auth_token, req_type="get", verify_ssl=verify_ssl)
293 resource_data = json.loads(result.text)
294 metrics = resource_data['metrics']
295
296 if metric_name:
297 if metrics.get(METRIC_MAPPINGS[metric_name]):
298 metric_id = metrics[METRIC_MAPPINGS[metric_name]]
299 url = "{}/v1/metric/{}".format(endpoint, metric_id)
300 result = Common.perform_request(
301 url, auth_token, req_type="get", verify_ssl=verify_ssl)
302 metric_list = json.loads(result.text)
303 log.info("Returning an %s resource list for %s metrics",
304 metric_name, resource)
305 return metric_list
306 else:
307 log.info("Metric {} not found for {} resource".format(metric_name, resource))
308 return []
309 else:
310 metric_list = []
311 for k, v in metrics.items():
312 url = "{}/v1/metric/{}".format(endpoint, v)
313 result = Common.perform_request(
314 url, auth_token, req_type="get", verify_ssl=verify_ssl)
315 metric = json.loads(result.text)
316 metric_list.append(metric)
317 if metric_list:
318 log.info("Return a list of %s resource metrics", resource)
319 return metric_list
320
321 else:
322 log.info("There are no metrics available")
323 return []
324 else:
325 url = "{}/v1/metric?sort=name:asc".format(endpoint)
326 result = Common.perform_request(
327 url, auth_token, req_type="get", verify_ssl=verify_ssl)
328 metrics = []
329 metrics_partial = json.loads(result.text)
330 for metric in metrics_partial:
331 metrics.append(metric)
332
333 while len(json.loads(result.text)) > 0:
334 last_metric_id = metrics_partial[-1]['id']
335 url = "{}/v1/metric?sort=name:asc&marker={}".format(endpoint, last_metric_id)
336 result = Common.perform_request(
337 url, auth_token, req_type="get", verify_ssl=verify_ssl)
338 if len(json.loads(result.text)) > 0:
339 metrics_partial = json.loads(result.text)
340 for metric in metrics_partial:
341 metrics.append(metric)
342
343 if metrics is not None:
344 # Format the list response
345 if metric_name is not None:
346 metric_list = self.response_list(
347 metrics, metric_name=metric_name)
348 log.info("Returning a list of %s metrics", metric_name)
349 else:
350 metric_list = self.response_list(metrics)
351 log.info("Returning a complete list of metrics")
352 return metric_list
353 else:
354 log.info("There are no metrics available")
355 return []
356
357 def get_metric_id(self, endpoint, auth_token, metric_name, resource_id, verify_ssl):
358 """Check if the desired metric already exists for the resource."""
359 url = "{}/v1/resource/generic/%s".format(endpoint) % resource_id
360 try:
361 # Try return the metric id if it exists
362 result = Common.perform_request(
363 url,
364 auth_token,
365 req_type="get",
366 verify_ssl=verify_ssl)
367 return json.loads(result.text)['metrics'][metric_name]
368 except KeyError as e:
369 log.error("Metric doesn't exist. No metric_id available")
370 raise e
371
372 def read_metric_data(self, endpoint, auth_token, values, verify_ssl):
373 """Collect metric measures over a specified time period."""
374 timestamps = []
375 data = []
376 # get metric_id
377 metric_id = self.get_metric_id(endpoint, auth_token, METRIC_MAPPINGS[values['metric_name']],
378 values['resource_uuid'], verify_ssl)
379 # Try and collect measures
380 collection_unit = values['collection_unit'].upper()
381 collection_period = values['collection_period']
382
383 # Define the start and end time based on configurations
384 # FIXME: Local timezone may differ from timezone set in Gnocchi, causing discrepancies in measures
385 stop_time = time.strftime("%Y-%m-%d") + "T" + time.strftime("%X")
386 end_time = int(round(time.time() * 1000))
387 diff = collection_period * PERIOD_MS[collection_unit]
388 s_time = (end_time - diff) / 1000.0
389 start_time = datetime.datetime.fromtimestamp(s_time).strftime(
390 '%Y-%m-%dT%H:%M:%S.%f')
391 base_url = "{}/v1/metric/%(0)s/measures?start=%(1)s&stop=%(2)s"
392 url = base_url.format(endpoint) % {
393 "0": metric_id, "1": start_time, "2": stop_time}
394
395 # Perform metric data request
396 metric_data = Common.perform_request(
397 url,
398 auth_token,
399 req_type="get",
400 verify_ssl=verify_ssl)
401
402 # Generate a list of the requested timestamps and data
403 for r in json.loads(metric_data.text):
404 timestamp = r[0].replace("T", " ")
405 timestamps.append(timestamp)
406 data.append(r[2])
407
408 return timestamps, data
409
410 def response_list(self, metric_list, metric_name=None, resource=None):
411 """Create the appropriate lists for a list response."""
412 resp_list, name_list, res_list = [], [], []
413
414 # Create required lists
415 for row in metric_list:
416 # Only list OSM metrics
417 name = None
418 if row['name'] in METRIC_MAPPINGS.values():
419 for k, v in six.iteritems(METRIC_MAPPINGS):
420 if row['name'] == v:
421 name = k
422 metric = {"metric_name": name,
423 "metric_uuid": row['id'],
424 "metric_unit": row['unit'],
425 "resource_uuid": row['resource_id']}
426 resp_list.append(metric)
427 # Generate metric_name specific list
428 if metric_name is not None and name is not None:
429 if metric_name in METRIC_MAPPINGS.keys() and row['name'] == METRIC_MAPPINGS[metric_name]:
430 metric = {"metric_name": metric_name,
431 "metric_uuid": row['id'],
432 "metric_unit": row['unit'],
433 "resource_uuid": row['resource_id']}
434 name_list.append(metric)
435 # Generate resource specific list
436 if resource is not None and name is not None:
437 if row['resource_id'] == resource:
438 metric = {"metric_name": name,
439 "metric_uuid": row['id'],
440 "metric_unit": row['unit'],
441 "resource_uuid": row['resource_id']}
442 res_list.append(metric)
443
444 # Join required lists
445 if metric_name is not None and resource is not None:
446 # Return intersection of res_list and name_list
447 return [i for i in res_list for j in name_list if i['metric_uuid'] == j['metric_uuid']]
448 elif metric_name is not None:
449 return name_list
450 elif resource is not None:
451 return res_list
452 else:
453 return resp_list