Skip to content
Snippets Groups Projects
Commit d824bdd5 authored by marchettim's avatar marchettim Committed by Gerrit Code Review
Browse files

Merge "Refreshing osm_metrics stack"

parents 5fb201de 6021624a
No related branches found
No related tags found
No related merge requests found
......@@ -46,6 +46,15 @@ services:
- netOSM
volumes:
- mongo_db:/data/db
prometheus:
image: prom/prometheus:${PROMETHEUS_TAG:-latest}
hostname: prometheus
ports:
- "${OSM_PROM_PORTS:-9091:9090}"
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml:ro
networks:
- netOSM
keystone-db:
image: mariadb:10
networks:
......
......@@ -4,22 +4,6 @@ networks:
external:
name: ${OSM_NETWORK:-netOSM}
services:
kafka-exporter:
image: osm/kafka-exporter
hostname: kafka-exporter
ports:
- "12340:12340"
networks:
- netOSM
prometheus:
image: prom/prometheus
hostname: prometheus
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml:ro
networks:
- netOSM
depends_on:
- kafka-exporter
grafana:
image: grafana/grafana
volumes:
......@@ -31,5 +15,3 @@ services:
- "${OSM_PM_PORTS:-3000:3000}"
networks:
- netOSM
depends_on:
- prometheus
FROM phusion/baseimage
# LABEL replaces the deprecated MAINTAINER instruction.
LABEL maintainer="Gianpietro Lavado <glavado@whitestack.com>"
# HTTP port on which kafka-topic-exporter serves Prometheus metrics
# (must match exporter.port in kafka-topic-exporter.properties).
EXPOSE 12340
# Python runtime for the MON bridge script plus the JDK/maven/git
# toolchain needed to build kafka-topic-exporter from source.
# Clean the apt lists in the same layer to keep the image smaller.
RUN apt-get update && apt-get install -y python python-pip default-jdk maven git && rm -rf /var/lib/apt/lists/*
# Single pip invocation instead of two chained ones.
RUN pip install pyaml kafka
RUN mkdir -p /kafka-topic-exporter/config
# Build the exporter from the pinned v0.0.5 tag for reproducibility.
RUN cd /tmp && git clone https://github.com/ogibayashi/kafka-topic-exporter.git && cd /tmp/kafka-topic-exporter/ && git checkout v0.0.5 && mvn install
RUN cp /tmp/kafka-topic-exporter/target/kafka-topic-exporter-0.0.5-jar-with-dependencies.jar /kafka-topic-exporter/
# Drop the build-only tools; note earlier layers still contain them.
RUN rm -rf /tmp/kafka-topic-exporter && apt-get remove -y maven git
COPY kafka-topic-exporter.properties /kafka-topic-exporter/config/kafka-topic-exporter.properties
COPY mon_to_kafka_exporter.py /kafka-topic-exporter/mon_to_kafka_exporter.py
COPY initscript.sh /kafka-topic-exporter/initscript.sh
WORKDIR /kafka-topic-exporter
ENTRYPOINT ["./initscript.sh"]
#!/bin/bash
# Container entrypoint bridging OSM MON metrics into Prometheus.
# 1) Background translator: metric_response topic -> kafka_exporter_topic.
nohup python /kafka-topic-exporter/mon_to_kafka_exporter.py kafka:9092 &
# 2) Foreground exporter: kafka_exporter_topic -> Prometheus scrape endpoint.
# 'exec' replaces this shell so the JVM becomes PID 1 and receives the
# container stop signal (SIGTERM) directly instead of the shell swallowing it.
exec java -jar /kafka-topic-exporter/kafka-topic-exporter-0.0.5-jar-with-dependencies.jar /kafka-topic-exporter/config/kafka-topic-exporter.properties
# kafka-topic-exporter configuration.
# HTTP port for the Prometheus scrape endpoint; matches the Dockerfile
# EXPOSE and the 'kafka-exporter:12340' target in prometheus.yml.
exporter.port=12340
# Presumably metrics not refreshed within this many seconds are dropped
# from the exporter output — confirm against kafka-topic-exporter docs.
exporter.metric.expire.seconds=10
# Kafka topic whose messages are exposed as Prometheus metrics; this is
# the topic mon_to_kafka_exporter.py produces onto.
kafka.consumer.topics=kafka_exporter_topic
# Kafka broker address (the 'kafka' docker-compose service).
bootstrap.servers=kafka:9092
# Consumer group and client identifiers for this exporter instance.
group.id=kte-group
client.id=kte
from kafka import KafkaConsumer, KafkaProducer
from kafka.errors import KafkaError
import logging
import yaml
import json
import sys
import re
import datetime
import time
# Configure root logging once at import time: timestamped INFO messages on
# stdout, so `docker logs` captures the exporter's activity.
logging.basicConfig(stream=sys.stdout,
                    format='%(asctime)s %(message)s',
                    datefmt='%m/%d/%Y %I:%M:%S %p',
                    level=logging.INFO)
# Module-level logger used throughout this script.
log = logging.getLogger(__name__)
def main():
    """CLI entry point.

    Expects one argument of the form ``<host>:<port>`` pointing at the
    Kafka broker, then starts the metric-forwarding loop.

    Exits with status 1 (after printing usage) when the argument is
    missing.
    """
    if len(sys.argv) <= 1:
        # Use the real script name in the usage text (the old message named
        # "metric-transformer.py") and exit non-zero so callers can detect
        # the misconfiguration; bare exit() returned status 0.
        print("Usage: {} kafka_host:kafka_port".format(sys.argv[0]))
        sys.exit(1)
    # Read the argument without mutating sys.argv.
    kafka_server = sys.argv[1]
    # "host:port" -> host, port; a missing port still raises IndexError,
    # matching the historical behavior.
    kafka_host = kafka_server.split(':')[0]
    kafka_port = kafka_server.split(':')[1]
    transform_messages(kafka_host=kafka_host,
                       kafka_port=kafka_port)
def transform_messages(kafka_host, kafka_port):
    """Consume MON metric responses and republish them for the exporter.

    Subscribes to the ``metric_response`` topic; every message keyed
    ``read_metric_data_response`` is reshaped into a flat
    ``{name, value, labels}`` record and produced onto
    ``kafka_exporter_topic``, where kafka-topic-exporter picks it up.
    Runs forever; per-message failures are logged and skipped.
    """
    servers = '{}:{}'.format(kafka_host, kafka_port)
    producer = KafkaProducer(bootstrap_servers=servers,
                             key_serializer=str.encode,
                             value_serializer=str.encode)
    consumer = KafkaConsumer(bootstrap_servers=servers,
                             key_deserializer=str.encode,
                             value_deserializer=str.encode)
    consumer.subscribe(["metric_response"])
    for msg in consumer:
        try:
            # Guard clauses: only read_metric_data_response messages on
            # the metric_response topic are forwarded.
            if msg.topic != "metric_response":
                continue
            if msg.key != "read_metric_data_response":
                continue
            payload = json.loads(msg.value)
            exporter_msg = {
                'name': payload['metric_name'],
                # Most recent sample in the returned series.
                'value': payload['metrics_data']['metrics_series'][-1],
                'labels': {
                    'resource_uuid': payload['resource_uuid']
                }
            }
            log.info("Message to kafka exporter: %s", exporter_msg)
            future = producer.send(topic='kafka_exporter_topic',
                                   key='kafka-exporter-key',
                                   value=json.dumps(exporter_msg))
            # Block until the broker acknowledges the send.
            log.info("Response from Kafka: %s", future.get())
        except Exception:
            # Keep the loop alive on malformed messages / broker hiccups.
            log.exception("Error processing message: ")
# Run the forwarder only when executed as a script (not on import).
if __name__ == '__main__':
    main()
......@@ -85,11 +85,11 @@
"steppedLine": false,
"targets": [
{
"expr": "kafka_exporter_topic_cpu_utilization",
"expr": "cpu_utilization",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{resource_uuid}}",
"legendFormat": "{{vdu_name}}",
"refId": "A"
}
],
......@@ -166,11 +166,11 @@
"steppedLine": false,
"targets": [
{
"expr": "kafka_exporter_topic_average_memory_utilization",
"expr": "average_memory_utilization",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{resource_uuid}}",
"legendFormat": "{{vdu_name}}",
"refId": "A"
}
],
......
......@@ -42,4 +42,4 @@ scrape_configs:
# scheme defaults to 'http'.
static_configs:
- targets: ['kafka-exporter:12340']
- targets: ['mon:8000']
......@@ -646,7 +646,7 @@ function generate_docker_images() {
BUILD_ARGS+=(--build-arg RELEASE="$RELEASE")
BUILD_ARGS+=(--build-arg REPOSITORY_KEY="$REPOSITORY_KEY")
BUILD_ARGS+=(--build-arg REPOSITORY_BASE="$REPOSITORY_BASE")
if [ -z "$TO_REBUILD" ] || echo $TO_REBUILD | grep -q KAFKA ; then
sg docker -c "docker pull wurstmeister/zookeeper" || FATAL "cannot get zookeeper docker image"
sg docker -c "docker pull wurstmeister/kafka:${KAFKA_TAG}" || FATAL "cannot get kafka docker image"
......@@ -656,6 +656,10 @@ function generate_docker_images() {
sg docker -c "docker pull mongo" || FATAL "cannot get mongo docker image"
fi
if [ -z "$TO_REBUILD" ] || echo $TO_REBUILD | grep -q PROMETHEUS ; then
sg docker -c "docker pull prom/prometheus:${PROMETHEUS_TAG}" || FATAL "cannot get prometheus docker image"
fi
if [ -n "$PULL_IMAGES" ]; then
sg docker -c "docker pull ${DOCKER_USER}/mon:${OSM_DOCKER_TAG}" || FATAL "cannot pull MON docker image"
sg docker -c "docker pull ${DOCKER_USER}/pol:${OSM_DOCKER_TAG}" || FATAL "cannot pull POL docker image"
......@@ -722,6 +726,7 @@ function cmp_overwrite() {
function generate_config_log_folders() {
echo "Generating config and log folders"
$WORKDIR_SUDO cp -b ${OSM_DEVOPS}/installers/docker/docker-compose.yaml $OSM_DOCKER_WORK_DIR/docker-compose.yaml
$WORKDIR_SUDO cp -b ${OSM_DEVOPS}/installers/docker/prometheus.yml $OSM_DOCKER_WORK_DIR/prometheus.yml
echo "Finished generation of config and log folders"
}
......@@ -791,15 +796,18 @@ function deploy_lightweight() {
OSM_KEYSTONE_PORT=5000
OSM_UI_PORT=80
OSM_MON_PORT=8662
OSM_PROM_PORT=9090
OSM_PROM_HOSTPORT=9091
[ -n "$INSTALL_ELK" ] && OSM_ELK_PORT=5601
[ -n "$INSTALL_PERFMON" ] && OSM_PM_PORT=3000
if [ -n "$NO_HOST_PORTS" ]; then
OSM_PORTS+=(OSM_NBI_PORTS=$OSM_NBI_PORT)
OSM_PORTS+=(OSM_RO_PORTS=$OSM_RO_PORT)
OSM_PORTS+=(OSM_KEYSTONE_PORTS=$OSM_KEYSTONE_PORT)
OSM_PORTS+=(OSM_UI_PORTS=$OSM_UI_PORT)
OSM_PORTS+=(OSM_MON_PORTS=$OSM_MON_PORT)
OSM_PORTS+=(OSM_PROM_PORTS=$OSM_PROM_PORT)
[ -n "$INSTALL_PERFMON" ] && OSM_PORTS+=(OSM_PM_PORTS=$OSM_PM_PORT)
[ -n "$INSTALL_ELK" ] && OSM_PORTS+=(OSM_ELK_PORTS=$OSM_ELK_PORT)
else
......@@ -808,6 +816,7 @@ function deploy_lightweight() {
OSM_PORTS+=(OSM_KEYSTONE_PORTS=$OSM_KEYSTONE_PORT:$OSM_KEYSTONE_PORT)
OSM_PORTS+=(OSM_UI_PORTS=$OSM_UI_PORT:$OSM_UI_PORT)
OSM_PORTS+=(OSM_MON_PORTS=$OSM_MON_PORT:$OSM_MON_PORT)
OSM_PORTS+=(OSM_PROM_PORTS=$OSM_PROM_HOSTPORT:$OSM_PROM_PORT)
[ -n "$INSTALL_PERFMON" ] && OSM_PORTS+=(OSM_PM_PORTS=$OSM_PM_PORT:$OSM_PM_PORT)
[ -n "$INSTALL_ELK" ] && OSM_PORTS+=(OSM_ELK_PORTS=$OSM_ELK_PORT:$OSM_ELK_PORT)
fi
......@@ -816,6 +825,7 @@ function deploy_lightweight() {
echo "export TAG=${OSM_DOCKER_TAG}" | $WORKDIR_SUDO tee --append $OSM_DOCKER_WORK_DIR/osm_ports.sh
echo "export DOCKER_USER=${DOCKER_USER}" | $WORKDIR_SUDO tee --append $OSM_DOCKER_WORK_DIR/osm_ports.sh
echo "export KAFKA_TAG=${KAFKA_TAG}" | $WORKDIR_SUDO tee --append $OSM_DOCKER_WORK_DIR/osm_ports.sh
echo "export PROMETHEUS_TAG=${PROMETHEUS_TAG}" | $WORKDIR_SUDO tee --append $OSM_DOCKER_WORK_DIR/osm_ports.sh
......@@ -1075,6 +1085,7 @@ OSM_WORK_DIR="/etc/osm"
OSM_DOCKER_TAG=latest
DOCKER_USER=osm
KAFKA_TAG=2.11-1.0.2
PROMETHEUS_TAG=v2.4.3
while getopts ":hy-:b:r:k:u:R:l:p:D:o:m:H:S:s:w:t:" o; do
case "${o}" in
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment