Ubuntu 22.04 and Python 3.10 preparation
[osm/common.git] / osm_common / msgkafka.py
1 # -*- coding: utf-8 -*-
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 # implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import asyncio
17 import logging
18
19 from aiokafka import AIOKafkaConsumer
20 from aiokafka import AIOKafkaProducer
21 from aiokafka.errors import KafkaError
22 from osm_common.msgbase import MsgBase, MsgException
23 import yaml
24
25 __author__ = (
26 "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>, "
27 "Guillermo Calvino <guillermo.calvinosanchez@altran.com>"
28 )
29
30
class MsgKafka(MsgBase):
    """Kafka implementation of the OSM message bus.

    The synchronous entry points (write/read) drive the asyncio
    implementations (aiowrite/aioread) via asyncio.run(). Producers and
    consumers are created and stopped per operation, so the only
    persistent state is the broker address and default group_id.
    """

    def __init__(self, logger_name="msg", lock=False):
        super().__init__(logger_name, lock)
        self.host = None
        self.port = None
        self.consumer = None  # last AIOKafkaConsumer created by aioread()
        self.producer = None  # last AIOKafkaProducer created by aiowrite()
        self.broker = None  # "host:port" bootstrap-server string
        self.group_id = None  # default consumer group from connect() config

    def connect(self, config):
        """Store broker address and options taken from *config*.

        :param config: dict with mandatory keys "host" and "port" and
            optional keys "logger_name" and "group_id"
        :raises MsgException: if a mandatory key is missing
        """
        try:
            if "logger_name" in config:
                self.logger = logging.getLogger(config["logger_name"])
            self.host = config["host"]
            self.port = config["port"]
            self.broker = "{}:{}".format(self.host, self.port)
            self.group_id = config.get("group_id")
        except Exception as e:  # TODO refine
            raise MsgException(str(e))

    def disconnect(self):
        """No-op: connections are opened and closed per operation in
        aiowrite()/aioread(), so there is nothing persistent to close."""
        pass

    def write(self, topic, key, msg):
        """
        Write a message at kafka bus
        :param topic: message topic, must be string
        :param key: message key, must be string
        :param msg: message content, can be string or dictionary
        :return: None or raises MsgException on failing
        """
        retry = 2  # Try two times
        while retry:
            try:
                asyncio.run(self.aiowrite(topic=topic, key=key, msg=msg))
                break
            except Exception as e:
                retry -= 1
                if retry == 0:
                    raise MsgException(
                        "Error writing {} topic: {}".format(topic, str(e))
                    )

    def read(self, topic):
        """
        Read from one or several topics.
        :param topic: can be str: single topic; or str list: several topics
        :return: topic, key, message; or None
        :raises MsgException: on any error while reading
        """
        try:
            return asyncio.run(self.aioread(topic))
        except MsgException:
            raise
        except Exception as e:
            raise MsgException("Error reading {} topic: {}".format(topic, str(e)))

    async def aiowrite(self, topic, key, msg):
        """
        Asyncio write
        :param topic: str kafka topic
        :param key: str kafka key
        :param msg: str or dictionary kafka message (serialized as YAML)
        :return: None
        :raises MsgException: wrapping any failure to publish
        """
        producer = None
        try:
            producer = self.producer = AIOKafkaProducer(
                key_serializer=str.encode,
                value_serializer=str.encode,
                bootstrap_servers=self.broker,
            )
            await producer.start()
            # send_and_wait (instead of fire-and-forget send) surfaces
            # delivery errors here, where they are wrapped into
            # MsgException, instead of during the implicit flush done by
            # stop() outside this except clause
            await producer.send_and_wait(
                topic=topic,
                key=key,
                value=yaml.safe_dump(msg, default_flow_style=True),
            )
        except Exception as e:
            raise MsgException(
                "Error publishing topic '{}', key '{}': {}".format(topic, key, e)
            )
        finally:
            # guard: the constructor itself may have raised, in which case
            # there is no producer to stop (the unguarded original could
            # mask the real error with an AttributeError on None)
            if producer is not None:
                await producer.stop()

    async def aioread(
        self,
        topic,
        callback=None,
        aiocallback=None,
        group_id=None,
        from_beginning=None,
        **kwargs
    ):
        """
        Asyncio read from one or several topics.
        :param topic: can be str: single topic; or str list: several topics
        :param callback: synchronous callback function that will handle the message in kafka bus
        :param aiocallback: async callback function that will handle the message in kafka bus
        :param group_id: kafka group_id to use. Can be False (set group_id to None), None (use general group_id provided
            at connect inside config), or a group_id string
        :param from_beginning: if True, messages will be obtained from beginning instead of only new ones.
            If group_id is supplied, only the not processed messages by other worker are obtained.
            If group_id is None, all messages stored at kafka are obtained.
        :param kwargs: optional keyword arguments for callback function
        :return: If no callback defined, it returns (topic, key, message)
        :raises MsgException: on any kafka error
        """
        if group_id is False:
            group_id = None
        elif group_id is None:
            group_id = self.group_id
        topic_list = topic if isinstance(topic, (list, tuple)) else (topic,)
        consumer = None
        try:
            consumer = self.consumer = AIOKafkaConsumer(
                bootstrap_servers=self.broker,
                group_id=group_id,
                auto_offset_reset="earliest" if from_beginning else "latest",
            )
            await consumer.start()
            consumer.subscribe(topic_list)

            async for message in consumer:
                # YAML-decode key and value to mirror the safe_dump
                # performed by aiowrite()
                msg_key = yaml.safe_load(message.key)
                msg_value = yaml.safe_load(message.value)
                if callback:
                    callback(message.topic, msg_key, msg_value, **kwargs)
                elif aiocallback:
                    await aiocallback(message.topic, msg_key, msg_value, **kwargs)
                else:
                    # no callback: deliver the first message and stop reading
                    return message.topic, msg_key, msg_value
        except KafkaError as e:
            raise MsgException(str(e))
        finally:
            # guard against the constructor having raised before a
            # consumer existed (avoids AttributeError masking the error)
            if consumer is not None:
                await consumer.stop()