# See the License for the specific language governing permissions and
# limitations under the License.
-import logging
import asyncio
-import yaml
+import logging
+
from aiokafka import AIOKafkaConsumer
from aiokafka import AIOKafkaProducer
from aiokafka.errors import KafkaError
from osm_common.msgbase import MsgBase, MsgException
-# import json
+import yaml
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>, " \
- "Guillermo Calvino <guillermo.calvinosanchez@altran.com>"
+__author__ = (
+ "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>, "
+ "Guillermo Calvino <guillermo.calvinosanchez@altran.com>"
+)
class MsgKafka(MsgBase):
- def __init__(self, logger_name='msg'):
- self.logger = logging.getLogger(logger_name)
+ def __init__(self, logger_name="msg", lock=False):
+ super().__init__(logger_name, lock)
self.host = None
self.port = None
self.consumer = None
self.producer = None
- self.loop = None
self.broker = None
self.group_id = None
self.logger = logging.getLogger(config["logger_name"])
self.host = config["host"]
self.port = config["port"]
- self.loop = asyncio.get_event_loop()
self.broker = str(self.host) + ":" + str(self.port)
self.group_id = config.get("group_id")
def disconnect(self):
try:
pass
- # self.loop.close()
except Exception as e: # TODO refine
raise MsgException(str(e))
:param msg: message content, can be string or dictionary
:return: None or raises MsgException on failing
"""
- try:
- self.loop.run_until_complete(self.aiowrite(topic=topic, key=key, msg=msg))
-
- except Exception as e:
- raise MsgException("Error writing {} topic: {}".format(topic, str(e)))
+ retry = 2 # Try two times
+ while retry:
+ try:
+ asyncio.run(self.aiowrite(topic=topic, key=key, msg=msg))
+ break
+ except Exception as e:
+ retry -= 1
+ if retry == 0:
+ raise MsgException(
+ "Error writing {} topic: {}".format(topic, str(e))
+ )
def read(self, topic):
"""
:return: topic, key, message; or None
"""
try:
- return self.loop.run_until_complete(self.aioread(topic, self.loop))
+ return asyncio.run(self.aioread(topic))
except MsgException:
raise
except Exception as e:
raise MsgException("Error reading {} topic: {}".format(topic, str(e)))
- async def aiowrite(self, topic, key, msg, loop=None):
-
- if not loop:
- loop = self.loop
+ async def aiowrite(self, topic, key, msg):
+ """
+ Asyncio write
+ :param topic: str kafka topic
+ :param key: str kafka key
+ :param msg: str or dictionary kafka message
+ :return: None
+ """
try:
- self.producer = AIOKafkaProducer(loop=loop, key_serializer=str.encode, value_serializer=str.encode,
- bootstrap_servers=self.broker)
+ self.producer = AIOKafkaProducer(
+ key_serializer=str.encode,
+ value_serializer=str.encode,
+ bootstrap_servers=self.broker,
+ )
await self.producer.start()
- await self.producer.send(topic=topic, key=key, value=yaml.safe_dump(msg, default_flow_style=True))
+ await self.producer.send(
+ topic=topic, key=key, value=yaml.safe_dump(msg, default_flow_style=True)
+ )
except Exception as e:
- raise MsgException("Error publishing topic '{}', key '{}': {}".format(topic, key, e))
+ raise MsgException(
+ "Error publishing topic '{}', key '{}': {}".format(topic, key, e)
+ )
finally:
await self.producer.stop()
- async def aioread(self, topic, loop=None, callback=None, *args):
+ async def aioread(
+ self,
+ topic,
+ callback=None,
+ aiocallback=None,
+ group_id=None,
+ from_beginning=None,
+ **kwargs
+ ):
"""
- Asyncio read from one or several topics. It blocks
+ Asyncio read from one or several topics.
:param topic: can be str: single topic; or str list: several topics
- :param loop: asyncio loop
- :callback: callback function that will handle the message in kafka bus
- :*args: optional arguments for callback function
- :return: topic, key, message
+ :param callback: synchronous callback function that will handle the message in kafka bus
+ :param aiocallback: async callback function that will handle the message in kafka bus
+    :param group_id: kafka group_id to use. Can be False (to disable group_id), None (to use the general group_id
+        provided in the config at connect time), or a group_id string
+    :param from_beginning: if True, messages will be obtained from the beginning instead of only new ones.
+        If group_id is supplied, only the messages not yet processed by other workers are obtained.
+ If group_id is None, all messages stored at kafka are obtained.
+ :param kwargs: optional keyword arguments for callback function
+    :return: If no callback is defined, it returns (topic, key, message)
"""
-
- if not loop:
- loop = self.loop
+ if group_id is False:
+ group_id = None
+ elif group_id is None:
+ group_id = self.group_id
try:
if isinstance(topic, (list, tuple)):
topic_list = topic
else:
topic_list = (topic,)
-
- self.consumer = AIOKafkaConsumer(loop=loop, bootstrap_servers=self.broker, group_id=self.group_id)
+ self.consumer = AIOKafkaConsumer(
+ bootstrap_servers=self.broker,
+ group_id=group_id,
+ auto_offset_reset="earliest" if from_beginning else "latest",
+ )
await self.consumer.start()
self.consumer.subscribe(topic_list)
async for message in self.consumer:
if callback:
- callback(message.topic, yaml.load(message.key), yaml.load(message.value), *args)
+ callback(
+ message.topic,
+ yaml.safe_load(message.key),
+ yaml.safe_load(message.value),
+ **kwargs
+ )
+ elif aiocallback:
+ await aiocallback(
+ message.topic,
+ yaml.safe_load(message.key),
+ yaml.safe_load(message.value),
+ **kwargs
+ )
else:
- return message.topic, yaml.load(message.key), yaml.load(message.value)
+ return (
+ message.topic,
+ yaml.safe_load(message.key),
+ yaml.safe_load(message.value),
+ )
except KafkaError as e:
raise MsgException(str(e))
finally: