Baidu Kafka: receive pushed messages

ping 2025-11-21 15:25:42 +08:00
parent e9b0e01a53
commit fcbbb6056f


@@ -6,29 +6,30 @@ async def time_convert(resoucetime=None):
return beijing_time.strftime("%Y-%m-%d %H:%M:%S")
async def baidu_sms_kafka_consumer(ns={}):
import os
consumer = BaiduKafKaConsumer({
# Bootstrap endpoints
'bootstrap.servers': '120.48.10.223:9095,180.76.96.108:9095,180.76.147.36:9095',
# Security protocol
'security.protocol': 'SASL_SSL',
'ssl.endpoint.identification.algorithm': 'none',
# CA certificate file path
'ssl.ca.location': 'baidu_kafka_ca.pem',
# SASL mechanism
'sasl.mechanism': 'SCRAM-SHA-512',
# SASL username
'sasl.username': 'kaiyuanyun',
# SASL password
'sasl.password': 'Kyy250609#',
# Consumer group id
'group.id': 'kaiyuanyun_msg_group',
'auto.offset.reset': 'latest',
'fetch.message.max.bytes': '1024*512',
})
# consumer = BaiduKafKaConsumer({
# # Bootstrap endpoints
# 'bootstrap.servers': '120.48.10.223:9095,180.76.96.108:9095,180.76.147.36:9095',
# # Security protocol
# 'security.protocol': 'SASL_SSL',
# 'ssl.endpoint.identification.algorithm': 'none',
# # CA certificate file path
# 'ssl.ca.location': 'baidu_kafka_ca.pem',
# # SASL mechanism
# 'sasl.mechanism': 'SCRAM-SHA-512',
# # SASL username
# 'sasl.username': 'kaiyuanyun',
# # SASL password
# 'sasl.password': 'Kyy250609#',
# # Consumer group id
# 'group.id': 'kaiyuanyun_msg_group',
# 'auto.offset.reset': 'latest',
# 'fetch.message.max.bytes': '1024*512',
# })
# Topic(s) to subscribe to
consumer.subscribe(['kaiyuanyun_msg_topic'])
# # Topic(s) to subscribe to
# consumer.subscribe(['kaiyuanyun_msg_topic'])
import os
files = ["baidu_kafka_msg.txt", "baidu_kafka_id.txt", "baidu_kafka_error.txt"]
for filename in files:
@@ -38,26 +39,13 @@ async def baidu_sms_kafka_consumer(ns={}):
else:
pass
total_count = 0
for i in range(10):
# if i == 0:
# # Write the polling start time to the file; time format: YYYY-MM-DD HH:MM:SS
# with open('baidu_kafka_msg.txt', 'a', encoding='utf-8') as f:
# f.write(f"Polling started at: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
msg = ns.get('msg')
msg = consumer.poll(0.01) # fetch messages with a single poll
if msg is None:
if i == 10:
with open('baidu_kafka_msg.txt', 'a', encoding='utf-8') as f:
f.write(f"Message was None on poll 10 at {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
continue
elif msg.error():
if msg.error():
# Write the error details to the log file
with open('baidu_kafka_error.txt', 'a', encoding='utf-8') as f:
f.write(f"{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - Consumer error: {msg.error()}\n")
else:
total_count += 1
try:
# Parse the message payload into a dict (avoid variable-name clashes)
msg_data_ori = json.loads(msg.value().decode('utf-8'))
@@ -71,7 +59,7 @@ async def baidu_sms_kafka_consumer(ns={}):
if messageid in content:
print(f"'{messageid}' already exists in the file, skipping write")
continue
return
else:
# Append the target content to the file
with open('baidu_kafka_id.txt', 'a', encoding='utf-8') as f:
@@ -87,9 +75,7 @@ async def baidu_sms_kafka_consumer(ns={}):
exist_msg = await sor.R('baidu_kafka_msg', {'messageid': messageid, 'taskid': taskid})
if exist_msg:
print(f"Message id {messageid} already exists, skipping")
continue
# consumer.close()
# return
return
# 2. Build an ns dict with lowercase keys, mapping every field
ns_msg = {
@@ -166,15 +152,16 @@ async def baidu_sms_kafka_consumer(ns={}):
}
except Exception as e:
print(f"Processing exception: {str(e)}")
import traceback
with open('baidu_kafka_error.txt', 'w') as f:
f.write(str(e)+ traceback.format_exc())
traceback.print_exc()
return {
'status': False,
'msg': f"Processing exception: {str(e)}"
}
# Record total_count
# with open('baidu_kafka_msg.txt', 'a', encoding='utf-8') as f:
# f.write(f"Messages processed in this polling round: {total_count}\n")
consumer.close() # make sure the consumer is closed
# consumer.close() # make sure the consumer is closed
return {
'status': True,
'msg': 'Message fetch finished'
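
For reference, below is a minimal, self-contained sketch of the same consume loop written against the standard confluent_kafka Consumer API. It assumes BaiduKafKaConsumer is a thin wrapper exposing the same subscribe/poll/close interface; the broker addresses, credentials, group and topic names here are placeholders, not the real cluster values.

import json
from confluent_kafka import Consumer, KafkaError

def consume_once(max_polls=10):
    # Placeholder endpoints and credentials -- substitute the real cluster values.
    consumer = Consumer({
        'bootstrap.servers': 'broker-1:9095,broker-2:9095',
        'security.protocol': 'SASL_SSL',
        'ssl.ca.location': 'baidu_kafka_ca.pem',
        'ssl.endpoint.identification.algorithm': 'none',
        'sasl.mechanism': 'SCRAM-SHA-512',
        'sasl.username': 'example_user',
        'sasl.password': 'example_password',
        'group.id': 'example_msg_group',
        'auto.offset.reset': 'latest',
        'fetch.message.max.bytes': 512 * 1024,  # numeric value; librdkafka does not evaluate the string '1024*512'
    })
    consumer.subscribe(['example_msg_topic'])
    records = []
    try:
        for _ in range(max_polls):
            msg = consumer.poll(0.01)  # one short poll per iteration
            if msg is None:
                continue  # nothing fetched this round
            if msg.error():
                # _PARTITION_EOF is informational; log everything else.
                if msg.error().code() != KafkaError._PARTITION_EOF:
                    print(f"consumer error: {msg.error()}")
                continue
            records.append(json.loads(msg.value().decode('utf-8')))
    finally:
        consumer.close()  # always release the group membership
    return records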