Kafka monitoring: example of getting the total message count of a specified topic


Posted in Python on December 23, 2019

Without further ado, here is the code:

import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.TopicAndPartition;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
 
import java.util.*;
import java.util.Map.Entry;
 
public class KafkaOffsetTools {
    public final static String KAFKA_TOPIC_NAME_ADAPTER = "sample";
    public final static String KAFKA_TOPIC_NAME_EXCEPTION = "exception";
    public final static String KAFKA_TOPIC_NAME_AUDIT = "audit";
    private static final String rawTopicTotal = "rawTopicTotalRecordCounter";
    private static final String avroTopicTotal = "avroTopicTotalRecordCounter";
    private static final String exceptionTopicTotal = "exceptionTopicTotalRecordCounter";

    public KafkaOffsetTools() {
    }
 
    // Query the latest (or earliest) offset of a single partition via the SimpleConsumer API.
    public static long getLastOffset(SimpleConsumer consumer, String topic,
                                     int partition, long whichTime, String clientName) {
        TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
        Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo =
                new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
        requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
        kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(
                requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName);
        OffsetResponse response = consumer.getOffsetsBefore(request);

        if (response.hasError()) {
            System.err.println("Error fetching offset data from the broker. Reason: "
                    + response.errorCode(topic, partition));
            return 0;
        }
        long[] offsets = response.offsets(topic, partition);
        return offsets[0];
    }
 
    // Ask each seed broker for the topic's metadata and collect the leader of every partition.
    private TreeMap<Integer, PartitionMetadata> findLeader(List<String> a_seedBrokers, String a_topic) {
        TreeMap<Integer, PartitionMetadata> map = new TreeMap<Integer, PartitionMetadata>();
        for (String seed : a_seedBrokers) {
            SimpleConsumer consumer = null;
            try {
                String[] hostAndPort = seed.split(":");
                consumer = new SimpleConsumer(hostAndPort[0], Integer.valueOf(hostAndPort[1]), 100000, 64 * 1024,
                        "leaderLookup" + new Date().getTime());
                List<String> topics = Collections.singletonList(a_topic);
                TopicMetadataRequest req = new TopicMetadataRequest(topics);
                kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);

                List<TopicMetadata> metaData = resp.topicsMetadata();
                for (TopicMetadata item : metaData) {
                    for (PartitionMetadata part : item.partitionsMetadata()) {
                        map.put(part.partitionId(), part);
                    }
                }
            } catch (Exception e) {
                System.out.println("Error communicating with broker [" + seed
                        + "] to find leader for [" + a_topic + "]. Reason: " + e);
            } finally {
                if (consumer != null)
                    consumer.close();
            }
        }
        return map;
    }
 
    public static void main(String[] args) {
        String kafkaBrokerList = System.getenv("metadata.broker.list");
        if (kafkaBrokerList == null || kafkaBrokerList.length() == 0) {
            System.err.println("No kafka metadata.broker.list configured; it is null.");
            // for test
            kafkaBrokerList = "localhost:9092,localhost:9093";
            System.err.println("Using this broker list for test, metadata.broker.list=" + kafkaBrokerList);
        }
        // init topics, logSize = 0
        Map<String, Integer> topics = new HashMap<String, Integer>();
        topics.put(KAFKA_TOPIC_NAME_ADAPTER, 0);
        topics.put(KAFKA_TOPIC_NAME_EXCEPTION, 0);
        topics.put(KAFKA_TOPIC_NAME_AUDIT, 0);
        // init kafka broker list
        String[] kafkaHosts = kafkaBrokerList.split(",");
        if (kafkaHosts == null || kafkaHosts.length == 0) {
            System.err.println("No kafka metadata.broker.list configured; it is null.");
            System.exit(1);
        }
        List<String> seeds = new ArrayList<String>();
        for (int i = 0; i < kafkaHosts.length; i++) {
            seeds.add(kafkaHosts[i]);
        }

        KafkaOffsetTools kot = new KafkaOffsetTools();

        for (String topicName : topics.keySet()) {
            TreeMap<Integer, PartitionMetadata> metadatas = kot.findLeader(seeds, topicName);
            int logSize = 0;
            // sum the latest offset of every partition to get the topic's total message count
            for (Entry<Integer, PartitionMetadata> entry : metadatas.entrySet()) {
                int partition = entry.getKey();
                String leadBroker = entry.getValue().leader().host();
                String clientName = "Client_" + topicName + "_" + partition;
                SimpleConsumer consumer = new SimpleConsumer(leadBroker, entry.getValue().leader().port(), 100000,
                        64 * 1024, clientName);
                long readOffset = getLastOffset(consumer, topicName, partition,
                        kafka.api.OffsetRequest.LatestTime(), clientName);
                logSize += readOffset;
                if (consumer != null) consumer.close();
            }
            topics.put(topicName, logSize);
        }
        System.out.println(topics.toString());
        System.out.println(rawTopicTotal + "=" + topics.get(KAFKA_TOPIC_NAME_ADAPTER) + " " + System.currentTimeMillis());
        System.out.println(avroTopicTotal + "=" + topics.get(KAFKA_TOPIC_NAME_AUDIT) + " " + System.currentTimeMillis());
        System.out.println(exceptionTopicTotal + "=" + topics.get(KAFKA_TOPIC_NAME_EXCEPTION) + " " + System.currentTimeMillis());
    }
}
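
For comparison, newer Kafka client versions expose the same information without the long-deprecated SimpleConsumer API. The sketch below is a minimal example, assuming the org.apache.kafka:kafka-clients library (0.10.1 or later), a broker at localhost:9092, and the topic "sample" as placeholders; it sums KafkaConsumer.endOffsets() over all partitions, which corresponds to the logSize computed above.

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class TopicLogSize {
    public static void main(String[] args) {
        Properties props = new Properties();
        // assumption: a broker reachable at localhost:9092; adjust to your cluster
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        String topic = "sample"; // same topic name as KAFKA_TOPIC_NAME_ADAPTER above

        try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<byte[], byte[]>(props)) {
            // collect all partitions of the topic
            List<TopicPartition> partitions = new ArrayList<TopicPartition>();
            for (PartitionInfo p : consumer.partitionsFor(topic)) {
                partitions.add(new TopicPartition(topic, p.partition()));
            }
            // endOffsets() returns the log-end offset of each partition; summing them
            // gives the same "logSize" figure as the SimpleConsumer version above
            long total = 0;
            for (Map.Entry<TopicPartition, Long> e : consumer.endOffsets(partitions).entrySet()) {
                total += e.getValue();
            }
            System.out.println(topic + " total = " + total + " " + System.currentTimeMillis());
        }
    }
}

Note that, as in the original code, the total is the sum of each partition's latest offset, so it reflects all records ever appended rather than the records still retained after log retention or compaction.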

That is the complete example of using Kafka monitoring to get the total message count of a specified topic. I hope it serves as a useful reference, and thank you for your continued support of 三水点靠木.
