Kafka monitoring: an example of getting the total message count of a specified topic


Posted in Python on December 23, 2019

Without further ado, here is the code.

import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.TopicAndPartition;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;

import java.util.*;
import java.util.Map.Entry;

public class KafkaOffsetTools {
    public final static String KAFKA_TOPIC_NAME_ADAPTER = "sample";
    public final static String KAFKA_TOPIC_NAME_EXCEPTION = "exception";
    public final static String KAFKA_TOPIC_NAME_AUDIT = "audit";
    private static final String rawTopicTotal = "rawTopicTotalRecordCounter";
    private static final String avroTopicTotal = "avroTopicTotalRecordCounter";
    private static final String exceptionTopicTotal = "exceptionTopicTotalRecordCounter";

    public KafkaOffsetTools() {
    }

    // Ask the partition leader for the latest (or earliest) offset of one partition.
    public static long getLastOffset(SimpleConsumer consumer, String topic,
                                     int partition, long whichTime, String clientName) {
        TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
        Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo =
                new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
        requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
        kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(
                requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName);
        OffsetResponse response = consumer.getOffsetsBefore(request);

        if (response.hasError()) {
            System.err.println("Error fetching offset data from the broker. Reason: "
                    + response.errorCode(topic, partition));
            return 0;
        }
        long[] offsets = response.offsets(topic, partition);
        return offsets[0];
    }

    // Query the seed brokers for the topic's metadata and collect the leader of each partition.
    private TreeMap<Integer, PartitionMetadata> findLeader(List<String> a_seedBrokers, String a_topic) {
        TreeMap<Integer, PartitionMetadata> map = new TreeMap<Integer, PartitionMetadata>();
        for (String seed : a_seedBrokers) {
            SimpleConsumer consumer = null;
            try {
                String[] hostAndPort = seed.split(":");
                consumer = new SimpleConsumer(hostAndPort[0], Integer.valueOf(hostAndPort[1]), 100000, 64 * 1024,
                        "leaderLookup" + new Date().getTime());
                List<String> topics = Collections.singletonList(a_topic);
                TopicMetadataRequest req = new TopicMetadataRequest(topics);
                kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);

                List<TopicMetadata> metaData = resp.topicsMetadata();
                for (TopicMetadata item : metaData) {
                    for (PartitionMetadata part : item.partitionsMetadata()) {
                        map.put(part.partitionId(), part);
                    }
                }
            } catch (Exception e) {
                System.out.println("Error communicating with broker [" + seed
                        + "] to find leader for [" + a_topic + "]. Reason: " + e);
            } finally {
                if (consumer != null)
                    consumer.close();
            }
        }
        return map;
    }

    public static void main(String[] args) {
        String kafkaBrokerList = System.getenv("metadata.broker.list");
        if (kafkaBrokerList == null || kafkaBrokerList.length() == 0) {
            System.err.println("metadata.broker.list is not configured.");
            // for test
            kafkaBrokerList = "localhost:9092,localhost:9093";
            System.err.println("Using this broker list for test, metadata.broker.list=" + kafkaBrokerList);
        }
        // init topics with logSize = 0
        Map<String, Integer> topics = new HashMap<String, Integer>();
        topics.put(KAFKA_TOPIC_NAME_ADAPTER, 0);
        topics.put(KAFKA_TOPIC_NAME_EXCEPTION, 0);
        topics.put(KAFKA_TOPIC_NAME_AUDIT, 0);
        // init kafka broker list
        String[] kafkaHosts = kafkaBrokerList.split(",");
        if (kafkaHosts == null || kafkaHosts.length == 0) {
            System.err.println("metadata.broker.list is not configured.");
            System.exit(1);
        }
        List<String> seeds = new ArrayList<String>();
        for (int i = 0; i < kafkaHosts.length; i++) {
            seeds.add(kafkaHosts[i]);
        }

        KafkaOffsetTools kot = new KafkaOffsetTools();

        for (String topicName : topics.keySet()) {
            TreeMap<Integer, PartitionMetadata> metadatas = kot.findLeader(seeds, topicName);
            int logSize = 0;
            // Sum the latest offset of every partition to get the topic's total message count.
            for (Entry<Integer, PartitionMetadata> entry : metadatas.entrySet()) {
                int partition = entry.getKey();
                String leadBroker = entry.getValue().leader().host();
                String clientName = "Client_" + topicName + "_" + partition;
                SimpleConsumer consumer = new SimpleConsumer(leadBroker, entry.getValue().leader().port(), 100000,
                        64 * 1024, clientName);
                long readOffset = getLastOffset(consumer, topicName, partition,
                        kafka.api.OffsetRequest.LatestTime(), clientName);
                logSize += readOffset;
                if (consumer != null) consumer.close();
            }
            topics.put(topicName, logSize);
        }
        System.out.println(topics.toString());
        System.out.println(rawTopicTotal + "=" + topics.get(KAFKA_TOPIC_NAME_ADAPTER) + " " + System.currentTimeMillis());
        System.out.println(avroTopicTotal + "=" + topics.get(KAFKA_TOPIC_NAME_AUDIT) + " " + System.currentTimeMillis());
        System.out.println(exceptionTopicTotal + "=" + topics.get(KAFKA_TOPIC_NAME_EXCEPTION) + " " + System.currentTimeMillis());
    }
}
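
The class above relies on the old SimpleConsumer and kafka.javaapi classes, which only ship with the 0.8/0.9-era Scala client and were removed from later releases. As a minimal sketch of the same idea against a newer cluster, assuming the org.apache.kafka:kafka-clients dependency (0.10.1 or later), a broker at localhost:9092, and a topic named "sample" (all illustrative values, not part of the original code), the modern consumer can sum the end offsets of all partitions directly:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

public class TopicMessageCount {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Assumed broker address for illustration; replace with your own metadata.broker.list.
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props)) {
            // Collect every partition of the topic.
            List<TopicPartition> partitions = new ArrayList<TopicPartition>();
            for (PartitionInfo info : consumer.partitionsFor("sample")) {
                partitions.add(new TopicPartition(info.topic(), info.partition()));
            }
            // endOffsets() returns the log-end offset of each partition; their sum is the
            // total number of messages written, as in the SimpleConsumer version above.
            long total = 0;
            for (Map.Entry<TopicPartition, Long> entry : consumer.endOffsets(partitions).entrySet()) {
                total += entry.getValue();
            }
            System.out.println("sampleTotalRecordCounter=" + total + " " + System.currentTimeMillis());
        }
    }
}

Like the original, this counts log-end offsets, so it reflects everything ever written to the topic rather than what is currently retained on disk.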

That is all of this example of getting the total message count of a specified topic for Kafka monitoring. I hope it gives you a useful reference, and I hope you will continue to support 三水点靠木.
