public ConsumerKafka() {
    // Bind the configuration bundle (kafka_int.properties).
    ResourceBundle rb = ResourceBundle.getBundle("kafka_int", Locale.getDefault());
    Properties originalProps = new Properties();
    // ZooKeeper connection string; brokers are discovered (and load-balanced) via ZK.
    originalProps.put("zookeeper.connect", rb.getString("consumer.zookeeper.connect").trim());
    // Consumer group id — identifies this consumer's group.
    originalProps.put("group.id", rb.getString("consumer.group.id").trim());
    // ZooKeeper session timeout.
    originalProps.put("zookeeper.session.timeout.ms", rb.getString("consumer.zookeeper.session.timeout.ms").trim());
    // ZooKeeper sync time.
    originalProps.put("zookeeper.sync.time.ms", rb.getString("consumer.zookeeper.sync.time.ms").trim());
    // Interval between automatic offset commits.
    originalProps.put("auto.commit.interval.ms", rb.getString("consumer.auto.commit.interval.ms").trim());
    // Offset reset policy when no committed offset exists.
    // Previously hard-coded to "smallest", which silently ignored the
    // consumer.auto.offset.reset key declared in kafka_int.properties.
    // Now read from config, falling back to "smallest" to preserve the
    // old behavior when the key is absent.
    originalProps.put("auto.offset.reset",
            rb.containsKey("consumer.auto.offset.reset")
                    ? rb.getString("consumer.auto.offset.reset").trim()
                    : "smallest");
    // Message serializer class.
    originalProps.put("serializer.class", rb.getString("consumer.serializer.class").trim());
    // Topic to consume; null when topic.alarmTopic is not configured.
    TOPIC = rb.containsKey("topic.alarmTopic") ? rb.getString("topic.alarmTopic").trim() : null;
    // Build the consumer connector from the assembled properties.
    consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(originalProps));
    this.webSocketUtil = ApplicationContextProvider.getBean(WebSocketUtil.class);
}
kafka_int.properties configuration file:
consumer.zookeeper.connect=127.0.0.1:8099
consumer.group.id=aralm_popup1
consumer.zookeeper.session.timeout.ms=10000
consumer.zookeeper.sync.time.ms=200
consumer.auto.commit.interval.ms=1000
consumer.auto.offset.reset=largest
consumer.serializer.class=kafka.serializer.StringEncoder
topic.alarmTopic=test1