maven
org.apache.kafka
kafka-clients
0.9.0.1
org.slf4j
slf4j-log4j12
log4j
log4j
org.scala-lang
scala-library
2.10.4
com.lmax
disruptor
3.3.0
log4j2.xml
localhost:9092
10000
test
@Test
public void consumeErrorLog() {
    // Consumes log messages produced into TOPIC via the Kafka Log4jAppender,
    // using the legacy (pre-0.10) high-level consumer API (ZooKeeper-based).
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:2181");
    // "smallest" = start from the earliest offset when no committed offset exists.
    props.put("auto.offset.reset", "smallest");
    props.put("group.id", "testgroup6");
    props.put("enable.auto.commit", "true");
    props.put("zookeeper.session.timeout.ms", "400");
    // Raised from 200 to 2000: too short a sync time caused rebalance errors.
    props.put("zookeeper.sync.time.ms", "2000");
    props.put("auto.commit.interval.ms", "1000");

    ConsumerConfig consumerConfig = new kafka.consumer.ConsumerConfig(props);
    ConsumerConnector consumerConnector =
            kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig);

    // One consumer thread for this topic.
    Map<String, Integer> topicCountMap = new HashMap<>();
    int localConsumerCount = 1;
    topicCountMap.put(TOPIC, localConsumerCount);

    // createMessageStreams returns one KafkaStream per requested thread, keyed by topic.
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
            consumerConnector.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(TOPIC);

    // Blocks indefinitely in hasNext() waiting for new messages — this is a
    // demo/tutorial test, not a terminating unit test.
    streams.forEach(stream -> {
        ConsumerIterator<byte[], byte[]> it = stream.iterator();
        while (it.hasNext()) {
            // NOTE(review): uses the platform default charset; the producer side
            // presumably writes UTF-8 — confirm and pass an explicit Charset.
            System.out.println(new String(it.next().message()));
        }
    });
}
docs
如何使用Kafka实时的收集与存储这些Log4j产生的日志呢?一种方案是使用其他组件(比如Flume,或者自己开发程序)实时监控这些日志文件,然后发送至Kafka。而另外一种比较便捷的方案是使用Kafka自带的Log4jAppender,在Log4j配置文件中进行相应的配置,即可完成将Log4j产生的日志实时发送至Kafka中。