package com.ctcc.kafka.examples;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.ctcc.utils.ConfigUtils;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
/**
* 消费者演示类
* @author heaven
*
*/
public class ConsumerDemo {
    private static final Logger logger = LoggerFactory.getLogger(ConsumerDemo.class);

    /** Topic used as a fallback when none is supplied on the command line. */
    private static final String DEFAULT_TOPIC = "wanghouda";

    /** High-level consumer connector; owns the ZooKeeper session. */
    private final ConsumerConnector consumer;

    /** Topic this demo subscribes to. */
    private final String topic;

    /**
     * Builds a high-level consumer whose connection settings are loaded from
     * external configuration via {@link ConfigUtils}.
     *
     * @param topic name of the Kafka topic to consume from
     */
    public ConsumerDemo(String topic) {
        Properties props = new Properties();
        // ZooKeeper ensemble the consumer coordinates through
        props.put("zookeeper.connect", ConfigUtils.getType("zookeeper.connect"));
        // Consumer group id (offsets are tracked per group)
        props.put("group.id", ConfigUtils.getType("group.id"));
        // ZooKeeper session timeout
        props.put("zookeeper.session.timeout.ms", ConfigUtils.getType("zookeeper.session.timeout.ms"));
        // Maximum time zkclient waits to establish the ZooKeeper connection
        props.put("zookeeper.connection.timeout.ms", ConfigUtils.getType("zookeeper.connection.timeout.ms"));
        // Allowed drift when syncing with the ZooKeeper leader
        props.put("zookeeper.sync.time.ms", ConfigUtils.getType("zookeeper.sync.time.ms"));
        // How often the consumer commits offset values to ZooKeeper
        props.put("auto.commit.interval.ms", ConfigUtils.getType("auto.commit.interval.ms"));
        consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
        this.topic = topic;
    }

    /**
     * Consumes messages from the topic on a single stream, printing each
     * payload, and shuts the connector down when iteration ends.
     */
    public void consumeMessage() {
        // Request a single consumer thread (stream) for the topic
        Map<String, Integer> topicCount = new HashMap<String, Integer>();
        topicCount.put(topic, Integer.valueOf(1)); // valueOf: the Integer(int) constructor is deprecated
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerStreams = consumer.createMessageStreams(topicCount);
        List<KafkaStream<byte[], byte[]>> streams = consumerStreams.get(topic);
        try {
            for (final KafkaStream<byte[], byte[]> stream : streams) {
                ConsumerIterator<byte[], byte[]> consumerIte = stream.iterator();
                while (consumerIte.hasNext()) {
                    // Decode explicitly as UTF-8; bare new String(bytes) uses the platform charset
                    System.out.println("begin consume:" + new String(consumerIte.next().message(), StandardCharsets.UTF_8));
                }
            }
        } finally {
            // consumer is final and always assigned in the constructor, so no null
            // check is needed; the finally block guarantees the ZooKeeper session
            // is released even if iteration throws.
            consumer.shutdown();
        }
    }

    /**
     * Entry point: consumes from the topic named in {@code args[0]}, falling
     * back to {@link #DEFAULT_TOPIC} when no argument is supplied.
     *
     * @param args optional; args[0] is the topic name
     */
    public static void main(String[] args) {
        String topic;
        if (args == null || args.length == 0) {
            logger.error("please input topic name!");
            // Fall back to a default rather than mutating the args parameter
            topic = DEFAULT_TOPIC;
        } else {
            topic = args[0];
        }
        ConsumerDemo simpleHLConsumer = new ConsumerDemo(topic);
        simpleHLConsumer.consumeMessage();
    }
}
// NOTE(review): removed an accidental byte-for-byte duplicate of the import
// block and the entire ConsumerDemo class that had been pasted after the
// closing brace above. Java forbids import statements after a type
// declaration and two top-level classes with the same name in one file, so
// the duplicated tail made this file uncompilable.