import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.ConsumerTimeoutException;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import com.newegg.mq.conf.Config;
import com.newegg.mq.custom.MessageDecoder;
import com.newegg.mq.custom.MessageMetaData;
import com.newegg.mq.custom.bean.MessageData;
/**
* Asynchronous message receiver built on the Kafka high-level consumer.
* Subclasses should override {@link #handleMessage(MessageMetaData, MessageData)}
* to process each message.
* @author bw67
*
*/
public class MessageAsynReceiver {
private Properties props;
private ConsumerConnector connector;
private ConsumerIterator<String,MessageData> stream;
public MessageAsynReceiver(){
setProperties(new Properties());
addProperty("zookeeper.connect", Config.getValue("message.consumer.connect"));
addProperty("group.id", Config.getValue("message.consumer.group"));
}
public MessageAsynReceiver(Properties props){
this.props = props;
}
/**
* add a consumer property, e.g. "consumer.timeout.ms"
* @param key property name
* @param val property value
*/
public void addProperty(String key, String val){
props.setProperty(key, val);
}
public void setProperties(Properties props){
this.props = props;
}
public Properties getProperties(){
return props;
}
public List<String> getDefaultTopicList(){
return Config.getValueForList("default_topic_list");
}
/**
* connect to the Kafka cluster using the configured consumer properties
*/
public void connect(){
ConsumerConfig config = new ConsumerConfig(props);
connector = Consumer.createJavaConsumerConnector(config);
System.out.println("Consumer is running...");
}
/**
* build the topic-to-stream-count map; a topic may be given as "name" or "name:threadCount"
*/
private Map<String,Integer> getTopicCountMap(String...topic){
Map<String,Integer> topicCountMap = new HashMap<String, Integer>();
if(topic == null || topic.length == 0){
topic = getDefaultTopicList().toArray(new String[]{});
}
for(String tp : topic){
if(tp.indexOf(":") != -1){
//"topic:threadCount" form
String[] _tp = tp.split(":");
topicCountMap.put(_tp[0],Integer.parseInt(_tp[1]));
}else{
//plain topic name defaults to one stream
topicCountMap.put(tp, 1);
}
}
return topicCountMap;
}
/**
* consume one message from the given stream iterator
* @param topic topic the iterator belongs to
* @param streamIterator iterator over the topic's message stream
* @return true if a message was handled, false on timeout
*/
private boolean consumeOne(String topic, ConsumerIterator<String,MessageData> streamIterator){
boolean ack = false;
try{
if(streamIterator.hasNext()){
MessageAndMetadata<String,MessageData> item = streamIterator.next();
MessageMetaData mmd = new MessageMetaData();
mmd.setKey(item.key());
mmd.setOffset(item.offset());
mmd.setPartition(item.partition());
mmd.setTopic(item.topic());
handleMessage(mmd,item.message());
ack = true;
}
}catch(ConsumerTimeoutException e){
//thrown when "consumer.timeout.ms" is set and no message arrives in time
}
return ack;
}
/**
* consume the next message from one topic on the calling thread;
* the stream is created lazily on the first call and stays bound to that first topic
* @param topic
* @return true if a message was consumed
*/
public boolean consumeNextMessageFrom(String topic){
if(stream == null){
Map<String,Integer> topicCountMap = getTopicCountMap(topic);
Map<String, List<KafkaStream<String,MessageData>>> streams = connector.createMessageStreams(
topicCountMap,new MessageDecoder<String>(),new MessageDecoder<MessageData>());
stream = streams.get(topic).get(0).iterator();
}
return consumeOne(topic,stream);
}
/**
* fetch messages from the Kafka brokers, one consumer thread per stream;
* each topic may be given as "name" or "name:threadCount"
* @param topics
*/
public void receive(String...topics){
//topic-count map
Map<String,Integer> topicCountMap = getTopicCountMap(topics);
//<topic,List<stream<key,message>>>
Map<String, List<KafkaStream<String,MessageData>>> streams = connector.createMessageStreams(
topicCountMap,new MessageDecoder<String>(),new MessageDecoder<MessageData>());
//create thread pool
int threadCount = 0;
for(String topic : topicCountMap.keySet()){
threadCount += topicCountMap.get(topic);
}
ExecutorService executor = Executors.newFixedThreadPool(threadCount);
//consume messages
List<KafkaStream<String,MessageData>> messageStreamList;
for(final String topic : topicCountMap.keySet()){//each topic
messageStreamList = streams.get(topic);
for(final KafkaStream<String,MessageData> stream : messageStreamList){//each stream
executor.execute(new Runnable(){
public void run(){
while(consumeOne(topic,stream.iterator())){} //each message
}
});
}
}
//no new tasks will be submitted; the pool terminates once the consumer threads return
executor.shutdown();
// try {
// executor.awaitTermination(1, TimeUnit.HOURS);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
}
/**
* override this method to process the message payload
* @param message
*/
public void handleMessage(MessageData message){
System.out.println("date:" + message.getInDate());
}
/**
* handle one message together with its metadata;
* override this method for further processing
* @param mmd message metadata (topic, partition, offset, key)
* @param message message payload
*/
public void handleMessage(MessageMetaData mmd, MessageData message) {
/*
System.out.println("partiton:" + mmd.getPartition());
System.out.println("offset:" + mmd.getOffset());
System.out.println("key:" + mmd.getKey());
System.out.println("topic:" + mmd.getTopic());
System.out.println("hashcode:" + message.hashCode());
*/
handleMessage(message);
}
/**
* commit the offsets of the messages consumed so far
*/
public void commitOffsets(){
connector.commitOffsets();
}
/**
* close the consumer
*/
public void close(){
if (connector != null) {
try {
connector.shutdown();
}
finally {
connector = null;
}
}
stream = null;
}
public static void main(String[] args){
MessageAsynReceiver mr = new MessageAsynReceiver();
mr.connect();
//mr.receive("storm");
int count = 0;
//the loop only ends when "consumer.timeout.ms" is set and expires
while(mr.consumeNextMessageFrom("storm")){
count++;
}
System.out.println("count: " + count);
mr.close();
}
}
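//Usage sketch (not part of the original source): a subclass that overrides
//handleMessage(), as the class javadoc suggests. The class name, the timeout
//value and the topic "storm" below are illustrative assumptions.
class LoggingReceiver extends MessageAsynReceiver {
    @Override
    public void handleMessage(MessageMetaData mmd, MessageData message) {
        //process one record; the metadata carries topic/partition/offset/key
        System.out.println(mmd.getTopic() + "[" + mmd.getPartition() + "@" + mmd.getOffset() + "] key=" + mmd.getKey());
    }

    public static void main(String[] args) {
        LoggingReceiver receiver = new LoggingReceiver();
        //assumed setting: stop blocking in hasNext() after 5s so the loop can end
        receiver.addProperty("consumer.timeout.ms", "5000");
        receiver.connect();
        while (receiver.consumeNextMessageFrom("storm")) {
            receiver.commitOffsets();
        }
        receiver.close();
    }
}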
//--- MessageEncoder.java (separate source file) ---
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import kafka.serializer.Encoder;
import kafka.utils.VerifiableProperties;
/**
 * encodes a message object into bytes using Java serialization
 */
public class MessageEncoder<T> implements Encoder<T> {
public MessageEncoder(VerifiableProperties props){
}
@Override
public byte[] toBytes(T event) {
try {
//serialize the object to a byte array
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(event);
oos.close();
byte[] tmpBytes = baos.toByteArray();
baos.close();
return tmpBytes;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}
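//A possible counterpart to MessageEncoder: the MessageDecoder used by
//createMessageStreams() in MessageAsynReceiver. This is only a sketch that
//assumes it reverses the Java serialization performed in toBytes(); the
//actual com.newegg.mq.custom.MessageDecoder may differ.
//--- MessageDecoder.java (illustrative sketch) ---
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import kafka.serializer.Decoder;
import kafka.utils.VerifiableProperties;

public class MessageDecoder<T> implements Decoder<T> {
    public MessageDecoder() {
    }

    public MessageDecoder(VerifiableProperties props) {
    }

    @SuppressWarnings("unchecked")
    @Override
    public T fromBytes(byte[] bytes) {
        try {
            //reverse of MessageEncoder.toBytes(): deserialize the object
            ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
            T obj = (T) ois.readObject();
            ois.close();
            return obj;
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        return null;
    }
}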