kafka的安装与启动http://blog.csdn.net/u010081710/article/details/50835039
下面是Java代码中生产者和消费者的例子。
producer采用池管理,spring容器启动的时候初始化一定数量的生产者,由连接池管理,使用对象的时候borrowObject(),使用完成后returnObject().
producer代码
package com.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.log4j.Logger;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import com.producer.PooledProducer;
import com.producer.ProducerPool;
public class ProducerService extends Thread {

    private static final Logger logger = Logger.getLogger(ProducerService.class);

    /****
     * Sends a single message to the given topic.
     * kafkaServerAdrr may be null or an empty string, in which case the broker
     * list is read from kafka.properties on the classpath.
     *
     * @param topic           topic to publish to
     * @param message         message payload
     * @param kafkaServerAdrr broker list ("host:port[,host:port]"), or null/empty
     */
    public void sendMessage(String topic, String message, String kafkaServerAdrr) {
        Producer<String, String> producer = null;
        try {
            // Fall back to the classpath configuration when no broker list is given.
            if (kafkaServerAdrr == null || "".equals(kafkaServerAdrr)) {
                producer = createProducer();
            } else {
                producer = createProducer(kafkaServerAdrr);
            }
            producer.send(new KeyedMessage<String, String>(topic, message));
        } catch (Exception e) {
            // Include the cause: swallowing it made send failures undiagnosable.
            logger.error("Class=ProducerService....Method=sendMessage....", e);
        } finally {
            // Always release the connection, even when send() throws.
            if (producer != null) {
                producer.close();
            }
        }
    }

    /****
     * Sends a batch of messages to the given topic.
     * kafkaServerAdrr may be null or an empty string, in which case the broker
     * list is read from kafka.properties on the classpath.
     *
     * @param topic           topic to publish to
     * @param messagelist     message payloads, sent in order
     * @param kafkaServerAdrr broker list ("host:port[,host:port]"), or null/empty
     */
    public void sendListMessage(String topic, List<String> messagelist, String kafkaServerAdrr) {
        Producer<String, String> producer = null;
        try {
            if (kafkaServerAdrr == null || "".equals(kafkaServerAdrr)) {
                producer = createProducer();
            } else {
                producer = createProducer(kafkaServerAdrr);
            }
            List<KeyedMessage<String, String>> keymessagelist =
                    new ArrayList<KeyedMessage<String, String>>(messagelist.size());
            for (String message : messagelist) {
                keymessagelist.add(new KeyedMessage<String, String>(topic, message));
            }
            producer.send(keymessagelist);
        } catch (Exception e) {
            logger.error("Class=ProducerService....Method=sendListMessage....", e);
        } finally {
            // Always release the connection, even when send() throws.
            if (producer != null) {
                producer.close();
            }
        }
    }

    /****
     * Sends a batch of messages using a producer borrowed from the
     * commons-pool2 managed pool. The producer is always returned to the pool,
     * even when sending fails, so the pool is not drained by errors.
     *
     * @param topic       topic to publish to
     * @param messagelist message payloads, sent in order
     */
    public void SendMessageUsePoolProducer(String topic, List<String> messagelist) {
        GenericObjectPool producertPool = null;
        PooledProducer producer = null;
        try {
            producertPool = ProducerPool.getProducerConnection();
            producer = (PooledProducer) producertPool.borrowObject();
            List<KeyedMessage> keymessagelist = new ArrayList<KeyedMessage>(messagelist.size());
            for (String message : messagelist) {
                keymessagelist.add(new KeyedMessage(topic, message));
            }
            producer.send(keymessagelist);
        } catch (Exception e) {
            logger.error("Class=ProducerService....Method=SendMessageUsePoolProducer....", e);
        } finally {
            // Return the borrowed producer even on failure; leaking it would
            // exhaust the pool after produceMaxTotal errors.
            if (producertPool != null && producer != null) {
                try {
                    producertPool.returnObject(producer);
                } catch (Exception e) {
                    logger.error("Failed to return producer to pool", e);
                }
            }
        }
    }

    /****
     * Creates a producer configured from kafka.properties on the classpath
     * (metadata.broker.list).
     *
     * @return a new producer, or null when the configuration could not be loaded
     */
    private Producer<String, String> createProducer() {
        Producer<String, String> producer = null;
        try {
            Properties props = PropertiesLoaderUtils.loadAllProperties("kafka.properties");
            Properties properties = new Properties();
            properties.put("metadata.broker.list", props.get("metadata.broker.list"));
            properties.put("serializer.class", StringEncoder.class.getName());
            producer = new Producer<String, String>(new ProducerConfig(properties));
        } catch (IOException e) {
            logger.error("Failed to load kafka.properties", e);
        }
        return producer;
    }

    /****
     * Creates a producer for an explicit broker list.
     *
     * @param kafkaServerAdrr broker list ("host:port[,host:port]")
     * @return a new producer, or null when construction failed
     */
    private Producer<String, String> createProducer(String kafkaServerAdrr) {
        Producer<String, String> producer = null;
        try {
            Properties properties = new Properties();
            properties.put("metadata.broker.list", kafkaServerAdrr);
            properties.put("serializer.class", StringEncoder.class.getName());
            producer = new Producer<String, String>(new ProducerConfig(properties));
        } catch (Exception e) {
            logger.error("Failed to create producer for brokers " + kafkaServerAdrr, e);
        }
        return producer;
    }

    /** Manual smoke test: single send, batch send, then pooled sends. */
    public static void main(String[] args) throws InterruptedException {
        ProducerService producerservice = new ProducerService();
        List<String> messagelist = new ArrayList<String>();
        messagelist.add("message1");
        messagelist.add("message2");
        messagelist.add("message3");
        messagelist.add("message4");
        messagelist.add("message5");
        String consumerAMessage = "clusterConsumerAMessage";
        producerservice.sendMessage("test-kafka0.8", consumerAMessage, "");
        producerservice.sendListMessage("test-kafka0.8", messagelist, "10.180.192.16:9092");
        for (int i = 0; i < 50; i++) {
            producerservice.SendMessageUsePoolProducer("H_BIZ_COLLECTION", messagelist);
            Thread.sleep(6000);
        }
    }
}
consumer代码,consumer可以采用多线程消费,以下代码没有用到多线程
package com.service;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.log4j.Logger;
import org.springframework.core.io.support.PropertiesLoaderUtils;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
public class ConsumerService {

    private static final Logger logger = Logger.getLogger(ConsumerService.class);

    // Streams per topic; repopulated each time a consume method is invoked.
    Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams;

    /****
     * Consumes messages using the configuration from kafka.properties on the
     * classpath (zookeeper.connect / group.id). Blocks until the stream ends.
     *
     * @param topic topic to consume from
     */
    public void DefaultConsumeMessage(String topic) {
        consume(topic, ConsumerService.createConsumer());
    }

    /*****
     * Consumes messages from an explicit zookeeper address and consumer group.
     * Blocks until the stream ends.
     *
     * @param topic         topic to consume from
     * @param zookeeperAdrr zookeeper connection string ("host:port")
     * @param groupName     consumer group id
     */
    public void ConsumeMessage(String topic, String zookeeperAdrr,
            String groupName) {
        consume(topic, ConsumerService.createConsumer(zookeeperAdrr, groupName));
    }

    /****
     * Shared blocking consume loop; was duplicated verbatim in both public
     * methods.
     *
     * @param topic    topic to consume from
     * @param consumer connector to read with; may be null when creation failed
     */
    private void consume(String topic, ConsumerConnector consumer) {
        if (consumer == null) {
            // createConsumer returns null on bad/missing configuration; fail
            // loudly here instead of with a NullPointerException below.
            logger.error("No consumer available for topic " + topic);
            return;
        }
        int count = 0;
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, 1); // a single stream for this topic
        messageStreams = consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0);
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
        while (iterator.hasNext()) {
            // Decode with an explicit charset instead of the platform default.
            String message = new String(iterator.next().message(), StandardCharsets.UTF_8);
            try {
                // NOTE(review): offsets are committed before the message is
                // processed, so a crash after this line loses the message —
                // confirm this at-most-once behaviour is intended.
                consumer.commitOffsets();
                if (message.length() > 0) {
                    System.out.println("consum: " + (++count));
                }
                // AppendFile.appendDataToFile(message);
            } catch (Exception e) {
                logger.error("Error consuming message from topic " + topic, e);
            }
        }
    }

    /****
     * Creates a consumer connector for an explicit zookeeper address and group.
     *
     * @param zookeeperAdrr zookeeper connection string ("host:port")
     * @param groupName     consumer group id
     * @return a connector, or null when either argument is null/empty or
     *         creation failed
     */
    private static ConsumerConnector createConsumer(String zookeeperAdrr, String groupName) {
        ConsumerConnector consumer = null;
        try {
            if (zookeeperAdrr != null && !"".equals(zookeeperAdrr)
                    && groupName != null && !"".equals(groupName)) {
                Properties properties = new Properties();
                properties.put("zookeeper.connect", zookeeperAdrr);
                properties.put("group.id", groupName);
                consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
            }
        } catch (Exception e) {
            logger.error("Failed to create consumer for zookeeper " + zookeeperAdrr, e);
        }
        return consumer;
    }

    /****
     * Creates a consumer connector configured from kafka.properties on the
     * classpath (zookeeper.connect / group.id).
     *
     * @return a connector, or null when the configuration could not be loaded
     */
    private static ConsumerConnector createConsumer() {
        ConsumerConnector consumer = null;
        try {
            Properties props = PropertiesLoaderUtils.loadAllProperties("kafka.properties");
            Properties properties = new Properties();
            properties.put("zookeeper.connect", props.get("zookeeper.connect"));
            properties.put("group.id", props.get("group.id"));
            consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
        } catch (IOException e) {
            logger.error("Failed to load kafka.properties", e);
        }
        return consumer;
    }

    /** Manual smoke test: blocks consuming from the default configuration. */
    public static void main(String[] args) {
        ConsumerService consumerservice = new ConsumerService();
        consumerservice.DefaultConsumeMessage("testProducer");
        // consumerservice.ConsumeMessage("20160405test", "10.180.192.16:2181","test-consumer-group");
    }
}
下面是kafka.properties的配置:
#produce
#serializer.class=StringEncoder.class.getName()
request.required.acks=1
metadata.broker.list=ip:9092
#consumer
zookeeper.connect=ip:2181
group.id=test-consumer-group
#producePool
produceMaxIdle=2
produceMaxTotal=10
produceMaxWaitMillis=3000