Kafka 0.8 使用 Demo（注意：下面的消费者与生产者是两个独立的 public 类，需分别保存为单独的 .java 文件才能编译）
消费者:
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
/**
* @author 作者:
* @date 创建时间: 2016-7-13 下午3:37:24
* @version 版本: 1.0
*/
/**
 * Kafka 0.8 high-level consumer demo: connects through ZooKeeper, takes one
 * stream for the given topic, and prints every message it receives.
 *
 * NOTE(review): class name should be UpperCamelCase ({@code KafkaConsumer}),
 * kept as-is so existing callers are not broken.
 */
public class kafkaConsumer extends Thread{
    /** Topic to consume; fixed for the lifetime of this thread. */
    private String topic;

    public kafkaConsumer(String topic){
        super();
        this.topic = topic;
    }

    @Override
    public void run() {
        ConsumerConnector consumer = createConsumer();
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, 1); // one stream (one consuming thread) for this topic
        Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0); // the single stream requested above
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
        try {
            // hasNext() blocks until a message arrives; loop runs until shutdown/interrupt.
            while(iterator.hasNext()){
                // Decode explicitly as UTF-8: new String(byte[]) alone uses the
                // platform default charset, which is not portable across hosts.
                String message = new String(iterator.next().message(),
                        java.nio.charset.StandardCharsets.UTF_8);
                System.out.println("接收到: " + message);
            }
        } finally {
            // Release ZooKeeper/broker connections if the loop ever exits.
            consumer.shutdown();
        }
    }

    /**
     * Builds the ZooKeeper-based consumer connector.
     *
     * @return a connected {@link ConsumerConnector} for group "group2"
     */
    private ConsumerConnector createConsumer() {
        Properties properties = new Properties();
        properties.put("zookeeper.connect", "172.16.13.64:2181"); // ZooKeeper ensemble
        // Fixed: the original key "zk.connectiontimeout.ms" is a Kafka 0.7 name
        // that 0.8's ConsumerConfig silently ignores.
        properties.put("zookeeper.connection.timeout.ms", "15000");
        // Consumer group id; consumers in the same group share (partition) the topic.
        properties.put("group.id", "group2");
        // Start from the earliest available offset when no committed offset exists.
        properties.put("auto.offset.reset", "smallest");
        return Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
    }

    public static void main(String[] args) {
        new kafkaConsumer("test").start(); // consumes the pre-created topic "test"
    }
}
生产者:
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
/**
* @author 作者:
* @date 创建时间: 2016-7-13 下午2:52:43
* @version 版本: 1.0
*/
/**
 * Kafka 0.8 producer demo: sends ~20001 small JSON messages (keyed by sequence
 * number) to the given topic, throttled by a 2 ms sleep per message.
 *
 * NOTE(review): class name should be UpperCamelCase ({@code KafkaProducer}),
 * kept as-is so existing callers are not broken.
 */
public class kafkaProducer extends Thread{
    /** Destination topic; fixed for the lifetime of this thread. */
    private String topic;

    public kafkaProducer(String topic){
        super();
        this.topic = topic;
    }

    @Override
    public void run() {
        // Parameterized type instead of the original raw Producer (unchecked sends).
        Producer<String, String> producer = createProducer();
        try {
            for (int i = 0; i <= 20000; i++) {
                try {
                    Thread.sleep(2); // throttle to roughly 500 msg/s
                } catch (InterruptedException e) {
                    // Fixed: original printed the stack trace and kept looping.
                    // Restore the interrupt status and stop sending instead.
                    Thread.currentThread().interrupt();
                    return;
                }
                System.out.println(i);
                String a = "{\"number\":\""+i+"\",\"ClientAcctNo\":\"6217457934581230\",\"TranDate\":\"20170101\",\"TranTime\":\"174100123\"}";
                // Key = sequence number; with the default partitioner the key hash
                // picks the partition.
                producer.send(new KeyedMessage<String, String>(topic, String.valueOf(i), a));
            }
        } finally {
            // Fixed: original leaked the producer's broker connections.
            producer.close();
        }
    }

    /**
     * Builds a String/String producer against the broker list.
     *
     * @return a configured 0.8 {@code Producer}
     */
    private Producer<String, String> createProducer() {
        Properties properties = new Properties();
        // NOTE(review): the 0.8 producer bootstraps from metadata.broker.list and
        // presumably ignores zookeeper.connect — kept for fidelity; verify and drop.
        properties.put("zookeeper.connect", "172.16.13.123:2181,172.16.13.124:2181,172.16.13.125:2181");
        // Serializer for message values (and, by default, keys as well).
        properties.put("serializer.class", StringEncoder.class.getName());
        // Brokers used for initial metadata discovery.
        properties.put("metadata.broker.list", "172.16.13.123:9092,172.16.13.124:9092,172.16.13.125:9092");
        return new Producer<String, String>(new ProducerConfig(properties));
    }

    public static void main(String[] args) {
        new kafkaProducer("qxltest").start(); // sends to the pre-created topic "qxltest"
    }
}