Kafka 0.8 Usage Demo

Consumer:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

/**
 * Kafka 0.8 high-level consumer demo (old Scala consumer API).
 *
 * @author
 * @date 2016-7-13 15:37:24
 * @version 1.0
 */
public class kafkaConsumer extends Thread {

    private final String topic;

    public kafkaConsumer(String topic) {
        super();
        this.topic = topic;
    }

    @Override
    public void run() {
        ConsumerConnector consumer = createConsumer();
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, 1); // consume this topic with a single stream (thread)
        Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0); // take the single stream for this topic
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
        while (iterator.hasNext()) {
            String message = new String(iterator.next().message());
            System.out.println("Received: " + message);
        }
    }

    private ConsumerConnector createConsumer() {
        Properties properties = new Properties();
        properties.put("zookeeper.connect", "172.16.13.64:2181"); // ZooKeeper used by the old high-level consumer
        properties.put("zookeeper.connection.timeout.ms", "15000");
        properties.put("group.id", "group2"); // consumer group id; consumers in the same group split the topic's partitions among themselves
        properties.put("auto.offset.reset", "smallest"); // start from the earliest offset when the group has no committed offset
        return Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
    }

    public static void main(String[] args) {
        new kafkaConsumer("test").start(); // "test" is a topic already created on the Kafka cluster
    }

}
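
The high-level consumer can also fan a topic out across several threads by requesting more than one stream in topicCountMap. The following is a minimal sketch, not part of the original demo: it reuses the same ZooKeeper and group.id settings as createConsumer() above, assumes the "test" topic has at least two partitions, and adds a shutdown hook so consumer.shutdown() releases the group registration cleanly.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class MultiThreadConsumerSketch {

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put("zookeeper.connect", "172.16.13.64:2181");
        properties.put("group.id", "group2");
        properties.put("auto.offset.reset", "smallest");
        final ConsumerConnector consumer =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));

        // Ask for two streams; each stream is consumed on its own thread.
        int streamCount = 2; // assumption: the topic has at least 2 partitions
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put("test", streamCount);
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                consumer.createMessageStreams(topicCountMap);

        ExecutorService pool = Executors.newFixedThreadPool(streamCount);
        for (final KafkaStream<byte[], byte[]> stream : streams.get("test")) {
            pool.submit(new Runnable() {
                public void run() {
                    ConsumerIterator<byte[], byte[]> it = stream.iterator();
                    while (it.hasNext()) {
                        System.out.println(Thread.currentThread().getName()
                                + " received: " + new String(it.next().message()));
                    }
                }
            });
        }

        // Release the ZooKeeper registration and commit offsets on JVM exit.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                consumer.shutdown();
            }
        });
    }
}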

Producer:

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;

/**
 * Kafka 0.8 producer demo (old Scala producer API).
 *
 * @author
 * @date 2016-7-13 14:52:43
 * @version 1.0
 */
public class kafkaProducer extends Thread {

    private final String topic;

    public kafkaProducer(String topic) {
        super();
        this.topic = topic;
    }

    @Override
    public void run() {
        Producer<String, String> producer = createProducer();
        for (int i = 0; i <= 20000; i++) {
            try {
                Thread.sleep(2); // throttle: roughly one message every 2 ms
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            System.out.println(i);
            String a = "{\"number\":\"" + i + "\",\"ClientAcctNo\":\"6217457934581230\",\"TranDate\":\"20170101\",\"TranTime\":\"174100123\"}";
            // key = message number, value = JSON payload
            producer.send(new KeyedMessage<String, String>(topic, String.valueOf(i), a));
        }
        producer.close();
    }

    private Producer<String, String> createProducer() {
        Properties properties = new Properties();
        properties.put("serializer.class", StringEncoder.class.getName());
        // Kafka brokers; the 0.8 producer connects to brokers directly and does not need ZooKeeper
        properties.put("metadata.broker.list", "172.16.13.123:9092,172.16.13.124:9092,172.16.13.125:9092");
        return new Producer<String, String>(new ProducerConfig(properties));
    }

    public static void main(String[] args) {
        new kafkaProducer("qxltest").start(); // "qxltest" is a topic already created on the Kafka cluster
    }

}
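
Besides the single-message send used above, the old producer accepts reliability and batching settings through ProducerConfig and can send a whole list of messages in one call. Below is a small sketch under those assumptions; request.required.acks, producer.type and batch.num.messages are standard Kafka 0.8 producer configs, while the topic name and batch size here are illustrative.

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;

public class BatchProducerSketch {

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put("metadata.broker.list", "172.16.13.123:9092,172.16.13.124:9092,172.16.13.125:9092");
        properties.put("serializer.class", StringEncoder.class.getName());
        properties.put("request.required.acks", "1"); // wait for the partition leader to acknowledge each request
        properties.put("producer.type", "async");     // buffer messages and ship them in batches
        properties.put("batch.num.messages", "200");  // flush after 200 buffered messages (illustrative value)

        Producer<String, String> producer = new Producer<String, String>(new ProducerConfig(properties));

        // Build a small batch and send it in a single call.
        List<KeyedMessage<String, String>> batch = new ArrayList<KeyedMessage<String, String>>();
        for (int i = 0; i < 10; i++) {
            batch.add(new KeyedMessage<String, String>("qxltest", String.valueOf(i), "batch message " + i));
        }
        producer.send(batch);

        producer.close();
    }
}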

