Java Kafka 开发:Kafka Java API 开发 - 基础案例

配置

首先在maven项目目录中的pom.xml中添加dependency

<properties>
    <kafka.version>0.9.0.0</kafka.version>
</properties>

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.11</artifactId>
    <version>${kafka.version}</version>
</dependency>

Producer 开发

开发Properties类

定义静态常量

/**
 * Shared configuration constants for the Kafka demo.
 */
public class KafkaProperties {

    /** ZooKeeper connection string (host:port), used by the old consumer API. */
    public static final String ZK = "192.168.1.113:2181";

    /** Topic the demo producer writes to and the consumer reads from. */
    public static final String TOPIC = "hello_topic";

    /** Kafka broker list (host:port) for producer metadata bootstrap. */
    public static final String BROKER_LIST = "192.168.1.113:9092";

    /** Consumer group id; referenced by the consumer's createConnector(). */
    public static final String GROUP_ID = "test_group1";

}

开发 KafkaProducer 类

创建需要的属性成员

// Target topic this producer writes to.
private String topic;

// import kafka.javaapi.producer.Producer;

// Legacy (pre-0.9 scala client) producer instance; created in the constructor.
private Producer producer;

创建构造方法

// Builds a producer for the given topic using the legacy (pre-0.9) producer API.
public KafkaProducer(String topic) {

this.topic = topic;

// Settings consumed by ProducerConfig.

Properties properties = new Properties();

// Broker list used for metadata bootstrap.

properties.put("metadata.broker.list",KafkaProperties.BROKER_LIST);

// Serializer class for message values; see kafka.serializer for alternatives.

properties.put("serializer.class","kafka.serializer.StringEncoder");

// acks=1: leader acknowledgement is sufficient.

properties.put("request.required.acks","1");

//import kafka.producer.ProducerConfig;

producer = new Producer(new ProducerConfig(properties));

}

配置工作线程

// 将KafkaProducer继承Thread

// KafkaProducer extends Thread so the send loop runs on its own thread.
public class KafkaProducer extends Thread{

@Override

// Sends "message_N" every 2 seconds, forever.
public void run() {

int messageNo = 1;

while(true) {

String message = "message_" + messageNo;

//import kafka.producer.KeyedMessage;

// KeyedMessage(topic, message): message sent with no explicit partition key.
producer.send(new KeyedMessage(topic, message));

System.out.println("Sent: " + message);

messageNo ++ ;

try{

Thread.sleep(2000);

} catch (Exception e){

// NOTE(review): swallowing InterruptedException here leaves no way to stop
// this thread cleanly — consider exiting the loop on interrupt.
e.printStackTrace();

}

}

}

}

开发 Consumer 类

开发KafkaConsumer类

定义常量和构造方法

// Topic this consumer subscribes to.
private String topic;

public KafkaConsumer(String topic){

this.topic = topic;

}

开发 createConnector 方法

// Creates a high-level consumer connector (old ZooKeeper-based consumer API).
private ConsumerConnector createConnector(){

Properties properties = new Properties();

// ZooKeeper connection string.

properties.put("zookeeper.connect",KafkaProperties.ZK);

// Consumer group id.

properties.put("group.id",KafkaProperties.GROUP_ID);

// import kafka.consumer.ConsumerConfig;

return Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));

}

配置工作线程

/**
 * Worker thread: consumes messages from {@code topic} and prints them.
 * Uses the old high-level (ZooKeeper-based) consumer API.
 * Note: the generic type parameters below were stripped by the page
 * extraction in the original and have been restored.
 */
public class KafkaConsumer extends Thread {

    @Override
    public void run() {
        // import kafka.javaapi.consumer.ConsumerConnector;
        ConsumerConnector consumer = createConnector();

        // topic -> number of streams (threads) to consume it with
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, 1);

        // topic -> list of message streams; keys and values arrive as raw bytes
        Map<String, List<KafkaStream<byte[], byte[]>>> messageStream =
                consumer.createMessageStreams(topicCountMap);

        // Exactly one stream was requested for our topic, so take the first.
        // import kafka.consumer.KafkaStream;
        KafkaStream<byte[], byte[]> stream = messageStream.get(topic).get(0);

        // import kafka.consumer.ConsumerIterator;
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();

        // hasNext() blocks until the next message arrives.
        while (iterator.hasNext()) {
            String message = new String(iterator.next().message());
            System.out.println("rec:" + message);
        }
    }

}

完整代码

KafkaProperties.java

package com.imooc.spark.kafka;

/**

* Kafka 常用配置文件

*/

/**
 * Shared configuration constants for the Kafka demo (constants holder,
 * not meant to be instantiated).
 */
public class KafkaProperties {

    /** ZooKeeper connection string (host:port), used by the old consumer API. */
    public static final String ZK = "192.168.1.113:2181";

    /** Topic the demo producer writes to and the consumer reads from. */
    public static final String TOPIC = "hello_topic";

    /** Kafka broker list (host:port) for producer metadata bootstrap. */
    public static final String BROKER_LIST = "192.168.1.113:9092";

    /** Consumer group id for the demo consumer. */
    public static final String GROUP_ID = "test_group1";

    // Utility class: prevent instantiation.
    private KafkaProperties() {
    }

}

KafkaProducer.java

package com.imooc.spark.kafka;

import kafka.javaapi.producer.Producer;

import kafka.producer.KeyedMessage;

import kafka.producer.ProducerConfig;

import java.util.Properties;

/**

* Kafka生产者

*/

/**
 * Kafka producer demo (legacy pre-0.9 producer API).
 * Sends "message_N" to the configured topic every 2 seconds until the
 * thread is interrupted.
 */
public class KafkaProducer extends Thread {

    /** Target topic this producer writes to. */
    private String topic;

    /** Legacy scala-client producer; keys and values are Strings. */
    private Producer<String, String> producer;

    /**
     * Creates a producer publishing to the given topic.
     *
     * @param topic topic the run loop publishes to
     */
    public KafkaProducer(String topic) {
        this.topic = topic;

        // Settings consumed by ProducerConfig.
        Properties properties = new Properties();

        // Broker list used for metadata bootstrap.
        properties.put("metadata.broker.list", KafkaProperties.BROKER_LIST);

        // Serializer class for message values; see kafka.serializer for alternatives.
        properties.put("serializer.class", "kafka.serializer.StringEncoder");

        // acks=1: leader acknowledgement is sufficient.
        properties.put("request.required.acks", "1");

        producer = new Producer<String, String>(new ProducerConfig(properties));
    }

    /** Sends a numbered message every 2 seconds; stops when interrupted. */
    @Override
    public void run() {
        int messageNo = 1;

        while (!Thread.currentThread().isInterrupted()) {
            String message = "message_" + messageNo;

            // KeyedMessage(topic, message): message sent with no explicit partition key.
            producer.send(new KeyedMessage<String, String>(topic, message));
            System.out.println("Sent: " + message);
            messageNo++;

            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so the loop condition sees it and exits,
                // instead of swallowing the interrupt and spinning forever.
                Thread.currentThread().interrupt();
            }
        }
    }

}

KafkaConsumer 类

package com.imooc.spark.kafka;

import kafka.consumer.Consumer;

import kafka.consumer.ConsumerConfig;

import kafka.consumer.ConsumerIterator;

import kafka.consumer.KafkaStream;

import kafka.javaapi.consumer.ConsumerConnector;

import java.util.HashMap;

import java.util.List;

import java.util.Map;

import java.util.Properties;

/**
 * Kafka consumer demo (old high-level, ZooKeeper-based consumer API).
 * Prints every message received on the configured topic.
 * Note: the generic type parameters in run() were stripped by the page
 * extraction in the original and have been restored.
 */
public class KafkaConsumer extends Thread {

    /** Topic this consumer subscribes to. */
    private String topic;

    public KafkaConsumer(String topic) {
        this.topic = topic;
    }

    /** Builds a consumer connector from the ZooKeeper address and group id. */
    private ConsumerConnector createConnector() {
        Properties properties = new Properties();

        // ZooKeeper connection string (the old consumer API coordinates via ZK).
        properties.put("zookeeper.connect", KafkaProperties.ZK);

        // Consumer group id.
        properties.put("group.id", KafkaProperties.GROUP_ID);

        return Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
    }

    @Override
    public void run() {
        ConsumerConnector consumer = createConnector();

        // topic -> number of streams (threads) to consume it with
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, 1);

        // topic -> list of message streams; keys and values arrive as raw bytes
        Map<String, List<KafkaStream<byte[], byte[]>>> messageStream =
                consumer.createMessageStreams(topicCountMap);

        // Exactly one stream was requested for our topic, so take the first.
        KafkaStream<byte[], byte[]> stream = messageStream.get(topic).get(0);
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();

        // hasNext() blocks until the next message arrives.
        while (iterator.hasNext()) {
            String message = new String(iterator.next().message());
            System.out.println("rec:" + message);
        }
    }

}

测试类 KafkaClientApp.java

package com.imooc.spark.kafka;

/**

* Kafka Java API 测试

*/

/**
 * Demo driver: starts one producer thread and one consumer thread on the
 * shared demo topic.
 */
public class KafkaClientApp {

    public static void main(String[] args) {
        final String topic = KafkaProperties.TOPIC;

        Thread producerThread = new KafkaProducer(topic);
        Thread consumerThread = new KafkaConsumer(topic);

        producerThread.start();
        consumerThread.start();
    }

}

References

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值