Kafka Producer和Consumer开发基础
可以当作开发时的模板参考。
Producer开发
package com.producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;
public class KafkaProducerAPI {
    // Demo producer: sends 199 numbered messages ("1_msg" .. "199_msg") to TOPIC,
    // one per second, then closes the producer.
    // Steps: 1) start Kafka and create the topic  2) configure Properties
    //        3) send messages via the KafkaProducer API.

    /** Target topic name. */
    private static final String TOPIC = "KafkaAPIDemo";

    private final Properties props = new Properties();

    public KafkaProducerAPI() {
    }

    /**
     * Configures the producer and sends the demo messages.
     * Blocks for roughly 199 seconds (one message per second).
     */
    public void run() {
        // Serializers for record key and value (both plain strings).
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // "compression.type" is the config key recognized by the Java KafkaProducer;
        // the legacy "compression.codec" key belongs to the old Scala producer and
        // is silently ignored here (no compression would be applied).
        props.put("compression.type", "gzip");
        // Broker bootstrap address.
        props.put("bootstrap.servers", "Hadoop01:9092");

        // try-with-resources guarantees close() runs (flushing any buffered
        // records) even if sending fails part-way through.
        try (KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props)) {
            for (int data = 1; data < 200; data++) {
                String message = data + "_msg";
                // Fire-and-forget send: topic, key (the counter as text), value.
                producer.send(new ProducerRecord<String, String>(TOPIC, Integer.toString(data), message));
                System.out.println(message);
                try {
                    Thread.sleep(1000); // throttle to ~1 message per second
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                    break; // stop sending; close() still runs via try-with-resources
                }
            }
        }
    }

    public static void main(String[] args) {
        new KafkaProducerAPI().run();
    }
}
Consumer开发
package com.producer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.util.Arrays;
import java.util.Properties;
public class KafkaConsumerDemo {
    // Demo consumer: subscribes to TOPIC and prints each record's offset and
    // value forever (terminate the process to stop).

    /** Topic to subscribe to; matches the producer demo. */
    private static final String TOPIC = "KafkaAPIDemo";

    private final Properties props = new Properties();

    KafkaConsumerDemo() {
    }

    /**
     * Configures the consumer, subscribes to TOPIC, and polls in an
     * infinite loop, printing offset and value of every record received.
     */
    public void run() {
        // Broker bootstrap address. Note: the Java KafkaConsumer talks only to
        // the brokers — a "zookeeper.connect" setting is not a known consumer
        // config and would be ignored, so it is intentionally omitted.
        props.put("bootstrap.servers", "Hadoop01:9092");
        // Consumer group id; members sharing an id split the topic's partitions.
        props.put("group.id", "test");
        // Auto-commit consumed offsets once per second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        // Deserializers for record key and value (both plain strings).
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // try-with-resources releases the consumer's network resources and
        // leaves the group cleanly if anything below throws.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props)) {
            // Subscribe to the topic list for dynamically assigned partitions.
            consumer.subscribe(Arrays.asList(TOPIC));
            while (true) {
                // Block up to 1000 ms waiting for new records.
                ConsumerRecords<String, String> records = consumer.poll(1000);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.offset() + " " + record.value());
                }
            }
        }
    }

    public static void main(String[] args) {
        new KafkaConsumerDemo().run();
    }
}