1、环境准备
1.创建maven项目,导入maven依赖
<!-- kafka客户端 依赖 -->
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
2.启动kafka集群,创建主题,打开一个消费者
kafka-topics.sh --create --zookeeper zhaohui01:2181 --topic first01 --partitions 3 --replication-factor 1
kafka-console-consumer.sh --topic first01 --bootstrap-server zhaohui01:9092
2、创建生产者javaAPI
1.新API
package com.zch.kafka.producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;
/**
* Author: zhaoHui
* Date: 2021/12/31
* Time: 12:33
* Description:
*/
/**
 * Minimal Kafka producer: sends 10 plain string messages to topic "first01".
 *
 * <p>Bug fixed: the original declared {@code Properties properties} but then
 * configured a nonexistent variable {@code props}, which does not compile.
 */
public class MyProducer {
    public static void main(String[] args) {
        // Producer configuration.
        Properties properties = new Properties();
        // Kafka broker host:port to bootstrap from.
        properties.put("bootstrap.servers", "node01:9092");
        // Wait for acks from all in-sync replicas before considering a send complete.
        properties.put("acks", "all");
        // Number of retries on transient send failures.
        properties.put("retries", 0);
        // Batch size in bytes for grouping records per partition.
        properties.put("batch.size", 16384);
        // Linger (ms) to wait for more records before sending a batch.
        properties.put("linger.ms", 1);
        // Total memory (bytes) the producer may use for buffering.
        properties.put("buffer.memory", 33554432);
        // Key/value serializers: topic payloads are plain strings.
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // Create the producer, send 10 records, then flush and release resources.
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(properties);
        for (int i = 0; i < 10; i++) {
            producer.send(new ProducerRecord<String, String>("first01", "zhaochaohui--" + i));
        }
        // close() flushes any buffered records before shutting down.
        producer.close();
    }
}
2.带回调函数
package com.zch.kafka.producer;
import com.sun.org.apache.bcel.internal.generic.NEW;
import org.apache.kafka.clients.producer.*;
import java.util.Properties;
/**
* Author: zhaoHui
* Date: 2021/12/31
* Time: 15:34
* Description:
*/
/**
 * Kafka producer with a per-record send callback that reports delivery metadata.
 *
 * <p>Bugs fixed: the callback printed {@code metadata.offset()} under the label
 * "主题:" (topic), and it dereferenced {@code metadata} without checking
 * {@code exception}, which NPEs on a failed send (metadata is null then).
 */
public class CallBackProducer {
    public static void main(String[] args) {
        // Producer configuration, using typed ProducerConfig keys.
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "zhaohui01:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // Create the producer and send 10 records, logging partition/offset on success.
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(properties);
        for (int i = 0; i < 10; i++) {
            producer.send(new ProducerRecord<String, String>("first02", "zhaohui--" + i),
                    new Callback() {
                        public void onCompletion(RecordMetadata metadata, Exception exception) {
                            // On failure metadata is null and exception is set.
                            if (exception != null) {
                                exception.printStackTrace();
                            } else {
                                System.out.println("分区:" + metadata.partition()
                                        + "-- 偏移量:" + metadata.offset());
                            }
                        }
                    });
        }
        // close() flushes pending sends and waits for outstanding callbacks.
        producer.close();
    }
}
3、创建消费者API
package com.zch.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.util.Arrays;
import java.util.Properties;
/**
* Author: zhaoHui
* Date: 2022/01/01
* Time: 13:08
* Description:consumer test
*/
/**
 * Kafka consumer that subscribes to two topics and prints each record's key/value
 * in an endless poll loop.
 *
 * <p>Bug fixed: the key/value DEserializer configs pointed at
 * {@code StringSerializer}; a consumer requires {@code StringDeserializer},
 * otherwise {@code new KafkaConsumer(...)} throws at construction time.
 */
public class MyConsumer {
    public static void main(String[] args) {
        // Consumer configuration.
        Properties properties = new Properties();
        // Cluster to connect to.
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "zhaohui01:9092");
        // Enable periodic automatic offset commits.
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        // Auto-commit interval in milliseconds.
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        // Key/value DEserializers (consumer side needs Deserializer classes).
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // Consumer group id; members of the same group share partitions.
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "__consumer");

        // Create the consumer and subscribe; one consumer may subscribe to many topics.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(properties);
        consumer.subscribe(Arrays.asList("first01", "first02"));

        // Poll forever, printing each record's key and value.
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.key() + "---" + record.value());
            }
        }
        // Unreachable while the loop runs forever; close() would commit and leave the group.
        // consumer.close();
    }
}