Kafka Basic APIs
1. Basic topic operations (admin management)
2. Producer
3. Consumer: subscribe/assign
4. Custom partitioning
5. Serialization
6. Interceptors
Common Topic admin APIs:
package com.dyz.dml;
import org.apache.kafka.clients.admin.*;
import org.apache.kafka.common.KafkaFuture;
import java.util.*;
import java.util.concurrent.ExecutionException;
public class KafkaTopicDml {
public static void main(String[] args) throws ExecutionException, InterruptedException {
Properties properties = new Properties();
properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
"CentOSA:9092,CentOSB:9092,CentOSC:9092");
AdminClient adminClient = AdminClient.create(properties);
//List existing topics
KafkaFuture<Set<String>> nameFutures = adminClient.listTopics().names();
for (String name : nameFutures.get()) {
System.out.println(name);
}
//Create a topic; the AdminClient call itself is asynchronous
CreateTopicsResult topicCreate = adminClient.createTopics(Arrays.asList(new NewTopic("topic02", 2, (short) 3)));
//Block until creation completes
topicCreate.all().get();
//Delete a topic
DeleteTopicsResult topicDelete = adminClient.deleteTopics(Arrays.asList("topic02"));
//Block until deletion completes
topicDelete.all().get();
//Describe a topic
DescribeTopicsResult describeTopics =
adminClient.describeTopics(Arrays.asList("topic01"));
Map<String, TopicDescription> tdm = describeTopics.all().get();
for (Map.Entry<String, TopicDescription> entry : tdm.entrySet()) {
System.out.println(entry.getKey()+"\t"+entry.getValue());
}
//Close the AdminClient
adminClient.close();
}
}
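Every AdminClient call above returns a KafkaFuture, so blocking on get() is only one way to consume a result. Below is a minimal sketch of the non-blocking alternative; it drops into the main method above after the AdminClient is created, and "topic03" is just an illustrative name:
//Handle topic creation asynchronously instead of blocking on get()
adminClient.createTopics(Arrays.asList(new NewTopic("topic03", 2, (short) 3)))
        .all()
        .whenComplete((ignored, error) -> {
            if (error != null) {
                System.err.println("topic creation failed: " + error.getMessage());
            } else {
                System.out.println("topic created");
            }
        });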
Common Producer APIs:
package com.dyz.quickstart;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
public class KafkaProducerQuickStart {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.class.getName());
//Create the KafkaProducer
KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(properties);
//Produce 10 messages
for (int i=0;i<10;i++){
ProducerRecord<String, String> record = new ProducerRecord<String, String>("topic01", "key" + i, "value" + i);
//Send the record to the broker
kafkaProducer.send(record);
}
//Close the producer (flushes any buffered records)
kafkaProducer.close();
}
}
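The send() above is fire-and-forget: failures surface only in the client log. A minimal sketch of confirming delivery with the standard Callback overload of send(); it replaces the plain send(record) call inside the loop above:
//Send with a completion callback: metadata on success, exception on failure
kafkaProducer.send(record, (metadata, exception) -> {
    if (exception != null) {
        exception.printStackTrace();
    } else {
        System.out.println("delivered to " + metadata.topic() + "-"
                + metadata.partition() + " @ offset " + metadata.offset());
    }
});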
Common Consumer APIs:
package com.dyz.quickstart;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Iterator;
import java.util.Properties;
import java.util.regex.Pattern;
public class KafkaConsumerQuickStart {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,StringDeserializer.class.getName());
properties.put(ConsumerConfig.GROUP_ID_CONFIG,"g2");
//Create the KafkaConsumer
KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(properties);
//Subscribe with a regex: consume every topic whose name starts with "topic"
kafkaConsumer.subscribe(Pattern.compile("^topic.*"));
//Poll loop
while (true){
//Poll, blocking for up to one second while waiting for records
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
//If any records were returned, iterate over them
if (!consumerRecords.isEmpty()){
Iterator<ConsumerRecord<String, String>> iterator = consumerRecords.iterator();
while (iterator.hasNext()){
//Read the next record
ConsumerRecord<String, String> record = iterator.next();
String topic = record.topic();
int partition = record.partition();
long offset = record.offset();
String key = record.key();
String value = record.value();
long timestamp = record.timestamp();
System.out.println(topic+"\t"+partition+"\t"+offset+"\t"+key+"\t"+value+"\t"+timestamp);
}
}
}
}
}
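With the defaults above, enable.auto.commit is true and offsets are committed in the background. A minimal sketch of taking manual control instead, so offsets are committed only after a batch has been fully processed (the extra property goes next to the others above; commitSync is the standard synchronous commit):
//Disable background offset commits (set with the other properties above)
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
//Then, in the poll loop, commit after each fully processed batch
while (true) {
    ConsumerRecords<String, String> batch = kafkaConsumer.poll(Duration.ofSeconds(1));
    for (ConsumerRecord<String, String> record : batch) {
        System.out.println(record.offset() + "\t" + record.key() + "\t" + record.value());
    }
    if (!batch.isEmpty()) {
        kafkaConsumer.commitSync(); //synchronously commit the batch's offsets
    }
}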
Consuming from manually assigned partitions:
package com.dyz.quickstart;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
public class KafkaConsumerQuickStart_custom {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,StringDeserializer.class.getName());
//Create the KafkaConsumer
KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(properties);
//Assign partitions manually instead of subscribing; this bypasses group management (no rebalancing)
List<TopicPartition> topicList = Arrays.asList(new TopicPartition("topic01", 0));
kafkaConsumer.assign(topicList);
//Position the consumer within the assigned partitions (set the offset)
//Start from the first message of partition 0
kafkaConsumer.seekToBeginning(topicList);
//Or seek to an explicit offset, e.g. offset 1 (the second message) of partition 0
// kafkaConsumer.seek(new TopicPartition("topic01",0),1);
//Poll loop
while (true){
//Poll, blocking for up to one second while waiting for records
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
//If any records were returned, iterate over them
if (!consumerRecords.isEmpty()){
Iterator<ConsumerRecord<String, String>> iterator = consumerRecords.iterator();
while (iterator.hasNext()){
//Read the next record
ConsumerRecord<String, String> record = iterator.next();
String topic = record.topic();
int partition = record.partition();
long offset = record.offset();
String key = record.key();
String value = record.value();
long timestamp = record.timestamp();
System.out.println(topic+"\t"+partition+"\t"+offset+"\t"+key+"\t"+value+"\t"+timestamp);
}
}
}
}
}
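Besides seekToBeginning and seek, assigned partitions can also be positioned by time. A minimal sketch using the standard offsetsForTimes call; the one-hour lookback is just an illustrative value, and it additionally needs java.util.Map, java.util.HashMap, and org.apache.kafka.clients.consumer.OffsetAndTimestamp imports:
//Seek each assigned partition to the first offset at or after a timestamp
Map<TopicPartition, Long> query = new HashMap<>();
query.put(new TopicPartition("topic01", 0), System.currentTimeMillis() - 60 * 60 * 1000L);
Map<TopicPartition, OffsetAndTimestamp> offsets = kafkaConsumer.offsetsForTimes(query);
offsets.forEach((tp, oat) -> {
    if (oat != null) { //null when no record exists at or after the timestamp
        kafkaConsumer.seek(tp, oat.offset());
    }
});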
Sending messages with a custom partitioner:
Kafka's default partitioning strategy: if the ProducerRecord specifies a partition, the record goes to that partition; otherwise, if the record has a key, the partition is chosen by hashing the key; with no key, partitions are chosen round-robin. The three cases map directly onto the ProducerRecord constructors, as the sketch below shows.
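A minimal sketch of the three constructor forms ("topic01" and the sample key/value are illustrative):
//Explicit partition: always goes to partition 1; the key is still stored with the record
new ProducerRecord<String, String>("topic01", 1, "key", "value");
//Key, no partition: partition = hash of the serialized key, mod the partition count
new ProducerRecord<String, String>("topic01", "key", "value");
//No key: the default partitioner spreads records round-robin
new ProducerRecord<String, String>("topic01", "value");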
- Custom partitioner class: UserDefinePartition (implements Kafka's Partitioner interface)
package com.dyz.partition;
import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.utils.Utils;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
public class UserDefinePartition implements Partitioner {
private AtomicInteger counter=new AtomicInteger(0);
/**
 * Choose the partition for a record
 * @param topic topic name
 * @param key record key (may be null)
 * @param keyBytes serialized key (may be null)
 * @param value record value
 * @param valueBytes serialized value
 * @param cluster current cluster metadata
 * @return index of the target partition
 */
@Override
public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
//Look up all partitions of the topic
List<PartitionInfo> partitions = cluster.partitionsForTopic(topic);
int numPartitions = partitions.size();
if (keyBytes==null){
//No key: round-robin via an atomic counter, masked to stay non-negative
int andIncrement = counter.getAndIncrement();
return (andIncrement & Integer.MAX_VALUE) % numPartitions;
}else {
//Key present: murmur2 hash of the key bytes, mod the partition count
return Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions;
}
}
/**
 * Called when the partitioner is closed along with the producer
 */
@Override
public void close() {
System.out.println("close");
}
/**
 * Called once with the configuration when the partitioner is created
 * @param map the producer configuration
 */
@Override
public void configure(Map<String, ?> map) {
System.out.println(map);
}
}
- Producer using the custom partitioner:
package com.dyz.partition;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
public class KafkaProducerPartition {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.class.getName());
//Register the custom partitioner implementation
properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG,UserDefinePartition.class.getName());
//Create the KafkaProducer
KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(properties);
//Produce 10 messages
for (int i=0;i<10;i++){
ProducerRecord<String, String> record = new ProducerRecord<String, String>("topic01", "key" + i, "value" + i);//keyed record: partitioned by hash of the key
// ProducerRecord<String, String> record = new ProducerRecord<String, String>("topic01","value" + i);//unkeyed record: partitioned round-robin
//Send the record to the broker
kafkaProducer.send(record);
}
//Close the producer
kafkaProducer.close();
}
}
Custom message serialization
- Custom serializer: UserDefineSerializer
package com.dyz.serializer;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Serializer;
import java.io.Serializable;
import java.util.Map;
/**
 * Custom serializer backed by Java serialization (commons-lang3 SerializationUtils)
 */
public class UserDefineSerializer implements Serializer<Object> {
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
System.out.println("configure");
}
@Override
public byte[] serialize(String topic, Object data) {
return SerializationUtils.serialize((Serializable) data);
}
@Override
public void close() {
System.out.println("close");
}
}
- Custom deserializer: UserDefineDeSerializer
package com.dyz.serializer;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Deserializer;
import java.util.Map;
/**
 * Custom deserializer backed by Java serialization
 */
public class UserDefineDeSerializer implements Deserializer<Object> {
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
System.out.println("configure");
}
@Override
public Object deserialize(String topic, byte[] data) {
return SerializationUtils.deserialize(data);
}
@Override
public void close() {
System.out.println("close");
}
}
- The entity class:
package com.dyz.serializer;
import java.io.Serializable;
import java.util.Date;
public class User implements Serializable {
private String name;
private Date birthday;
public User() {
}
public User(String name, Date birthday) {
this.name = name;
this.birthday = birthday;
}
@Override
public String toString() {
return "User{" +
"name='" + name + '\'' +
", birthday='" + birthday + '\'' +
'}';
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
}
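Before wiring the pair into a producer and consumer, it can be sanity-checked with a quick round trip. A minimal sketch using the three classes above:
//Verify that serialize/deserialize reconstructs the original value
UserDefineSerializer serializer = new UserDefineSerializer();
UserDefineDeSerializer deserializer = new UserDefineDeSerializer();
byte[] bytes = serializer.serialize("topic01", new User("dyz", new Date()));
User copy = (User) deserializer.deserialize("topic01", bytes);
System.out.println(copy); //prints the reconstructed User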
- Producer: KafkaProducerUser
package com.dyz.serializer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Date;
import java.util.Properties;
public class KafkaProducerUser {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, UserDefineSerializer.class.getName());
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,UserDefineSerializer.class.getName());
//Create the KafkaProducer
KafkaProducer<String, User> kafkaProducer = new KafkaProducer<String, User>(properties);
//Produce 10 messages
for (int i=0;i<10;i++){
ProducerRecord<String, User> record = new ProducerRecord<String, User>("topic01",
"key" + i,new User("dyz",new Date()) );
//Send the record to the broker
kafkaProducer.send(record);
}
//Close the producer
kafkaProducer.close();
}
}
- Consumer: KafkaConsumerUser
package com.dyz.serializer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Iterator;
import java.util.Properties;
import java.util.regex.Pattern;
public class KafkaConsumerUser {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, UserDefineDeSerializer.class.getName());
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,UserDefineDeSerializer.class.getName());
properties.put(ConsumerConfig.GROUP_ID_CONFIG,"g2");
//Create the KafkaConsumer
KafkaConsumer<String, User> kafkaConsumer = new KafkaConsumer<String, User>(properties);
//Subscribe with a regex: consume every topic whose name starts with "topic"
kafkaConsumer.subscribe(Pattern.compile("^topic.*"));
//Poll loop
while (true){
//Poll, blocking for up to one second while waiting for records
ConsumerRecords<String, User> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
//If any records were returned, iterate over them
if (!consumerRecords.isEmpty()){
Iterator<ConsumerRecord<String, User>> iterator = consumerRecords.iterator();
while (iterator.hasNext()){
//Read the next record
ConsumerRecord<String, User> record = iterator.next();
String topic = record.topic();
int partition = record.partition();
long offset = record.offset();
String key = record.key();
User value = record.value();
long timestamp = record.timestamp();
System.out.println(topic+"\t"+partition+"\t"+offset+"\t"+key+"\t"+value+"\t"+timestamp);
}
}
}
}
}
Custom interceptors
- The interceptor class:
package com.dyz.interceptors;
import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import java.util.Map;
public class UserDefineProducerInterceptors implements ProducerInterceptor<String, String> {
/**
 * Called before the record is sent; appends a suffix to the value
 * @param record the original record
 * @return the record that will actually be serialized and sent
 */
@Override
public ProducerRecord<String, String> onSend(ProducerRecord<String, String> record) {
return new ProducerRecord<String, String>(record.topic(),record.key(),record.value()+"--我要花开丶");
}
/**
 * Called when the broker acknowledges a record; invoked on success and on failure
 * @param recordMetadata metadata of the record that was sent
 * @param e the send error, or null if the send succeeded
 */
@Override
public void onAcknowledgement(RecordMetadata recordMetadata, Exception e) {
System.out.println("recordMetadata:"+recordMetadata+",e:"+e);
}
@Override
public void close() {
}
@Override
public void configure(Map<String, ?> map) {
}
}
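interceptor.classes accepts a comma-separated list of class names; interceptors run in list order, each receiving the previous one's output record. A minimal sketch of chaining (SecondInterceptor is a hypothetical second implementation, not defined in this article):
//Chain interceptors: they are invoked in list order for every record
properties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
        UserDefineProducerInterceptors.class.getName() + ","
                + SecondInterceptor.class.getName()); //SecondInterceptor is hypothetical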
- Producer using the interceptor:
package com.dyz.interceptors;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
/**
 * Producer configured with the custom message interceptor
 */
public class KafkaProducerInterceptors {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.class.getName());
properties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,UserDefineProducerInterceptors.class.getName());
//Create the KafkaProducer
KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(properties);
//Produce 10 messages
for (int i=0;i<10;i++){
ProducerRecord<String, String> record = new ProducerRecord<String, String>("topic02", "key" + i, "value" + i);
//Send the record to the broker
kafkaProducer.send(record);
}
//Close the producer
kafkaProducer.close();
}
}
- Consumer:
package com.dyz.interceptors;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Properties;
public class KafkaConsumerInterceptors {
public static void main(String[] args) {
//Connection settings
Properties properties = new Properties();
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"CentOSA:9092,CentOSB:9092,CentOSC:9092");
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,StringDeserializer.class.getName());
properties.put(ConsumerConfig.GROUP_ID_CONFIG,"g1");
//Create the KafkaConsumer
KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(properties);
//Subscribe to topic02
kafkaConsumer.subscribe(Arrays.asList("topic02"));
//Poll loop
while (true){
//Poll, blocking for up to one second while waiting for records
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
//If any records were returned, iterate over them
if (!consumerRecords.isEmpty()){
Iterator<ConsumerRecord<String, String>> iterator = consumerRecords.iterator();
while (iterator.hasNext()){
//Read the next record
ConsumerRecord<String, String> record = iterator.next();
String topic = record.topic();
int partition = record.partition();
long offset = record.offset();
String key = record.key();
String value = record.value();
long timestamp = record.timestamp();
System.out.println(topic+"\t"+partition+"\t"+offset+"\t"+key+"\t"+value+"\t"+timestamp);
}
}
}
}
}
Appendix: pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.dyz</groupId>
<artifactId>kafka_mq</artifactId>
<version>1.0-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.2.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.9</version>
</dependency>
</dependencies>
</project>