pom文件需要导的依赖
<!-- Kafka Java client; version should track the broker version in use -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.2.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<!-- NOTE(review): log4j 1.x is end-of-life with known CVEs; consider
     migrating to log4j2 or logback behind the slf4j API below -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
<!-- slf4j binding that routes slf4j calls to the log4j 1.x backend above -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
</dependency>
<!-- provides SerializationUtils used by Userserializer/UserDeserializer -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.8.1</version>
</dependency>
自定义拦截器
package com.baizhi.jsy.interceptor;
import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import java.util.Map;
/**
 * Producer interceptor that stamps two demo "credential" headers onto every
 * outgoing record before it is serialized.
 *
 * NOTE(review): shipping credentials in plain-text record headers is insecure;
 * acceptable only as a demo.
 */
public class UserDefineProducerInterceptor implements ProducerInterceptor<String, Object> {
    /**
     * Adds the "user" and "password" headers to the record.
     *
     * Rebuilds the record with the full constructor so the caller-supplied
     * partition, timestamp and any pre-existing headers survive interception —
     * the previous (topic, key, value) rebuild silently dropped all three.
     */
    @Override
    public ProducerRecord<String, Object> onSend(ProducerRecord<String, Object> record) {
        ProducerRecord<String, Object> wrapRecord = new ProducerRecord<>(
                record.topic(), record.partition(), record.timestamp(),
                record.key(), record.value(), record.headers());
        wrapRecord.headers().add("user", "baizhi".getBytes());
        wrapRecord.headers().add("password", "970215".getBytes());
        return wrapRecord;
    }

    /** Invoked on broker ack or send failure; exactly one argument is non-null. */
    @Override
    public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
        System.out.println("metadata:" + metadata + ",exception:" + exception);
    }

    @Override
    public void close() {
        System.out.println("close");
    }

    @Override
    public void configure(Map<String, ?> configs) {
        System.out.println("configure");
    }
}
生产者
package com.baizhi.jsy.interceptor;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.text.DecimalFormat;
import java.util.Date;
import java.util.Properties;
/**
 * Demo producer: publishes ten {@link User} records to topic02 through the
 * user-defined interceptor, using JDK serialization for the value.
 */
public class ProductKafkaUserInterceptor {
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "Centos:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // BUG FIX: Userserializer is declared in this package
        // (com.baizhi.jsy.interceptor), not com.baizhi.jsy.serializer; the old
        // hard-coded string made the producer fail at startup. Referencing the
        // class directly is also refactor-safe.
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, Userserializer.class.getName());
        properties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, UserDefineProducerInterceptor.class.getName());
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 1024 * 1024); // 1 MiB batch buffer
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 500);          // wait up to 500 ms to fill a batch
        // Hoisted out of the loop — the format pattern is loop-invariant.
        DecimalFormat decimalFormat = new DecimalFormat("00");
        // try-with-resources guarantees the producer is closed even if send() throws.
        try (KafkaProducer<String, User> kafkaProducer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 10; i++) {
                String format = decimalFormat.format(i);
                ProducerRecord<String, User> record = new ProducerRecord<>(
                        "topic02", "key" + format, new User(format, "User" + format, new Date()));
                kafkaProducer.send(record);
            }
            kafkaProducer.flush();
        }
    }
}
消费者
package com.baizhi.jsy.interceptor;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Properties;
/**
 * Demo consumer: polls topic02 and prints each record together with the
 * headers stamped by the producer-side interceptor.
 */
public class ConsumerKafkaUserInterceptor {
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "Centos:9092");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // BUG FIX: UserDeserializer is declared in this package
        // (com.baizhi.jsy.interceptor); the old hard-coded
        // "com.baizhi.jsy.serializer.UserDeserializer" string failed at startup.
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, UserDeserializer.class.getName());
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "g2");
        KafkaConsumer<String, User> kafkaConsumer = new KafkaConsumer<>(properties);
        try {
            kafkaConsumer.subscribe(Arrays.asList("topic02"));
            while (true) {
                ConsumerRecords<String, User> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
                // Iterating an empty batch is a no-op, so the explicit
                // isEmpty() guard is unnecessary.
                for (ConsumerRecord<String, User> next : consumerRecords) {
                    StringBuilder stringBuilder = new StringBuilder();
                    for (Header header : next.headers()) {
                        stringBuilder.append(header.key()).append("\t").append(new String(header.value()));
                    }
                    System.out.println("消息头:" + stringBuilder + "\t"
                            + "topic = " + next.topic() + "\t"
                            + "key = " + next.key() + "\t"
                            + "User = " + next.value() + "\t"
                            + "offset = " + next.offset() + "\t"
                            + "partition = " + next.partition() + "\t"
                            + "timestamp = " + next.timestamp());
                    System.out.println();
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Always release the consumer's network resources and trigger a
            // clean group rebalance.
            kafkaConsumer.close();
        }
    }
}
封装对象
package com.baizhi.jsy.interceptor;
import java.io.Serializable;
import java.util.Date;
/**
 * Simple serializable value object shipped through Kafka via JDK
 * serialization (see Userserializer / UserDeserializer).
 */
public class User implements Serializable {
    // Explicit serialVersionUID pins the wire format so compatible class edits
    // (e.g. adding a method) do not break deserialization of in-flight records.
    private static final long serialVersionUID = 1L;

    private String id;
    private String name;
    private Date date;

    /** No-arg constructor required by frameworks / deserialization. */
    public User() {
    }

    public User(String id, String name, Date date) {
        this.id = id;
        this.name = name;
        this.date = date;
    }

    @Override
    public String toString() {
        return "User{" +
                "id='" + id + '\'' +
                ", name='" + name + '\'' +
                ", date=" + date +
                '}';
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }
}
序列化
package com.baizhi.jsy.interceptor;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Serializer;
import java.io.Serializable;
import java.util.Map;
/**
 * Kafka value serializer that writes any {@link Serializable} payload using
 * JDK serialization (commons-lang3 SerializationUtils).
 */
public class Userserializer implements Serializer<Object> {
    @Override
    public void configure(Map<String, ?> map, boolean b) {}

    /**
     * @param topic topic the record is headed to (unused)
     * @param data  payload; must implement {@link Serializable}
     * @return the serialized bytes, or {@code null} when {@code data} is null
     *         (Kafka's convention for null payloads)
     * @throws ClassCastException if {@code data} is not Serializable
     */
    @Override
    public byte[] serialize(String topic, Object data) {
        if (data == null) {
            // Previously a null payload was passed straight to
            // SerializationUtils; returning null lets Kafka send a null value
            // that the deserializer's null guard mirrors.
            return null;
        }
        return SerializationUtils.serialize((Serializable) data);
    }

    @Override
    public void close() {}
}
反序列化
package com.baizhi.jsy.interceptor;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Deserializer;
import java.util.Map;
/**
 * Kafka value deserializer that reconstructs a JDK-serialized payload
 * (commons-lang3 SerializationUtils).
 */
public class UserDeserializer implements Deserializer<Object> {
    @Override
    public void configure(Map<String, ?> map, boolean b) {}

    /**
     * @param s     topic the record came from (unused)
     * @param bytes serialized payload, possibly null
     * @return the decoded object, or {@code null} for a null payload
     *         (tombstone / absent value)
     */
    @Override
    public Object deserialize(String s, byte[] bytes) {
        if (bytes == null) {
            // SerializationUtils.deserialize rejects null input; Kafka delivers
            // null payloads for tombstone records, so pass null through instead
            // of crashing the poll loop.
            return null;
        }
        return SerializationUtils.deserialize(bytes);
    }

    @Override
    public void close() {}
}