Kafka: Interceptors - attaching extra message headers to records before they are sent

A ProducerInterceptor sees every record in onSend() before it is serialized and partitioned, so it can wrap the record and add whatever headers are needed.

Dependencies needed in the pom file

    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>2.2.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/log4j/log4j -->
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.17</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>1.7.25</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>1.7.25</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
      <version>3.8.1</version>
    </dependency>
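
With slf4j-log4j12 on the classpath, SLF4J routes the Kafka client logs through log4j 1.2, which needs a configuration file or the clients only print a "no appenders" warning. A minimal sketch of src/main/resources/log4j.properties (the file itself is not part of the original post; these are the usual console-appender defaults):

# Send all INFO+ logs to the console
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{HH:mm:ss} %-5p %c - %m%n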

Custom interceptor

package com.baizhi.jsy.interceptor;
import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import java.util.Map;
public class UserDefineProducerInterceptor implements ProducerInterceptor<String, User> {
    @Override
    public ProducerRecord<String, User> onSend(ProducerRecord<String, User> record) {
        // Wrap the outgoing record and attach extra headers before it is serialized.
        // Note: this constructor copies only topic, key and value; the original
        // record's partition, timestamp and headers are not carried over.
        ProducerRecord<String, User> wrapRecord =
                new ProducerRecord<>(record.topic(), record.key(), record.value());
        wrapRecord.headers().add("user", "baizhi".getBytes());
        wrapRecord.headers().add("password", "970215".getBytes());
        return wrapRecord;
    }
    @Override
    public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
        // Invoked on the producer I/O thread when the send is acknowledged or fails
        System.out.println("metadata:" + metadata + ",exception:" + exception);
    }
    @Override
    public void close() {
        System.out.println("close");
    }
    @Override
    public void configure(Map<String, ?> configs) {
        System.out.println("configure");
    }
}
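
The onSend above rebuilds the record from topic, key and value only, so a caller-chosen partition, an explicit timestamp, and any headers already on the record are dropped. If those need to survive, a sketch of a metadata-preserving onSend using the fuller ProducerRecord constructor (same interceptor, only this method changes):

    @Override
    public ProducerRecord<String, User> onSend(ProducerRecord<String, User> record) {
        // Carry over partition, timestamp and any existing headers, then add ours
        ProducerRecord<String, User> wrapRecord = new ProducerRecord<>(
                record.topic(), record.partition(), record.timestamp(),
                record.key(), record.value(), record.headers());
        wrapRecord.headers().add("user", "baizhi".getBytes());
        wrapRecord.headers().add("password", "970215".getBytes());
        return wrapRecord;
    }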

Producer

package com.baizhi.jsy.interceptor;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.text.DecimalFormat;
import java.util.Date;
import java.util.Properties;

public class ProductKafkaUserInterceptor {
    public static void main(String[] args) {
        // Create the producer
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "Centos:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // The serializer lives in this package (com.baizhi.jsy.interceptor), not com.baizhi.jsy.serializer
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, Userserializer.class.getName());
        properties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, UserDefineProducerInterceptor.class.getName());
        // Tuning parameters
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 1024 * 1024);  // buffer up to 1 MB of records per partition batch
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 500);  // wait at most 0.5 s for a batch to fill before sending
        KafkaProducer<String, User> kafkaProducer = new KafkaProducer<String, User>(properties);
        for (int i = 0; i < 10; i++) {
            DecimalFormat decimalFormat = new DecimalFormat("00");
            String format = decimalFormat.format(i);
            ProducerRecord<String, User> record = new ProducerRecord<String, User>(
                    "topic02", "key" + format, new User(format, "User" + format, new Date()));
            kafkaProducer.send(record);
        }
        kafkaProducer.flush();
        kafkaProducer.close();
    }
}
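
The interceptor's onAcknowledgement fires for every record, before any per-record Callback passed to send(). A hypothetical variant of the send inside the loop above that also registers a callback, to make that ordering visible:

            kafkaProducer.send(record, (metadata, exception) -> {
                // Runs after UserDefineProducerInterceptor.onAcknowledgement for this record
                if (exception != null) {
                    exception.printStackTrace();
                } else {
                    System.out.println("acked: " + metadata.topic() + "-"
                            + metadata.partition() + "@" + metadata.offset());
                }
            });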

Consumer

package com.baizhi.jsy.interceptor;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Properties;
public class ConsumerKafkaUserInterceptor {
    public static void main(String[] args) {
        // Create the consumer
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "Centos:9092");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // The deserializer lives in this package (com.baizhi.jsy.interceptor), not com.baizhi.jsy.serializer
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, UserDeserializer.class.getName());
        // Where to start reading when the group has no committed offset; the default is "latest"
        //properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "g2");
        KafkaConsumer<String, User> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Arrays.asList("topic02"));
        try {
            while (true){
                // Poll, blocking for up to one second while waiting for data
                ConsumerRecords<String, User> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
                // Skip empty poll results
                if (!consumerRecords.isEmpty()) {
                    Iterator<ConsumerRecord<String, User>> iterator = consumerRecords.iterator();
                    while (iterator.hasNext()){
                        ConsumerRecord<String, User> next = iterator.next();
                        // Collect the headers attached by the producer interceptor
                        StringBuilder stringBuilder = new StringBuilder();
                        for (Header header : next.headers()) {
                            stringBuilder.append(header.key()).append("=").append(new String(header.value())).append("\t");
                        }
                        String topic = next.topic();
                        String key = next.key();
                        User user = next.value();
                        long offset = next.offset();
                        int partition = next.partition();
                        long timestamp = next.timestamp();
                        System.out.println("消息头:"+stringBuilder+"\t"+"topic = " + topic+"\t"+"key = " + key+"\t"+"User = " + user+"\t"+"offset = " + offset+"\t"+"partition = " + partition+"\t"+"timestamp = " + timestamp);
                        System.out.println();
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            kafkaConsumer.close();
        }

    }
}
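
When only one specific header is needed, iterating is unnecessary: Headers also offers lastHeader(key). A small sketch reading the "user" header set by the interceptor, placed inside the record loop above:

                        Header userHeader = next.headers().lastHeader("user");
                        if (userHeader != null) {
                            System.out.println("user header = " + new String(userHeader.value()));
                        }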


The encapsulated object

package com.baizhi.jsy.interceptor;
import java.io.Serializable;
import java.util.Date;
public class User implements Serializable {
    // Explicit serialVersionUID keeps Java serialization stable even if
    // producer and consumer are compiled separately
    private static final long serialVersionUID = 1L;
    private String id;
    private String name;
    private Date date;
    public User() {
    }
    public User(String id, String name, Date date) {
        this.id = id;
        this.name = name;
        this.date = date;
    }
    @Override
    public String toString() {
        return "User{" +
                "id='" + id + '\'' +
                ", name='" + name + '\'' +
                ", date=" + date +
                '}';
    }
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public Date getDate() {
        return date;
    }
    public void setDate(Date date) {
        this.date = date;
    }
}

Serializer

package com.baizhi.jsy.interceptor;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Serializer;
import java.util.Map;
public class Userserializer implements Serializer<User> {
    @Override
    public void configure(Map<String, ?> map, boolean isKey) {}
    @Override
    public byte[] serialize(String topic, User data) {
        // SerializationUtils (commons-lang3) applies plain Java serialization,
        // which is why User must implement Serializable
        return SerializationUtils.serialize(data);
    }
    @Override
    public void close() {}
}

Deserializer

package com.baizhi.jsy.interceptor;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.kafka.common.serialization.Deserializer;
import java.util.Map;
public class UserDeserializer implements Deserializer<User> {
    @Override
    public void configure(Map<String, ?> map, boolean isKey) {}
    @Override
    public User deserialize(String topic, byte[] bytes) {
        // Mirror of Userserializer: plain Java deserialization via commons-lang3
        return SerializationUtils.deserialize(bytes);
    }
    @Override
    public void close() {}
}
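
A quick way to sanity-check the serializer/deserializer pair without a broker is a local round trip; a hypothetical test snippet:

        User original = new User("00", "User00", new java.util.Date());
        byte[] bytes = new Userserializer().serialize("topic02", original);
        User restored = new UserDeserializer().deserialize("topic02", bytes);
        System.out.println(restored);  // should print the same id, name and date as 'original'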
