Kafka: Custom Serializers and Deserializers


Entity class (note that the deserializer below builds a User through the all-args constructor, so Lombok's @Data alone is not enough; @NoArgsConstructor and @AllArgsConstructor are needed as well):

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class User {

    private Integer userId;
    private String username;
}

Custom serializer:

import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Map;

public class UserSerializer implements Serializer<User> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // do nothing
    }

    @Override
    public byte[] serialize(String topic, User data) {
        try {
            // a null value is serialized as null
            if (data == null) return null;

            Integer userId = data.getUserId();
            String username = data.getUsername();
            int length = 0;
            byte[] bytes = null;

            if (null != username) {
                bytes = username.getBytes("utf-8");
                length = bytes.length;
            }

            // first 4 bytes: the userId value
            // next 4 bytes: an int holding the length of the username byte array
            // remaining bytes: the UTF-8 encoded username
            ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + length);
            // write userId
            buffer.putInt(userId);
            // write the length of the username byte array
            buffer.putInt(length);
            // write the username byte array (skip it when username was null)
            if (bytes != null) {
                buffer.put(bytes);
            }
            // return the User object as a byte array
            return buffer.array();
        } catch (UnsupportedEncodingException e) {
            throw new SerializationException("Error serializing User", e);
        }
    }

    @Override
    public void close() {
        // do nothing
    }
}
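The serializer packs each record into a fixed 4 + 4 + N byte layout. As a quick sanity check, a minimal sketch (the class name UserSerializerCheck is made up for illustration; it assumes the User and UserSerializer classes above are on the classpath) serializes a sample object and reads the fields back with a plain ByteBuffer:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class UserSerializerCheck {
    public static void main(String[] args) {
        User user = new User();
        user.setUserId(1001);
        user.setUsername("张三");

        byte[] payload = new UserSerializer().serialize("test", user);

        ByteBuffer buffer = ByteBuffer.wrap(payload);
        int userId = buffer.getInt();          // first 4 bytes
        int length = buffer.getInt();          // next 4 bytes
        byte[] nameBytes = new byte[length];
        buffer.get(nameBytes);                 // remaining bytes
        String username = new String(nameBytes, StandardCharsets.UTF_8);

        System.out.println(userId + " / " + length + " / " + username);
    }
}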

Producer:

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.HashMap;
import java.util.Map;

public class MyProducer {
    public static void main(String[] args) {

        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.25.129:9092");
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // register the custom value serializer
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, UserSerializer.class);
        KafkaProducer<String, User> producer = new KafkaProducer<>(configs);

        User user = new User();
        user.setUserId(1001);
        user.setUsername("张三");
        ProducerRecord<String, User> record = new ProducerRecord<>(
            "test",                // topic
            0,                     // partition
            user.getUsername(),    // key
            user                   // value
        );

        producer.send(record, (metadata, exception) -> {
            if (exception == null) {
                System.out.println("message sent: "
                                   + metadata.topic() + "\t"
                                   + metadata.partition() + "\t"
                                   + metadata.offset());
            } else {
                System.out.println("failed to send message: " + exception.getMessage());
            }
        });
        // close the producer (flushes any pending records first)
        producer.close();
    }
}
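As an alternative to registering the serializer classes in the config map, KafkaProducer also accepts serializer instances directly in its constructor, in which case the two *_SERIALIZER_CLASS_CONFIG entries can be dropped. A minimal sketch, assuming the same configs map as above:

// serializer instances passed explicitly; no *_SERIALIZER_CLASS_CONFIG entries needed
KafkaProducer<String, User> producer =
        new KafkaProducer<>(configs, new StringSerializer(), new UserSerializer());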

Custom deserializer:

import com.lagou.kafka.demo.entity.User;
import org.apache.kafka.common.serialization.Deserializer;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Map;

public class UserDeserializer implements Deserializer<User> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // do nothing
    }

    @Override
    public User deserialize(String topic, byte[] data) {
        // a null payload deserializes to null
        if (data == null) return null;

        ByteBuffer buffer = ByteBuffer.wrap(data);

        // read the fields in the same order the serializer wrote them
        int userId = buffer.getInt();
        int length = buffer.getInt();

        // the username bytes start right after the two int headers (offset 8)
        String username = new String(data, 8, length, StandardCharsets.UTF_8);
        return new User(userId, username);
    }

    @Override
    public void close() {
        // do nothing
    }
}
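A deserializer has to mirror the serializer's byte layout exactly, so a quick round-trip check is worth having. The following is a minimal sketch (the class name RoundTripCheck is made up for illustration) that serializes a User and feeds the bytes straight back into UserDeserializer:

public class RoundTripCheck {
    public static void main(String[] args) {
        User original = new User(1001, "张三");

        // serialize with the custom serializer, then deserialize the same bytes
        byte[] payload = new UserSerializer().serialize("test", original);
        User restored = new UserDeserializer().deserialize("test", payload);

        // with Lombok's @Data, equals() compares both fields
        System.out.println(original.equals(restored));  // expected: true
    }
}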

Consumer:

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

public class MyConsumer {
    public static void main(String[] args) {

        Map<String, Object> configs = new HashMap<>();
        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.25.129:9092");
        configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // register the custom value deserializer
        configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, UserDeserializer.class);

        // consumer group
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer-group");
        configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        configs.put(ConsumerConfig.CLIENT_ID_CONFIG, "con1");
        KafkaConsumer<String, User> consumer = new KafkaConsumer<>(configs);
        // subscribe to the topic
        consumer.subscribe(Collections.singleton("test"));

        // block until records arrive, then print them
        ConsumerRecords<String, User> records = consumer.poll(Long.MAX_VALUE);

        records.forEach(new Consumer<ConsumerRecord<String, User>>() {
            @Override
            public void accept(ConsumerRecord<String, User> record) {
                System.out.println(record.value());
            }
        });
        // close the consumer
        consumer.close();
    }
}
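The consumer above polls a single time and then exits. A long-running consumer normally polls in a loop with a bounded timeout. A minimal sketch of that pattern, using the Duration-based poll available since Kafka clients 2.0 (the class name MyLoopingConsumer is just illustrative; the configs mirror the example above):

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class MyLoopingConsumer {
    public static void main(String[] args) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.25.129:9092");
        configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, UserDeserializer.class);
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer-group");
        configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources closes the consumer when the loop is interrupted
        try (KafkaConsumer<String, User> consumer = new KafkaConsumer<>(configs)) {
            consumer.subscribe(Collections.singleton("test"));
            while (true) {
                // wait up to one second for new records, then process the batch
                ConsumerRecords<String, User> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, User> record : records) {
                    System.out.println(record.key() + " -> " + record.value());
                }
            }
        }
    }
}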

 
