7-1 Serialization: Integrating Kryo

1. Why use binary encoding

Kafka ships with a number of built-in serializers, but production message objects and formats are usually more complex. A common workaround is to convert the message's Java object into a JSON string: the producer serializes it with StringSerializer, and the consumer deserializes with StringDeserializer and then maps the JSON back to an object.

JSON has real advantages: it is easy to work with and convenient across languages. Its drawbacks are low performance and a large encoded payload.

When passing Java objects over the network, binary encoding is worth considering, for example Google's Protobuf, Facebook's open-source Thrift, or Hessian. Modern RPC frameworks also rely on binary serialization.

When I worked on RPC codecs I compared JDK serialization with binary codecs: in repeated JMeter tests on Java objects, the binary codecs were about 4-5x faster, and a Protobuf-encoded payload was only around one tenth the size of the equivalent JSON. Adopting a good third-party serializer is therefore worthwhile.
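To make the size difference concrete, here is a minimal sketch that encodes the same object (the KafkaMessage entity built in step 2 below) as JSON and as Kryo binary and prints both payload sizes. It assumes Kryo 5 and Jackson (com.fasterxml.jackson.databind) are on the classpath; the SizeComparison class is illustrative only, and the exact numbers will vary with the payload.

package org.example.learn2;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Output;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.example.learn2.entity.KafkaMessage;

public class SizeComparison {
    public static void main(String[] args) throws Exception {
        KafkaMessage message = new KafkaMessage();
        message.setId("123");
        message.setValue(12123L);

        // JSON encoding via Jackson
        byte[] json = new ObjectMapper().writeValueAsBytes(message);

        // Kryo binary encoding; an unregistered class also writes its class name
        Kryo kryo = new Kryo();
        kryo.setRegistrationRequired(false);
        Output output = new Output(1024, -1);   // -1 = buffer may grow without limit
        kryo.writeClassAndObject(output, message);
        byte[] binary = output.toBytes();

        System.out.println("JSON bytes: " + json.length + ", Kryo bytes: " + binary.length);
    }
}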

2. Integrating Kryo
1. Add the Kryo dependency
<dependency>
    <groupId>com.esotericsoftware</groupId>
    <artifactId>kryo</artifactId>
    <version>5.0.0-RC1</version>
</dependency>
2. Build the message object
package org.example.learn2.entity;

import java.io.Serializable;

/**
 * Message object
 *
 * @author
 * @date 2019/6/15
 */
public class KafkaMessage implements Serializable {
    private String id;
    private Long value;
    //..... other fields

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Long getValue() {
        return value;
    }

    public void setValue(Long value) {
        this.value = value;
    }


}
3. Producer encoder (serializer)
package org.example.learn2.producer;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.CompatibleFieldSerializer;
import com.esotericsoftware.kryo.util.DefaultInstantiatorStrategy;
import org.apache.kafka.common.serialization.Serializer;
import org.objenesis.strategy.StdInstantiatorStrategy;

import java.io.ByteArrayOutputStream;
import java.util.Map;

/**
 * Kryo encoder (serializer)
 *
 * @author
 * @date 2019/6/15
 */
public class KryoSerializer implements Serializer<Object> {

    private Kryo kryo;

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // A Kryo instance is not thread-safe; this field must only be used
        // by one thread at a time (see the ThreadLocal variant after this block)
        this.kryo = Serializer.getKryo();
    }

    @Override
    public byte[] serialize(String topic, Object data) {
        try {
            if (data == null) {
                return null;
            }
            return Serializer.serialize(data, kryo);
        } catch (Exception e) {
            // Returning null silently drops the record; at least log the failure
            e.printStackTrace();
            return null;
        }
    }


    @Override
    public void close() {

    }

    private static class Serializer {
        private static byte[] serialize(Object object, Kryo kryo) {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            // Buffer starts at 1 KB and can grow up to 100 KB
            Output output = new Output(1024, 102400);
            output.setOutputStream(outputStream);
            // writeClassAndObject records the class as well, so the consumer
            // can decode without knowing the target type up front
            kryo.writeClassAndObject(output, object);
            output.close();
            return outputStream.toByteArray();
        }

        private static Kryo getKryo() {
            Kryo kryo = new Kryo();
            // StdInstantiatorStrategy can instantiate classes without a no-arg constructor
            kryo.setInstantiatorStrategy(new DefaultInstantiatorStrategy(new StdInstantiatorStrategy()));
            kryo.setReferences(false);
            kryo.setRegistrationRequired(false);
            // CompatibleFieldSerializer tolerates fields added or removed between versions
            kryo.setDefaultSerializer(CompatibleFieldSerializer.class);
            return kryo;
        }
    }

}
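One caveat: KafkaProducer is thread-safe and often shared across threads, but a Kryo instance is not. If the serializer may be invoked concurrently, a common pattern is to hold one Kryo per thread. A minimal sketch of that variant follows; the class name ThreadSafeKryoSerializer is hypothetical, and it assumes a kafka-clients version where configure/close are default methods (2.x+).

package org.example.learn2.producer;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.CompatibleFieldSerializer;
import com.esotericsoftware.kryo.util.DefaultInstantiatorStrategy;
import org.apache.kafka.common.serialization.Serializer;
import org.objenesis.strategy.StdInstantiatorStrategy;

public class ThreadSafeKryoSerializer implements Serializer<Object> {

    // One Kryo per thread, because a Kryo instance itself is not thread-safe
    private static final ThreadLocal<Kryo> KRYO = ThreadLocal.withInitial(() -> {
        Kryo kryo = new Kryo();
        kryo.setInstantiatorStrategy(new DefaultInstantiatorStrategy(new StdInstantiatorStrategy()));
        kryo.setReferences(false);
        kryo.setRegistrationRequired(false);
        kryo.setDefaultSerializer(CompatibleFieldSerializer.class);
        return kryo;
    });

    @Override
    public byte[] serialize(String topic, Object data) {
        if (data == null) {
            return null;
        }
        Output output = new Output(1024, 102400);
        KRYO.get().writeClassAndObject(output, data);
        byte[] bytes = output.toBytes();
        output.close();
        return bytes;
    }
}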
4. Producer configuration
package org.example.learn2.producer;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Producer configuration
 *
 * @author maochao
 * @date 2019/6/15
 */
public class ProducerConf {
    public static final String url = "39.100.104.199:9092";
    public static final String topic = "learn2";

    public static Properties initProperties() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, url);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Plug in the custom Kryo encoder for record values
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KryoSerializer.class.getName());
        return properties;
    }
}

Producer

package org.example.learn2.producer;

import org.example.learn2.entity.KafkaMessage;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * Producer
 *
 * @author
 * @date 2019/6/15
 */
public class Producer {
    public static void main(String[] args) throws InterruptedException {
        // Value type is Object so the KryoSerializer can handle KafkaMessage
        KafkaProducer<String, Object> producer = new KafkaProducer<>(ProducerConf.initProperties());
        KafkaMessage message = new KafkaMessage();
        message.setId("123");
        message.setValue(12123L);
        ProducerRecord<String, Object> record = new ProducerRecord<>(ProducerConf.topic, "hellotest", message);

        while (true) {
            producer.send(record, (metadata, exception) -> {
                System.out.println("exception ==> " + exception);
                System.out.println("metadata ==> " + metadata);
            });
            producer.flush();
            Thread.sleep(2000);
            // producer.close();
        }
    }

}
5. Consumer configuration
package org.example.learn2.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;
import java.util.UUID;

/**
 * Consumer configuration
 *
 * @author maochao
 * @date 2019/6/15
 */
public class ConsumerConf {
    public static final String url = "39.100.104.199:9092";
    public static final String topic = "learn2";

    public static Properties initProperties() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, url);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // A random UUID creates a fresh consumer group on every start; with the
        // default auto.offset.reset of "latest", only new messages are consumed
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
        // properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // Custom Kryo decoder for record values
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KryoDeserializer.class.getName());
        return properties;
    }
}
6. Consumer decoder (deserializer)
package org.example.learn2.consumer;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.serializers.CompatibleFieldSerializer;
import com.esotericsoftware.kryo.util.DefaultInstantiatorStrategy;
import org.apache.kafka.common.serialization.Deserializer;
import org.objenesis.strategy.StdInstantiatorStrategy;

import java.io.ByteArrayInputStream;
import java.util.Map;

/**
 * Kryo decoder (deserializer)
 *
 * @author
 * @date 2019/6/15
 */
public class KryoDeserializer implements Deserializer<Object> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {

    }

    @Override
    public Object deserialize(String topic, byte[] bytes) {
        try {
            if (bytes == null) {
                return null;
            } else {
                return Deserializer.deserialize(bytes);
            }
        } catch (Exception e) {
            // Returning null silently yields a null record value; at least log the failure
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public void close() {

    }


    // Note: this nested class shadows org.apache.kafka.common.serialization.Deserializer
    // inside the class body; renaming it would be clearer
    private static class Deserializer {
        private static Object deserialize(byte[] bytes) {
            // A new Kryo per record is simple but relatively costly; consider
            // caching one per thread for higher throughput
            Kryo kryo = getKryo();
            Input input = new Input();
            input.setInputStream(new ByteArrayInputStream(bytes));
            // readClassAndObject mirrors writeClassAndObject on the producer side
            Object o = kryo.readClassAndObject(input);
            input.close();
            return o;
        }

        private static Kryo getKryo() {
            // These settings must match the producer's KryoSerializer exactly
            Kryo kryo = new Kryo();
            kryo.setInstantiatorStrategy(new DefaultInstantiatorStrategy(new StdInstantiatorStrategy()));
            kryo.setReferences(false);
            kryo.setRegistrationRequired(false);
            kryo.setDefaultSerializer(CompatibleFieldSerializer.class);
            return kryo;
        }
    }
}
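Before wiring the codec into Kafka, it is worth verifying that the encoder and decoder are symmetric. Below is a minimal round-trip sketch; the RoundTripCheck class is a hypothetical test harness, not part of the original code.

package org.example.learn2;

import org.example.learn2.consumer.KryoDeserializer;
import org.example.learn2.entity.KafkaMessage;
import org.example.learn2.producer.KryoSerializer;

public class RoundTripCheck {
    public static void main(String[] args) {
        KafkaMessage message = new KafkaMessage();
        message.setId("123");
        message.setValue(12123L);

        KryoSerializer serializer = new KryoSerializer();
        serializer.configure(null, false);  // initializes the Kryo instance; configs are unused
        byte[] bytes = serializer.serialize("learn2", message);

        Object decoded = new KryoDeserializer().deserialize("learn2", bytes);
        KafkaMessage result = (KafkaMessage) decoded;
        System.out.println(result.getId() + " / " + result.getValue());  // expect: 123 / 12123
    }
}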
7. Consumer
package org.example.learn2.consumer;

import org.example.learn2.entity.KafkaMessage;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.header.Headers;

import java.time.Duration;
import java.util.Collections;

/**
 * Consumer
 *
 * @author
 * @date 2019/6/15
 */
public class Consumer {

    public static void main(String[] args) {
        KafkaConsumer<String, Object> consumer = new KafkaConsumer<>(ConsumerConf.initProperties());
        consumer.subscribe(Collections.singletonList(ConsumerConf.topic));
        while (true) {
            System.out.println("Polling for messages");
            // poll(long) is deprecated in kafka-clients 2.x; poll(Duration) is the current form
            ConsumerRecords<String, Object> records = consumer.poll(Duration.ofMillis(1000));
            handleMess(records);
        }
    }

    /**
     * Handle messages
     *
     * @param records
     */
    private static void handleMess(ConsumerRecords<String, Object> records) {
        for (ConsumerRecord<String, Object> record : records) {
            if (record.value() instanceof KafkaMessage) {
                System.out.println("value = " + ((KafkaMessage) record.value()).getId());
            }

            System.out.printf("topic = %s, key = %s%n", record.topic(), record.key());
            // Record headers; the Headers interface avoids casting to the
            // internal RecordHeaders implementation
            Headers headers = record.headers();
            headers.forEach(header ->
                    System.out.println("key=" + header.key() + ", value=" + new String(header.value())));
        }
    }
}