Kafka Multi-Dimensional Deep Dive (6) - Kafka Producer Integration

1 Producer Integration

1.1 application.yml

server:
    port: 8080

template:
    templates:
        - { "templateId": "1","templateFilePath": "D:/Data/JavaProject/kafka-springboot/src/main/resources/template/template.json","active": true }
        - { "templateId": "2","templateFilePath": "D:/Data/JavaProject/kafka-springboot/src/main/resources/template/template.json","active": false }
    template-result-type: 0 # 0 - read the template result from a file, 1 - read it from a database
    template-result-filePath: "D:/Data/JavaProject/kafka-springboot/src/main/resources/template/templateResult.json"


wechat:
    kafka:
        bootstrap_servers: "192.168.10.100:9092"
        acks_config: "all"
        partitioner_class: "com.tzb.kafka.producer.SamplePartition"
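
The partitioner_class entry points at a custom partitioner whose source is not included in this post. Below is a minimal sketch of what such a class could look like; the hashing rule is purely illustrative and not necessarily the author's actual SamplePartition logic. The idea matches section 1.4: records that share the same key (the templateId) land in the same partition.

package com.tzb.kafka.producer;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

public class SamplePartition implements Partitioner {

    @Override
    public int partition(String topic, Object key, byte[] keyBytes,
                         Object value, byte[] valueBytes, Cluster cluster) {
        // Illustrative rule: hash the key so that equal keys always map to the same partition
        int numPartitions = cluster.partitionCountForTopic(topic);
        if (keyBytes == null) {
            return 0;
        }
        return (key.toString().hashCode() & Integer.MAX_VALUE) % numPartitions;
    }

    @Override
    public void close() {
        // nothing to release
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // no extra configuration needed for this sketch
    }
}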

1.2 KafkaProperties

package com.tzb.kafka.wechat.conf;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/**
 * @Description Binds the wechat.kafka section of application.yml to a typed properties object
 *              (Spring's relaxed binding maps bootstrap_servers to bootstrapServers, and so on)
 * @Author tzb
 * @Date 2020/9/25 20:45
 * @Version 1.0
 **/
@Data
@Configuration
@ConfigurationProperties(prefix = "wechat.kafka")
public class KafkaProperties {

    private String bootstrapServers;

    private String acksConfig;

    private String partitionerClass;

}
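
The template: block at the top of application.yml is bound by a companion @ConfigurationProperties class, WechatTemplateProperties, which is injected in section 1.4 but whose source is not shown in this post. A possible sketch derived from the YAML keys above (the package and member names are assumptions):

package com.tzb.kafka.wechat.conf;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

import java.util.List;

@Data
@Configuration
@ConfigurationProperties(prefix = "template")
public class WechatTemplateProperties {

    private List<WechatTemplate> templates;

    // 0 - read the template result from a file, 1 - read it from a database
    private int templateResultType;

    private String templateResultFilePath;

    @Data
    public static class WechatTemplate {
        private String templateId;
        private String templateFilePath;
        private boolean active;
    }
}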

1.3 KafkaConf

  • The producer should be designed as a singleton: one shared instance, exposed as a Spring bean.

package com.tzb.kafka.wechat.conf;

import org.apache.kafka.clients.producer.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.Properties;

/**
 * @Description Builds the singleton Kafka Producer bean from the externalized KafkaProperties
 * @Author tzb
 * @Date 2020/9/25 20:46
 * @Version 1.0
 **/
@Configuration
public class KafkaConf {

    @Autowired
    private KafkaProperties kafkaProperties;

    @Bean
    public Producer<String, String> kafkaProducer() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
        properties.put(ProducerConfig.ACKS_CONFIG, kafkaProperties.getAcksConfig());
        // Wire the custom partitioner declared in application.yml (otherwise the partitioner_class property is unused)
        properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, kafkaProperties.getPartitionerClass());
        properties.put(ProducerConfig.RETRIES_CONFIG, "0");
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384");
        properties.put(ProducerConfig.LINGER_MS_CONFIG, "1");
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");

        // The main Producer object: thread-safe, created once here and shared as a singleton bean
        Producer<String, String> producer = new KafkaProducer<>(properties);

        return producer;
    }
}
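
Because batching and linger.ms mean records can still sit in the client buffer when the application stops, it is worth making sure the producer is closed on shutdown. Spring infers a close() destroy method for @Bean singletons by default, so KafkaProducer.close() (which flushes pending records) is already invoked when the context shuts down; if you prefer to make that explicit, the bean declaration can be written as:

    @Bean(destroyMethod = "close")
    public Producer<String, String> kafkaProducer() {
        // ... same body as shown above
    }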

1.4 WechatTemplateServiceImpl

    @Autowired
    private WechatTemplateProperties properties;

    @Autowired
    private Producer<String, String> producer;

    /**
     * @param reportInfo the JSON payload reported by the client, containing "templateId" and "result"
     */
    @Override
    public void templateReported(JSONObject reportInfo) {
        // Push the reported data to a Kafka topic with the Kafka producer
        log.info("templateReported : [{}]", reportInfo);
        String topicName = "tzb-new-topic";

        // Extract the fields to send to Kafka
        String templateId = reportInfo.getString("templateId");
        JSONArray reportData = reportInfo.getJSONArray("result");

        // Records that share the same templateId can be routed to the same partition (via the key),
        // which makes later per-template statistics and analysis easier.
        // The value must be a String here because the producer is configured with StringSerializer.
        ProducerRecord<String, String> record =
                new ProducerRecord<>(topicName, templateId, reportData.toString());

        /*
        1. The Kafka producer is thread-safe and should be shared across threads; creating one per thread
           causes heavy context switching and contention, hurting throughput.
        2. The record key matters:
           2.1 the key drives partition assignment, i.e. load balancing across partitions;
           2.2 downstream consumers such as Flink can process keyed data faster.
        3. With acks=all the broker only acknowledges once all in-sync replicas have the record, which is
           the strongest broker-side durability setting; if data must not be lost, also handle failed sends
           yourself (e.g. push them onto a retry queue).
         */

        try {
            // send() is asynchronous; this catch only covers client-side errors such as serialization failures
            producer.send(record);
        } catch (Exception e) {
            // On failure, put the data on a retry queue (e.g. Redis or ES) so it can be re-sent
            e.printStackTrace();
        }

    }
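
producer.send() is asynchronous, so the try/catch above only catches client-side problems; failures reported by the broker arrive later through a Callback (both Callback and RecordMetadata live in org.apache.kafka.clients.producer). A sketch of how the send could be extended to catch those failures as well; the retry-queue handling is left as a comment because the post only hints at it:

        producer.send(record, new Callback() {
            @Override
            public void onCompletion(RecordMetadata metadata, Exception exception) {
                if (exception != null) {
                    // Broker-side failure: push the record to a retry queue (e.g. Redis or ES) as suggested above
                    log.error("send failed for templateId {}", record.key(), exception);
                } else {
                    log.debug("sent to partition {} at offset {}", metadata.partition(), metadata.offset());
                }
            }
        });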