SpringBoot 集成Kafka demo全部代码,含配置文件与sasl

Util方式配置

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;

import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * Demo utility wrapping a single shared Kafka producer authenticated with
 * SASL/SCRAM. Call {@link #sendAll(String, String)} to publish a message.
 *
 * <p>NOTE(review): broker addresses and SCRAM credentials are hard-coded
 * below for demo purposes only — move them to external configuration and
 * keep secrets out of source control before any real deployment.
 */
@Slf4j
public class KafkaUtils {

    /** Shared producer; created once when the class is first loaded. */
    private static final KafkaProducer<String, String> producer;

    static {
        Properties kfkProperties = new Properties();
        kfkProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "10.1.213.172:9092,10.1.213.172:9093,10.1.213.172:9094");
        kfkProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        kfkProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        // Require acknowledgement from all in-sync replicas.
        kfkProperties.put(ProducerConfig.ACKS_CONFIG, "all");
        // Up to 3 retries after the first failed attempt; past that the send fails.
        kfkProperties.put(ProducerConfig.RETRIES_CONFIG, 3);
        // BUG FIX: the original set this to 1 ms (a value chosen only to force
        // retries during testing), which makes virtually every real request
        // time out. Restore the Kafka default of 30 seconds.
        kfkProperties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 30000);
        // One in-flight request per connection keeps records ordered even
        // when retries occur.
        kfkProperties.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
        kfkProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        kfkProperties.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");
        // BUG FIX: the username/password values inside the JAAS string must be
        // quoted — unquoted values whose first character is a digit fail to
        // parse (the article's own note further down says exactly this).
        kfkProperties.put(SaslConfigs.SASL_JAAS_CONFIG,
                "org.apache.kafka.common.security.scram.ScramLoginModule required "
                        + "username=\"qysjsc\" password=\"ZDUcWAU0\";");
        producer = new KafkaProducer<>(kfkProperties);
    }

    /** Utility class — not instantiable. */
    private KafkaUtils() {
    }

    /**
     * Sends {@code content} to {@code topic} and flushes so the record is
     * handed to the broker before this method returns.
     *
     * @param topic   destination topic
     * @param content message payload (record value; no key is set)
     */
    public static void sendAll(String topic, String content) {
        producer.send(new ProducerRecord<>(topic, content));
        // Flushing on every call trades throughput for immediate delivery —
        // acceptable for a demo; high-volume callers should flush less often.
        producer.flush();
    }
}

集成方式配置:

spring:
  application:
    name: kafka-tools
  kafka:
    bootstrap-servers: 0.0.0.0:9092,0.0.0.0:9093,0.0.0.0:9094
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      properties:
        sasl.mechanism: SCRAM-SHA-256
        security.protocol: SASL_PLAINTEXT
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="username" password="password";
    consumer:
      group-id: test-group
      auto-offset-reset: latest
      enable-auto-commit: true
      auto-commit-interval: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      properties:
        sasl.mechanism: SCRAM-SHA-256
        security.protocol: SASL_PLAINTEXT
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="username" password="password";

注意,这里配置sasl的账号密码必须加双引号,否则在填入值首字符为数字的情况下读取不到参数

消费者

@Slf4j
@Component
public class TestTopic {

    // Demo consumer: subscribes to topic "AAA" and logs metadata for each
    // record received. The payload and full header map are bound but
    // intentionally unused here.
    @KafkaListener(topics  = "AAA")
    public void reportTopic(@Payload String data,
                            @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                            @Headers MessageHeaders messageHeaders,
                            @Header(KafkaHeaders.OFFSET) String offset) {
        // Only topic and offset are logged; the record body is not processed.
        log.info("[Receive data] topic:{},  offset:{}", topic, offset);
    }
}

生产者

/**
 * Thin producer facade over Spring's {@code KafkaTemplate}: logs the outgoing
 * topic and payload length, then hands the record to the template.
 *
 * <p>NOTE(review): the class name shadows
 * {@code org.apache.kafka.clients.producer.KafkaProducer}; kept as-is to
 * preserve the public interface, but a rename (e.g. {@code KafkaSender})
 * would avoid import confusion.
 */
@Slf4j
@Component
public class KafkaProducer {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Publishes {@code msg} to {@code topic}. The record carries no key, so
     * partition assignment follows the producer's default partitioner.
     *
     * @param topic destination topic
     * @param msg   message payload
     */
    public void send(String topic, String msg){
        log.info("Send topic: {}, length:{}", topic, msg.length());
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, msg);
        kafkaTemplate.send(record);
    }
}

SASL

配置sasl有三种方式,这里面用的是第一种:

第一种:初始化kafka的Properties时

props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
props.put("sasl.jaas.config","org.apache.kafka.common.security.plain.PlainLoginModule required username=\"producer\" password=\"prod-sec\";");


第二种:设置系统属性

System.setProperty("java.security.auth.login.config", "d:/jaas.conf");//指定绝对路径


第三种:启动参数加入参数-Djava.security.auth.login.config=jaas.conf的绝对路径

java -jar -Djava.security.auth.login.config=D:/jaas.conf xx.jar //指定绝对路径

  • 1
    点赞
  • 7
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

扶朕去网吧

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值