kafka-sasl消费示范例子

此博客展示了如何配置和使用Kafka消费者进行SASL_PLAINTEXT安全协议连接,采用SCRAM-SHA-256身份验证。代码示例中详细配置了消费者属性,包括安全协议、SASL机制、消费者组ID等,并提供了Sasl登录模块的配置文件示例。
摘要由CSDN通过智能技术生成

kafka-sasl消费示范例子

package cn.cuiot.dmp.rocketmq;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Properties;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;


public class AclConsumerMain extends AbstractConsumer {

    /** Shared consumer instance, created in {@link #main(String[])} and handed to {@code exeConsumer}. */
    private static org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer;

    /**
     * Entry point: builds a Kafka consumer authenticated over SASL_PLAINTEXT with
     * SCRAM-SHA-256, subscribes to the given topic, and starts the consume loop
     * (via {@code exeConsumer}, inherited from {@code AbstractConsumer}).
     *
     * @param args [0] bootstrap servers, [1] JAAS config file path, [2] group id, [3] topic.
     *             When fewer than 4 arguments are given, a usage hint is printed and
     *             built-in defaults are used.
     */
    public static void main(String[] args) {
        // Built-in defaults, overridden by CLI arguments when present.
        String kafkaBootstrapServers = "kafka.cuiot.cn:9093,kafka.cuiot.cn:9193,kafka.cuiot.cn:9293";
        String kafkaSaslConfDir = "C:\\data\\consumer.conf";
        String groupIdConfig = "";
        String topic = "";
        if (args.length >= 4) {
            // BUG FIX: original tested args.length == 7 although only 4 arguments are read
            // (and the usage message documents exactly 4).
            kafkaBootstrapServers = args[0];
            kafkaSaslConfDir = args[1];
            groupIdConfig = args[2];
            topic = args[3];
        } else {
            // BUG FIX: the usage hint was printed unconditionally; now only when args are missing.
            System.out.println("enter args quantity error, e.g. " +
                    "java -cp 《jar名》《启动类》《kafka Bootstrap servers地址》《sasl配置文件目录》《groupId》" +
                    "《topic》\n");
        }
        // Point the JVM at the JAAS file that holds the ScramLoginModule credentials.
        System.setProperty("java.security.auth.login.config", kafkaSaslConfDir);

        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        // NOTE: the original also set key/value *serializers* — those are producer-side
        // settings that KafkaConsumer ignores (logging a warning); removed.

        // SASL_PLAINTEXT transport with SCRAM-SHA-256 authentication.
        properties.setProperty("security.protocol", "SASL_PLAINTEXT");
        properties.setProperty(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");

        // Consumer group membership.
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupIdConfig);
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 30000);
        // Per-fetch size caps; these matter a lot when consuming over the public internet.
        properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 32000);
        properties.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 32000);
        // Max records per poll. Keep this small: if a batch cannot be processed before
        // the next poll deadline, the group rebalances and consumption stalls.
        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 30);
        // Disable SSL hostname verification (empty algorithm string).
        properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");

        consumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(properties);
        consumer.subscribe(Arrays.asList(topic));

        System.out.println("---------开始Sasl消费---------");

        exeConsumer(consumer);
    }

    /**
     * Computes the HMAC-SHA256 of {@code message} keyed with {@code key} and returns it
     * as a lowercase hex string.
     *
     * @param message text to authenticate
     * @param key     secret key
     * @return lowercase hex digest, or {@code null} if the MAC could not be computed
     */
    public static String sha256_mac(String message, String key) {
        String outPut = null;
        try {
            Mac sha256Hmac = Mac.getInstance("HmacSHA256");
            // BUG FIX: use an explicit charset — the original relied on the platform
            // default encoding, producing different MACs on differently configured hosts.
            SecretKeySpec secretKey =
                    new SecretKeySpec(key.getBytes(StandardCharsets.UTF_8), "HmacSHA256");
            sha256Hmac.init(secretKey);
            byte[] bytes = sha256Hmac.doFinal(message.getBytes(StandardCharsets.UTF_8));
            outPut = byteArrayToHexString(bytes);
        } catch (Exception e) {
            System.out.println("Error HmacSHA256========" + e.getMessage());
        }
        return outPut;
    }

    /**
     * Renders a byte array as lowercase hexadecimal.
     *
     * @param b bytes to render; {@code null} yields the empty string
     * @return two hex digits per byte, lowercase
     */
    private static String byteArrayToHexString(byte[] b) {
        if (b == null) {
            return "";
        }
        StringBuilder sb = new StringBuilder(b.length * 2);
        for (byte value : b) {
            String hex = Integer.toHexString(value & 0xFF);
            if (hex.length() == 1) {
                sb.append('0');
            }
            sb.append(hex);
        }
        // Integer.toHexString already emits lowercase digits.
        return sb.toString();
    }
}

consumer.conf

KafkaClient {
  org.apache.kafka.common.security.scram.ScramLoginModule required
  username="<your-scram-username>"
  password="<your-scram-password>";
};

评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值