Kafka配置JAAS
仅推荐本地测试使用。生产环境请尽量使用 Kafka 监听方式;若必须使用此类轮询方法,请为其添加定时开关并与生产者同步,以避免循环空转打印。
/**
 * Example entry point: consumes from a Kafka topic over SASL_PLAINTEXT with the
 * PLAIN mechanism and prints each record's offset/key/value/partition.
 * Placeholders ("***") must be replaced with real broker/credential/topic values.
 */
public static void main(String[] args) {
    Properties props = new Properties();
    // SASL/PLAIN authentication; SaslConfigs.SASL_MECHANISM is the "sasl.mechanism" key,
    // so the duplicate raw props.put("sasl.mechanism", "PLAIN") from the original was removed.
    props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
    props.put("sasl.jaas.config",
            "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"***\" password=\"***\";");
    props.put("bootstrap.servers", "***");
    // Offsets are committed automatically every second.
    props.put("enable.auto.commit", "true");
    props.put("auto.commit.interval.ms", "1000");
    props.put("group.id", "***");
    props.put("session.timeout.ms", "60000");
    props.put("max.poll.records", 1000);
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("security.protocol", "SASL_PLAINTEXT");

    // try-with-resources guarantees the consumer is closed (committing final offsets
    // and leaving the group promptly) even if the poll loop throws.
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
        consumer.subscribe(Arrays.asList("***"));
        // Business-specific processing goes below.
        while (true) {
            long startTime = System.currentTimeMillis();
            // poll(Duration) replaces the deprecated poll(long) overload; the Duration
            // form also bounds time spent in metadata fetches, not just record waits.
            ConsumerRecords<String, String> records =
                    consumer.poll(java.time.Duration.ofMillis(1000));
            // Log how long the poll actually blocked.
            System.out.println(System.currentTimeMillis() - startTime);
            System.out.println("receive message number is " + records.count());
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("success offset =%d,key =%s,value =%s,partition = %d %n",
                        record.offset(), record.key(), record.value(),
                        record.partition());
            }
        }
    }
}