Util方式配置
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
*/
@Slf4j
public class KafkaUtils {
private static KafkaProducer<String, String> producer = null;
static {
Properties kfkProperties = new Properties();
//System.out.println(Thread.currentThread().getContextClassLoader().getResource("kafka_client_jaas.conf").getPath());
//System.setProperty("java.security.auth.login.config", "/opt/cloudera/parcels/KAFKA-4.0.0-1.4.0.0.p0.1/etc/kafka/conf.dist/kafka_client_jaas.conf");
kfkProperties.put("bootstrap.servers", "10.1.213.172:9092,10.1.213.172:9093,10.1.213.172:9094");
kfkProperties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
kfkProperties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
//设置kafka的 Acks以及retries
kfkProperties.put(ProducerConfig.ACKS_CONFIG, "all");
//不包含第一次发送,如果尝试3次都失败,则系统放弃发送
kfkProperties.put(ProducerConfig.RETRIES_CONFIG, 3);
//将检测超时的时间为1ms -- 为测试retries现象
kfkProperties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 1);
//开启kafka 幂等性 注意:在使用幂等时 必须开启acks=all和retires
//kfkProperties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
//保证信息有序
kfkProperties.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
kfkProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
kfkProperties.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");
kfkProperties.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.scram.ScramLoginModule required username=qysjsc password=ZDUcWAU0;");
//kfkProperties.put("listener.name.sasl_ssl.scram-sha-256.sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username=qysjsc password=ZDUcWAU0;");
producer = new KafkaProducer<>(kfkProperties);
}
/**
* * producer 发送消息异步回调返回消息
*/
public static void sendAll(String topic, String content) {
//record = new ProducerRecord<>(topic, content, "CallBack");
//record = new ProducerRecord(topic, "123");
producer.send(new ProducerRecord<>(topic, content));
/*producer.send(record, new Callback() {
@Override
public void onCompletion(RecordMetadata recordMetadata, Exception e) {
if (null != e) {
e.printStackTrace();
}
System.out.println("时间戳,主题,分区,位移: " + recordMetadata.timestamp() + ", " + recordMetadata.topic() + "," + recordMetadata.partition() + " " + recordMetadata.offset());
}
});*/
producer.flush();
}
}
集成方式配置:
spring:
  application:
    name: kafka-tools
  kafka:
    bootstrap-servers: 0.0.0.0:9092,0.0.0.0:9093,0.0.0.0:9094
    producer:
      # FIX: producers need *serializers*. The original listed
      # key-deserializer/value-deserializer with StringDeserializer, which
      # Spring Boot ignores for the producer, so no serializer was configured.
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      properties:
        # FIX: the mechanism must match the login module in sasl.jaas.config;
        # ScramLoginModule requires a SCRAM mechanism, not PLAIN.
        sasl.mechanism: SCRAM-SHA-256
        security.protocol: SASL_PLAINTEXT
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="username" password="password";
    consumer:
      group-id: test-group
      auto-offset-reset: latest
      enable-auto-commit: true
      auto-commit-interval: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      properties:
        # FIX: aligned with the ScramLoginModule below (was PLAIN).
        sasl.mechanism: SCRAM-SHA-256
        security.protocol: SASL_PLAINTEXT
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="username" password="password";
注意,这里配置sasl的账号密码必须加双引号,否则在填入值首字符为数字的情况下读取不到参数
消费者
@Slf4j
@Component
public class TestTopic {

    /**
     * Consumes records from topic "AAA" and logs the topic and offset of each record.
     * The payload and the full header map are injected by Spring but not used yet.
     */
    @KafkaListener(topics = "AAA")
    public void reportTopic(
            @Payload String payload,
            @Header(KafkaHeaders.RECEIVED_TOPIC) String sourceTopic,
            @Headers MessageHeaders allHeaders,
            @Header(KafkaHeaders.OFFSET) String recordOffset) {
        log.info("[Receive data] topic:{}, offset:{}", sourceTopic, recordOffset);
    }
}
生产者
@Slf4j
@Component
public class KafkaProducer {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Publishes {@code msg} to {@code topic} through the injected template.
     * Only the payload length is logged, never the payload itself.
     */
    public void send(String topic, String msg) {
        log.info("Send topic: {}, length:{}", topic, msg.length());
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, msg);
        kafkaTemplate.send(record);
    }
}
SASL
配置sasl有三种方式,这里面用的是第一种:
第一种:初始化kafka的Properties时
props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
props.put("sasl.jaas.config","org.apache.kafka.common.security.plain.PlainLoginModule required username=\"producer\" password=\"prod-sec\";");
第二种:设置系统属性
System.setProperty("java.security.auth.login.config", "d:/jaas.conf");//指定绝对路径
第三种:启动参数加入参数-Djava.security.auth.login.config=jaas.conf的绝对路径
java -jar -Djava.security.auth.login.config=D:/jaas.conf xx.jar //指定绝对路径