Kafka的SSL密码安全验证
1.生成本地SSL相关证书
- 第一步
keytool -keystore ./server.keystore.jks -alias ds-kafka-single -validity 365 -genkey -keypass 123456 -keyalg RSA -dname "CN=master,OU=aspire,O=aspire,L=beijing,S=beijing,C=cn" -storepass 123456 -ext SAN=DNS:master
注:CN和DNS为本机的主机名 -keypass 123456 为验证密码
- 第二步
openssl req -new -x509 -keyout ./ca-key -out ./ca-cert -days 365 -passout pass:123456 -subj "/C=cn/ST=beijing/L=beijing/O=aspire/OU=aspire/CN=master"
- 第三步
keytool -keystore ./client.truststore.jks -alias CARoot -import -file ./ca-cert -storepass 123456
- 第四步
keytool -keystore ./server.truststore.jks -alias CARoot -import -file ./ca-cert -storepass 123456
- 第五步
  - 第一小步
    keytool -keystore ./server.keystore.jks -alias ds-kafka-single -certreq -file ./server.cert-file -storepass 123456
  - 第二小步
    openssl x509 -req -CA ./ca-cert -CAkey ./ca-key -in ./server.cert-file -out ./server.cert-signed -days 365 -CAcreateserial -passin pass:123456
  - 第三小步
    keytool -keystore ./server.keystore.jks -alias CARoot -import -file ./ca-cert -storepass 123456
  - 第四小步
    keytool -keystore ./server.keystore.jks -alias ds-kafka-single -import -file ./server.cert-signed -storepass 123456
- 第六步
将生成的密钥发送一份到另一个broker节点
2.修改kafka的config目录下的server.properties文件
broker1:
#Java远程连接访问的连接
listeners=SSL://master:9092
advertised.listeners=SSL://master:9092
#认证文件位置和密码
ssl.keystore.location=/usr/ca/service/server.keystore.jks
ssl.keystore.password=123456
ssl.key.password=123456
#信任库位置和密码
ssl.truststore.location=/usr/ca/service/server.truststore.jks
ssl.truststore.password=123456
#开启客户端认证
ssl.client.auth=required
ssl.enabled.protocols=TLSv1.2,TLSv1.1,TLSv1
ssl.keystore.type=JKS
ssl.truststore.type=JKS
#algorithm 设置为空,ssl在验证过程中不对主机名进行校验,默认为HTTPS进行校验
ssl.endpoint.identification.algorithm=
# 设置内部访问也用SSL
security.inter.broker.protocol=SSL
broker2: 操作与broker1相同
listeners=SSL://base:9092
advertised.listeners=SSL://base:9092
ssl.keystore.location=/usr/ca/service/server.keystore.jks
ssl.keystore.password=123456
ssl.key.password=123456
ssl.truststore.location=/usr/ca/service/server.truststore.jks
ssl.truststore.password=123456
ssl.client.auth=required
ssl.enabled.protocols=TLSv1.2,TLSv1.1,TLSv1
ssl.keystore.type=JKS
ssl.truststore.type=JKS
ssl.endpoint.identification.algorithm=
security.inter.broker.protocol=SSL
3.测试SSL加密有效性
#启动kafka
kafka-server-start.sh ./server.properties
#使用openssl测试 listeners:master:9092
openssl s_client -debug -connect master:9092 -tls1
4.使用shell验证SSL
vim producer.properties
>>
bootstrap.servers=master:9092,base:9092
security.protocol=SSL
ssl.truststore.location=/usr/ca/service/server.truststore.jks
ssl.truststore.password=123456
ssl.keystore.password=123456
ssl.keystore.location=/usr/ca/service/server.keystore.jks
<<
kafka-console-producer.sh --broker-list master:9092,base:9092 --topic test3 --producer.config producer.properties
vim consumer.properties
>>
security.protocol=SSL
group.id=test-group
ssl.truststore.location=/usr/ca/service/server.truststore.jks
ssl.truststore.password=123456
ssl.keystore.password=123456
ssl.keystore.location=/usr/ca/service/server.keystore.jks
<<
kafka-console-consumer.sh --bootstrap-server master:9092,base:9092 --topic test3 --from-beginning --consumer.config consumer.properties
5.使用Java客户端连接
Producer.java:
/**
 * Sends 10 string messages ("key-0".."key-9" / "message-0".."message-9") to topic
 * "test3" over an SSL-secured connection to the brokers at master:9092 and base:9092.
 */
public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "master:9092,base:9092");
    props.put("security.protocol", "SSL");
    // 将生成的SSL安全认证文件复制一份到本地,给出对应的文件位置和密码
    // (local copies of the keystore/truststore generated on the broker)
    props.put("ssl.truststore.location", "D:\\tmp\\server.truststore.jks");
    props.put("ssl.truststore.password", "123456");
    props.put("ssl.keystore.location", "D:\\tmp\\server.keystore.jks");
    props.put("ssl.keystore.password", "123456");
    // Empty algorithm disables hostname verification, matching the broker-side
    // ssl.endpoint.identification.algorithm= setting.
    props.put("ssl.endpoint.identification.algorithm", "");
    props.put("acks", "all");
    props.put("retries", 0);
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    // try-with-resources guarantees close() — which also flushes buffered sends —
    // even if send() throws; the original leaked the producer on failure.
    try (Producer<String, String> producer = new KafkaProducer<>(props)) {
        for (long i = 0; i < 10; i++) {
            producer.send(new ProducerRecord<>("test3", "key-" + i, "message-" + i));
        }
    }
}
Consumer.java
/**
 * Consumes topic "test3" from the beginning (group "my-group") over SSL and prints
 * each record's value, polling forever in one-second intervals.
 */
public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "master:9092,base:9092");
    props.put("security.protocol", "SSL");
    // Local copies of the keystore/truststore generated on the broker.
    props.put("ssl.truststore.location", "D:\\tmp\\server.truststore.jks");
    props.put("ssl.truststore.password", "123456");
    // Empty algorithm disables hostname verification, matching the broker config.
    props.put("ssl.endpoint.identification.algorithm", "");
    props.put("ssl.keystore.location", "D:\\tmp\\server.keystore.jks");
    props.put("ssl.keystore.password", "123456");
    props.put("group.id", "my-group");
    props.put("auto.offset.reset", "earliest");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    // try-with-resources ensures the consumer is closed (leaving the group cleanly)
    // if poll() throws; the original never closed it.
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
        ArrayList<String> topics = new ArrayList<>();
        topics.add("test3");
        consumer.subscribe(topics);
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}