Configuring SASL Authentication for Kafka
Kafka version: kafka_2.12-2.0.0.tar.gz
Step 1: Create the following configuration files in Kafka's config directory.
kafka_client_jaas.conf:
KafkaClient {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="superadmin"
    password="superadmin1234";
};
kafka_server_jaas.conf (in the KafkaServer section, username/password are the broker's own credentials for inter-broker connections, and each user_<name>="<password>" entry defines an account that clients may authenticate with; the Client section is what the broker uses when it connects to ZooKeeper):
KafkaServer {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="superadmin"
    password="superadmin1234"
    user_superadmin="superadmin1234"
    user_test="test1234"
    user_alice="alice1234";
};
KafkaClient {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="superadmin"
    password="superadmin1234";
};
Client {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="superadmin"
    password="superadmin1234";
};
kafka_zoo_jaas.conf (ZKServer is the JAAS context name that step 2 passes to ZooKeeper):
ZKServer {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="superadmin"
    password="superadmin1234"
    user_superadmin="superadmin1234";
};
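Note: instead of kafka_client_jaas.conf plus the KAFKA_OPTS edits below, a client can also supply the same login module inline through the sasl.jaas.config client property; the Java examples at the end of this article take that approach.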
Step 2: Edit zookeeper-server-start.sh in Kafka's bin directory and add at the top:
export KAFKA_OPTS=" -Djava.security.auth.login.config=/opt/kafka/config/kafka_zoo_jaas.conf -Dzookeeper.sasl.serverconfig=ZKServer"
Step 3: Edit kafka-server-start.sh in Kafka's bin directory and add at the top:
export KAFKA_OPTS=" -Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf"
Step 4: Edit kafka-console-producer.sh in Kafka's bin directory and add at the top:
export KAFKA_OPTS=" -Djava.security.auth.login.config=/opt/kafka/config/kafka_client_jaas.conf"
Step 5: Edit kafka-console-consumer.sh in Kafka's bin directory and add at the top:
export KAFKA_OPTS=" -Djava.security.auth.login.config=/opt/kafka/config/kafka_client_jaas.conf"
Step 6: Edit consumer.properties in Kafka's config directory and add:
security.protocol=SASL_PLAINTEXT
sasl.mechanism=PLAIN
Step 7: Edit producer.properties in Kafka's config directory and add:
security.protocol=SASL_PLAINTEXT
sasl.mechanism=PLAIN
Step 8: Edit zookeeper.properties in Kafka's config directory and add:
authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
requireClientAuthScheme=sasl
jaasLoginRenew=3600000
Step 9: Edit server.properties in Kafka's config directory. Change:
listeners=SASL_PLAINTEXT://192.168.20.83:9092
# Bind the public address here
advertised.listeners=SASL_PLAINTEXT://175.24.197.228:9092
Then add:
# Protocol used for connections between brokers
security.inter.broker.protocol=SASL_PLAINTEXT
# SASL mechanism
sasl.enabled.mechanisms=PLAIN
sasl.mechanism.inter.broker.protocol=PLAIN
# Authorizer class that enforces ACLs
authorizer.class.name=kafka.security.auth.SimpleAclAuthorizer
# If no ACL is found for a resource, allow all operations; left commented out here,
# so access is denied unless explicitly granted or the user is a super user
#allow.everyone.if.no.acl.found=true
super.users=User:superadmin
delete.topic.enable=true
auto.create.topics.enable=false
Step 10: Start the ZooKeeper service:
sh bin/zookeeper-server-start.sh config/zookeeper.properties
Step 11: Start the Kafka service:
sh bin/kafka-server-start.sh config/server.properties
Step 12: List topics (hadoop is the ZooKeeper host name):
sh bin/kafka-topics.sh --list --zookeeper hadoop:2181
Step 13: Create a new topic:
sh bin/kafka-topics.sh --create --zookeeper hadoop:2181 --replication-factor 1 --partitions 16 --topic test
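If you prefer to manage topics from code, the AdminClient that ships with kafka-clients can create the same topic. A minimal sketch, assuming the broker address and superadmin credentials from the steps above (the class name CreateTopicDemo is ours for illustration):
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import java.util.Collections;
import java.util.Properties;

public class CreateTopicDemo {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.20.83:9092");
        // Authenticate as superadmin over SASL/PLAIN, matching the broker setup
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        props.put("sasl.jaas.config",
                "org.apache.kafka.common.security.plain.PlainLoginModule required "
                        + "username=\"superadmin\" password=\"superadmin1234\";");
        try (AdminClient admin = AdminClient.create(props)) {
            // 16 partitions, replication factor 1, same as the CLI command above
            NewTopic topic = new NewTopic("test", 16, (short) 1);
            admin.createTopics(Collections.singleton(topic)).all().get();
            // Equivalent of step 12: print the topic list
            System.out.println(admin.listTopics().names().get());
        }
    }
}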
Step 14: Grant the superadmin user access to all topics and consumer groups:
sh bin/kafka-acls.sh --authorizer-properties zookeeper.connect=hadoop:2181 --add --allow-principal User:superadmin --group=* --topic=*
Step 15: Grant user test read and write permission on one topic:
sh bin/kafka-acls.sh --authorizer-properties zookeeper.connect=hadoop:2181 --add --allow-principal User:test --operation Read --operation Write --topic test --group=*
Notes:
To control read/write access: --operation Read --operation Write
To control the consumer group: --group=* leaves the group unrestricted; --group test-consumer-group limits access to one group
Step 16: Remove permissions (192.168.1.101 is the host the test user connects from):
sh bin/kafka-acls.sh --authorizer-properties zookeeper.connect=hadoop:2181 --remove --allow-principal User:test --allow-host 192.168.1.101 --operation Read --operation Write --topic test
sh bin/kafka-acls.sh --authorizer-properties zookeeper.connect=hadoop:2181 --remove --allow-principal User:test --allow-host 175.24.197.228 --operation Read --operation Write --topic test
Step 17: List all ACLs on topic test:
sh bin/kafka-acls.sh --authorizer-properties zookeeper.connect=hadoop:2181 --list --topic test
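The ACL changes in steps 14-16 can also be made programmatically through the AdminClient ACL API. A minimal sketch mirroring step 15 (grant user test read/write on topic test from any host), again assuming the broker address and superadmin credentials above; the class name AclDemo is ours for illustration:
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;
import java.util.Arrays;
import java.util.Properties;

public class AclDemo {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.20.83:9092");
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        props.put("sasl.jaas.config",
                "org.apache.kafka.common.security.plain.PlainLoginModule required "
                        + "username=\"superadmin\" password=\"superadmin1234\";");
        try (AdminClient admin = AdminClient.create(props)) {
            // Literal topic name "test"; host "*" means any host
            ResourcePattern topic = new ResourcePattern(ResourceType.TOPIC, "test", PatternType.LITERAL);
            AclBinding read = new AclBinding(topic,
                    new AccessControlEntry("User:test", "*", AclOperation.READ, AclPermissionType.ALLOW));
            AclBinding write = new AclBinding(topic,
                    new AccessControlEntry("User:test", "*", AclOperation.WRITE, AclPermissionType.ALLOW));
            admin.createAcls(Arrays.asList(read, write)).all().get();
        }
    }
}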
Step 18: Test by starting a consumer:
sh bin/kafka-console-consumer.sh --bootstrap-server 192.168.20.83:9092 --topic test --from-beginning --consumer.config config/consumer.properties
sh bin/kafka-console-consumer.sh --bootstrap-server hadoop:9092 --topic test --from-beginning --consumer.config config/consumer.properties
Step 19: Test by starting a producer:
sh bin/kafka-console-producer.sh --broker-list 192.168.20.83:9092 --topic test --producer.config config/producer.properties
sh bin/kafka-console-producer.sh --broker-list hadoop:9092 --topic test --producer.config config/producer.properties
Java Example: Producing to and Consuming from Kafka
pom.xml (kafka-clients is the only dependency the two samples below actually use; the JSON libraries are optional extras):
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.6.0</version>
</dependency>
<!-- Jackson JSON Mapper -->
<dependency>
    <groupId>org.codehaus.jackson</groupId>
    <artifactId>jackson-mapper-asl</artifactId>
    <version>1.6.4</version>
</dependency>
<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-core</artifactId>
    <version>2.11.3</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-json</artifactId>
    <version>2.3.3.RELEASE</version>
</dependency>
Producer: TestProducer
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;

public class TestProducer {
    public static String topic = "test";
    private static Properties producerProperties = new Properties();

    public static void main(String[] args) throws InterruptedException {
        initProperties("175.24.197.228:9092");
        KafkaProducer<String, String> producer = new KafkaProducer<>(producerProperties);
        try {
            for (int i = 1; i < 1000; i++) {
                String message = "my Message record" + i;
                // Key the record by the message hash; send() is asynchronous
                producer.send(new ProducerRecord<>(topic, String.valueOf(message.hashCode()), message));
                System.out.println("Producer ---- sent message: " + message);
                // Send one message per second
                Thread.sleep(1000);
            }
        } finally {
            producer.close();
        }
    }

    private static void initProperties(String serverAddress) {
        producerProperties.setProperty("bootstrap.servers", serverAddress);
        producerProperties.put("acks", "all");
        producerProperties.put("retries", "3");
        producerProperties.put("key.serializer", StringSerializer.class.getName());
        producerProperties.put("value.serializer", StringSerializer.class.getName());
        // SASL/PLAIN settings, matching the broker configuration from step 9
        producerProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        producerProperties.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        producerProperties.put(SaslConfigs.SASL_JAAS_CONFIG,
                "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"superadmin\" password=\"superadmin1234\";");
    }
}
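With the broker and ACL setup above in place, running TestProducer while the step 18 console consumer is attached to topic test should print one message per second on both sides.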
Consumer: TestConsumer
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class TestConsumer {
    public static String topic = "test";
    private static Properties consumerProperties = new Properties();

    public static void main(String[] args) {
        initProperties("175.24.197.228:9092");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProperties);
        consumer.subscribe(Collections.singleton(topic));
        while (true) {
            // Block for up to one second waiting for new records
            ConsumerRecords<String, String> msgs = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> msg : msgs) {
                System.out.println("Consumer ---- received message: " + msg.value());
            }
        }
    }

    private static void initProperties(String serverAddress) {
        consumerProperties.setProperty("bootstrap.servers", serverAddress);
        consumerProperties.put("key.deserializer", StringDeserializer.class.getName());
        consumerProperties.put("value.deserializer", StringDeserializer.class.getName());
        consumerProperties.put("group.id", "group1");
        // SASL/PLAIN settings, matching the broker configuration from step 9
        consumerProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        consumerProperties.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        consumerProperties.put(SaslConfigs.SASL_JAAS_CONFIG,
                "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"superadmin\" password=\"superadmin1234\";");
    }
}
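A note on offsets: with the default enable.auto.commit=true, this consumer commits offsets automatically (every 5 seconds by default, controlled by auto.commit.interval.ms); set enable.auto.commit=false and call consumer.commitSync() after processing if you need manual control over commits.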