Demo: adding Kerberos authentication to a Kafka cluster

The Kafka version here is 0.10.0, which is a bit old, but the approach is much the same for newer versions (the pom below actually pulls in the 2.7.0 client).

    Once Kerberos authentication is enabled on a Kafka cluster, how do you produce and consume messages with the Java API? Essentially, you just add the JAAS, keytab and related authentication settings to ordinary producer/consumer code. Let's go straight to the code:

1. Connecting to a Kerberos-secured cluster is actually simple; you need the following three files:

    1) The Kerberos server configuration file krb5.conf, which tells the program which KDC to authenticate against:

[libdefaults]
  udp_preference_limit = 1
  renew_lifetime = 3650d
  forwardable = true
  default_realm = CHINAUNICOM
  ticket_lifetime = 3650d
  dns_lookup_realm = false
  dns_lookup_kdc = false
  default_ccache_name = /tmp/krb5cc_%{uid}
  #default_tgs_enctypes = aes des3-cbc-sha1 rc4 des-cbc-md5
  #default_tkt_enctypes = aes des3-cbc-sha1 rc4 des-cbc-md5

[domain_realm]
  .CHINAUNICOM = CHINAUNICOM

[logging]
  default = FILE:/var/log/krb5kdc.log
  admin_server = FILE:/var/log/kadmind.log
  kdc = FILE:/var/log/krb5kdc.log

[realms]
  CHINAUNICOM = {
    admin_server = master98.hadoop.ljs
    kdc = master98.hadoop.ljs
  }
 

 2) A jaas.conf file specifying how to authenticate; one is usually available under the cluster's conf directory:

KafkaClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    keyTab="D:\\kafkaSSL\\kafka.service.keytab"
    storeKey=true
    useTicketCache=false
    principal="kafka/salver32.hadoop.unicom@CHINAUNICOM"
    serviceName=kafka;
};
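
Incidentally, with a newer client such as the kafka-clients 2.7.0 pulled in by the pom below, the same JAAS entry can instead be supplied per client via the sasl.jaas.config property, with no global jaas.conf file or java.security.auth.login.config system property. A minimal sketch using the example keytab and principal above (the backslashes are escaped once for Java and once for the JAAS parser):

Properties props = new Properties();
//Inline equivalent of the KafkaClient section above (supported since client 0.10.2)
props.put("sasl.jaas.config",
        "com.sun.security.auth.module.Krb5LoginModule required "
        + "useKeyTab=true storeKey=true useTicketCache=false "
        + "keyTab=\"D:\\\\kafkaSSL\\\\kafka.service.keytab\" "
        + "principal=\"kafka/salver32.hadoop.unicom@CHINAUNICOM\";");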
 

 3) The user's login ticket and the keytab file used for authentication; the ticket and keytab themselves are not pasted here (a quick consistency check is sketched below).
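
Before involving Kafka at all, it is worth checking that these three pieces (krb5.conf, jaas.conf and the keytab) are consistent by driving the JAAS login directly. A minimal sketch; the KerberosLoginCheck class name is just for illustration, and the paths are the example ones above:

import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;

public class KerberosLoginCheck {
    public static void main(String[] args) throws LoginException {
        //Same two system properties the producer/consumer below rely on
        System.setProperty("java.security.krb5.conf", "D:\\kafkaSSL\\krb5.conf");
        System.setProperty("java.security.auth.login.config", "D:\\kafkaSSL\\kafka_client_jaas.conf");
        //"KafkaClient" is the entry name in jaas.conf; if login() returns,
        //the principal, keytab and KDC settings all line up
        LoginContext lc = new LoginContext("KafkaClient");
        lc.login();
        System.out.println("Logged in as: " + lc.getSubject().getPrincipals());
    }
}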

2. pom.xml dependencies

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-parent</artifactId>
		<version>2.3.12.RELEASE</version>
	</parent>

	<groupId>org.fline</groupId>
	<artifactId>ppp</artifactId>
	<version>1.0-SNAPSHOT</version>

	<name>ppp</name>
	<description>ppp project</description>

	<properties>
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
		<maven.compiler.source>1.8</maven.compiler.source>
		<maven.compiler.target>1.8</maven.compiler.target>
	</properties>

	<dependencies>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
			<exclusions>
				<exclusion>
					<artifactId>log4j-to-slf4j</artifactId>
					<groupId>org.apache.logging.log4j</groupId>
				</exclusion>
				<exclusion>
					<artifactId>logback-classic</artifactId>
					<groupId>ch.qos.logback</groupId>
				</exclusion>
			</exclusions>
		</dependency>
		<!--<dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> 
			<version>4.11</version> <scope>test</scope> </dependency> -->

		<!-- https://mvnrepository.com/artifact/io.springfox/springfox-swagger2 -->
		<dependency>
			<groupId>io.springfox</groupId>
			<artifactId>springfox-swagger2</artifactId>
			<version>2.9.2</version>
		</dependency>

		<!-- https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui -->
		<dependency>
			<groupId>io.springfox</groupId>
			<artifactId>springfox-swagger-ui</artifactId>
			<version>2.9.2</version>
		</dependency>

		<!-- Kafka clients, with Kerberos authentication enabled -->
		<dependency>
			<groupId>org.apache.kafka</groupId>
			<artifactId>kafka-clients</artifactId>
			<version>2.7.0</version>
		</dependency>
		<dependency>
			<groupId>org.apache.kafka</groupId>
			<artifactId>kafka-streams</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.kafka</groupId>
			<artifactId>spring-kafka</artifactId>
		</dependency>
		<dependency>
			<groupId>cn.hutool</groupId>
			<artifactId>hutool-all</artifactId>
			<version>4.6.2</version>
		</dependency>
	</dependencies>

	<build>
		<plugins>
			<!-- <plugin> -->
			<!-- <groupId>org.springframework.boot</groupId> -->
			<!-- <artifactId>spring-boot-maven-plugin</artifactId> -->
			<!-- </plugin> -->
		</plugins>
	</build>
</project>


 

3. Java producer sending messages, code example:

package com.fline.kafka.customer;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Date;
import java.util.Properties;

/**
 * @Author panrenqing
 * @Date 2022/11/4 16:49
 * @Version 1.0
 */
public class KafkaKerberosProducer3 {

    public static final String krb5Conf="D:\\kafkaSSL\\krb5.conf";
    public static final String kafkaJaasConf="D:\\kafkaSSL\\kafka_client_jaas.conf";
    public static final String bootstrapServers="47.92.170.121:9092,47.92.166.91:9092,47.92.162.206:9092";
    public static final String topic="topic1";
    private static long count =5;

    public static void main(String[] args) {
        //Required for Kerberos: point the JVM at krb5.conf and the JAAS config,
        //either via the absolute paths above or the classpath-relative paths below:
//        System.setProperty("java.security.krb5.conf", krb5Conf);
//        System.setProperty("java.security.auth.login.config", kafkaJaasConf);
        System.setProperty("java.security.auth.login.config", ".\\src\\main\\resources\\kerberos\\kafka-client-jaas.conf");
        System.setProperty("java.security.krb5.conf", ".\\src\\main\\resources\\kerberos\\krb5.conf");

        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        //Required for Kerberos: the following three properties
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.kerberos.service.name", "kafka");
        props.put("sasl.mechanism", "GSSAPI");
        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(props);
        int i=1;
        while (true){
            //Quote the date value so the message stays valid JSON
            String message = "{\"id\":" + i + ",\"ip\":\"192.168.0." + i + "\",\"date\":\"" + new Date() + "\"}";
            System.out.println(message);
            //The callback reports per-record delivery failures (uses the imports above)
            kafkaProducer.send(new ProducerRecord<String, String>(topic, message), new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception != null) {
                        exception.printStackTrace();
                    }
                }
            });
            try {
                Thread.sleep(200);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if(i++>count){
                break;
            }
        }
        //Flush any buffered records and release resources before the JVM exits
        kafkaProducer.close();
    }

}
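
When Kerberos is first enabled, it can also help to send one record synchronously so that authentication problems (wrong principal, unreadable keytab, unreachable KDC) surface immediately as exceptions rather than inside the callback. A small sketch against the same kafkaProducer and topic as above:

try {
    //get() blocks until the broker acknowledges the record or the send fails
    RecordMetadata meta = kafkaProducer
            .send(new ProducerRecord<String, String>(topic, "kerberos-smoke-test"))
            .get();
    System.out.println("Authenticated send OK, offset " + meta.offset());
} catch (Exception e) {
    //authentication and timeout failures are surfaced here
    e.printStackTrace();
}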

4. Java consumer receiving messages, code example:

package com.hadoop.ljs.kafka010.security;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
/**
 * @author: Created By lujisen
 * @company ChinaUnicom Software JiNan
 * @date: 2020-02-28 15:04
 * @version: v1.0
 * @description: com.hadoop.ljs.kafka010.security
 */
public class KafkaKerberosConsumer {
    public static final String krb5Conf="D:\\kafkaSSL\\krb5.conf";
    public static final String kafkaJaasConf="D:\\kafkaSSL\\kafka_client_jaas.conf";
    public static final String bootstrapServers="salver31.hadoop.ljs:6667,salver32.hadoop.ljs:6667";
    public static final String topic="topic1";
    public static final String consumerGroup="group_topic1";

    public static void main(String[] args) {
        /*Required for Kerberos: the following two lines*/
        System.setProperty("java.security.krb5.conf", krb5Conf);
        System.setProperty("java.security.auth.login.config", kafkaJaasConf);

        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("group.id", comsumerGroup);
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "1000");
        props.put("auto.offset.reset", "earliest");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        /*Required for Kerberos: the following three properties*/
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "GSSAPI");
        props.put("sasl.kerberos.service.name", "kafka");

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
        kafkaConsumer.subscribe(Arrays.asList(topic));
        while (true) {
            //poll(long) is deprecated in newer clients; poll for up to one second
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Partition: " + record.partition() + " Offset: " + record.offset() + " Value: " + record.value() + " ThreadID: " + Thread.currentThread().getId());
            }
            //enable.auto.commit is false, so commit processed offsets manually
            kafkaConsumer.commitSync();
        }
    }
}
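
The loop above never exits. In a real service the standard pattern is to call wakeup() on the consumer from a shutdown hook, catch the resulting WakeupException in the poll loop, and then close the consumer; a sketch of that pattern against the same kafkaConsumer:

//In main(), after subscribe():
final Thread mainThread = Thread.currentThread();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    kafkaConsumer.wakeup();//makes any blocked poll() throw WakeupException
    try { mainThread.join(); } catch (InterruptedException ignored) { }
}));
try {
    while (true) {
        ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record.value());
        }
        kafkaConsumer.commitSync();
    }
} catch (org.apache.kafka.common.errors.WakeupException e) {
    //expected during shutdown; fall through to close()
} finally {
    kafkaConsumer.close();//leaves the group and releases resources
}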

 
