Automating Kafka User Creation

Add the dependency to pom.xml:

    <dependency>
        <groupId>ch.ethz.ganymed</groupId>
        <artifactId>ganymed-ssh2</artifactId>
        <version>262</version>
    </dependency>

The class that automates Kafka user creation: ExecuteCommand

import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;
import com.jmc.business.common.exception.JmcBusinessException;
import lombok.extern.slf4j.Slf4j;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

@Slf4j
public class ExecuteCommand {

    private static Connection conn;

    private static Boolean login(String kafkaHostName, String kafkaServerUser, String kafkaServerPassword) {
        boolean flag = false;
        try {
            conn = new Connection(kafkaHostName);
            conn.connect();
            flag = conn.authenticateWithPassword(kafkaServerUser, kafkaServerPassword);
        } catch (IOException ex) {
            log.error("Failed to connect to the Kafka server host", ex);
            throw new JmcBusinessException("Failed to connect to the Kafka server host");
        }
        return flag;
    }

    public static boolean execute(String kafkaHostName, String kafkaServerUser, String kafkaServerPassword,
                                  String kafkaServerAddress, String zookeeperPort, String serverPort,
                                  String username, String password, String topicName, String userGroup) {
        boolean result = login(kafkaHostName, kafkaServerUser, kafkaServerPassword);
        try {
            if (result) { // login succeeded

                // 1. Create the topic
                String createTopicCmd = kafkaServerAddress + "/bin/kafka-topics.sh --create --bootstrap-server "
                        + kafkaHostName + ":" + serverPort + " --topic " + topicName;
                remoteExec(createTopicCmd);

                // 2. Create SCRAM credentials for the new user
                String createUserCmd = kafkaServerAddress + "/bin/kafka-configs.sh --bootstrap-server "
                        + kafkaHostName + ":" + serverPort
                        + " --alter --add-config 'SCRAM-SHA-256=[iterations=8192,password=" + password
                        + "],SCRAM-SHA-512=[password=" + password + "]' --entity-type users --entity-name " + username;
                remoteExec(createUserCmd);

                // 3. Grant the user Read permission on the topic
                String topicAclCmd = kafkaServerAddress + "/bin/kafka-acls.sh --authorizer kafka.security.authorizer.AclAuthorizer"
                        + " --authorizer-properties zookeeper.connect=" + kafkaHostName + ":" + zookeeperPort
                        + " --add --allow-principal User:" + username + " --operation Read --topic " + topicName;
                remoteExec(topicAclCmd);

                // 4. Grant the user Read permission on the consumer group
                String groupAclCmd = kafkaServerAddress + "/bin/kafka-acls.sh --authorizer kafka.security.authorizer.AclAuthorizer"
                        + " --authorizer-properties zookeeper.connect=" + kafkaHostName + ":" + zookeeperPort
                        + " --add --allow-principal User:" + username + " --operation Read --group " + userGroup;
                remoteExec(groupAclCmd);

            } else {
                throw new JmcBusinessException("Failed to log in to the Kafka server");
            }
        } catch (Exception ex) {
            log.error("Exception while executing remote command", ex);
            throw new JmcBusinessException("Exception while executing remote command: " + ex.getMessage());
        } finally {
            if (conn != null) {
                conn.close();
            }
        }
        return true;
    }

    // Execute a command on the remote Linux host over the existing SSH connection
    public static String remoteExec(String cmd) throws IOException {
        log.info("Executing remote command: {}", cmd);
        Session session = null;
        BufferedReader br = null;
        InputStream is = null;
        StringBuilder res = new StringBuilder();
        try {
            // Open a session on the already-authenticated connection
            session = conn.openSession();
            // Run the command
            session.execCommand(cmd, "UTF-8");
            // Read the command's standard output
            is = new StreamGobbler(session.getStdout());
            br = new BufferedReader(new InputStreamReader(is, "UTF-8"));
            String line;
            while ((line = br.readLine()) != null) {
                res.append(line).append(System.lineSeparator());
            }
        } finally {
            // Release resources
            try {
                if (br != null) {
                    br.close();
                }
                if (is != null) {
                    is.close();
                }
                if (session != null) {
                    session.close();
                }
            } catch (Exception e) {
                log.error("Failed to close SSH resources", e);
            }
        }
        return res.toString();
    }


    public static void main(String[] args) {
        // IP of the Linux server that hosts Kafka
        String kafkaHostName = "192.168.1.176";
        // Server account (must have sufficient privileges)
        String kafkaServerUser = "root";
        // Password for that server account
        String kafkaServerPassword = "Rw6L1NkJY%";
        // Kafka installation path on the server
        String kafkaServerAddress = "/home/soft/kafka_2.12-3.3.1";
        // ZooKeeper port
        String zookeeperPort = "2181";
        // Kafka broker port
        String serverPort = "9092";
        // Username to create
        String username = "zhou03";
        // Password for the new user
        String certPassword = "cnki123456";
        // Topic to create
        String topicName = "topic-zhou03";
        // Consumer group to create and authorize
        String userGroup = "group-zhou03";
        ExecuteCommand.execute(kafkaHostName, kafkaServerUser, kafkaServerPassword, kafkaServerAddress,
                zookeeperPort, serverPort, username, certPassword, topicName, userGroup);
    }

}
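
Once the user, topic, and ACLs are in place, a client can authenticate with the new SCRAM credentials. Below is a minimal consumer sketch (the class name ScramConsumerDemo is illustrative, not part of the project above). It assumes the kafka-clients library is on the classpath, that the broker exposes a SASL_PLAINTEXT listener on port 9092, and that SCRAM-SHA-256 is enabled in the broker's sasl.enabled.mechanisms; the host, topic, group, and credentials simply mirror the values used in main.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ScramConsumerDemo {

    public static void main(String[] args) {
        Properties props = new Properties();
        // Assumes the broker's SASL_PLAINTEXT listener is on 9092; adjust to your listener config
        props.put("bootstrap.servers", "192.168.1.176:9092");
        props.put("group.id", "group-zhou03");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Authenticate with the SCRAM credentials created by ExecuteCommand
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "SCRAM-SHA-256");
        props.put("sasl.jaas.config",
                "org.apache.kafka.common.security.scram.ScramLoginModule required "
                        + "username=\"zhou03\" password=\"cnki123456\";");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("topic-zhou03"));
            // Poll indefinitely; stop with Ctrl+C
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset=%d, value=%s%n", record.offset(), record.value());
                }
            }
        }
    }
}

If authentication fails, the first things to check are the broker's listener/security settings and that the ACLs created in steps 3 and 4 match the topic and group names the client actually uses.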
