Required jars: commons-io-2.6.jar and ganymed-ssh2-262.jar (Maven coordinates commons-io:commons-io:2.6 and ch.ethz.ganymed:ganymed-ssh2:262).
import ch.ethz.ssh2.ChannelCondition;
import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
/**
 * Invokes commands on a remote Linux host to submit a Hadoop MapReduce job.
 * The job jar resides on the same machine as the Hadoop client.
 *
 * References (CSDN blog posts):
 * "Executing remote shell commands from Java via ganymed-ssh2"
 * "Using SSH2 from Java to log into Linux and run commands" (yezhuanxu's CSDN blog)
 * "Running Linux commands remotely from Java"
 */
public class DemoHadoopRunShell1 {
/**
 * The input file wordcount_data.txt must first be uploaded to the HDFS root directory /.
 * @param args
 */
public static void main(String[] args) {
// CDH cluster (192.168.137.190)
/* String hadoopMasterHome = "/opt/cloudera/parcels/CDH/bin/";
String hadoopMasterIP = "192.168.137.190";
String masterhadoopusername = "root";
String masterhadooppassword = "111111";
*/
// Ambari cluster (192.168.137.163)
String hadoopMasterHome = "/usr/bin/";
String hadoopMasterIP = "192.168.137.163";
String masterhadoopusername = "root";
String masterhadooppassword = "111111";
String myjarpath = "/home/tony_dir/hadoop-mapreduce-examples-3.1.2.jar";
String mymainclass = "wordcount"; // program name resolved by the examples jar's driver, not a fully qualified class
String mymasterhadoopinputdatapath = "/wordcount_data.txt";
String mymasterhadoopoutputpath = "/wordcount_res";
Connection connection = null;
Session session = null;
InputStream stdOut = null;
InputStream stdErr = null;
// Existence check for the output path: "hadoop fs -test -e" exits with 0 when the
// path exists, non-zero otherwise; the follow-up echoes 1 (exists) or 0 (missing) so
// the result shows up on the interactive shell's stdout. A cleaner exec-channel
// variant is sketched in existsOnHdfs() below.
String checkhdfsfile = hadoopMasterHome.trim() + "hadoop fs -test -e " + mymasterhadoopoutputpath;
String checkhdfsfileres = "if [ $? -ne 0 ]; then echo 0; else echo 1; fi";
try {
// Delete any previous result directory; the job fails if the output path already exists.
String rmdircmd = hadoopMasterHome.trim() + "hadoop fs -rm -r " + mymasterhadoopoutputpath;
String runcmd = hadoopMasterHome.trim() + "hadoop jar " + myjarpath + " "
+ mymainclass.trim() + " " + mymasterhadoopinputdatapath.trim() + " "
+ mymasterhadoopoutputpath.trim();
connection = new Connection(hadoopMasterIP);
connection.connect(); // open the TCP/SSH connection
boolean isAuthenticated = connection.authenticateWithPassword(masterhadoopusername, masterhadooppassword);
if (isAuthenticated) {
System.out.println("连接成功" + hadoopMasterIP);
} else {
System.out.println("连接失败" + hadoopMasterIP );
throw new Exception("连接失败" + hadoopMasterIP );
}
session = connection.openSession();
session.requestPTY("bash"); // request a pseudo-terminal
session.startShell(); // start an interactive shell
stdOut = new StreamGobbler(session.getStdout());
stdErr = new StreamGobbler(session.getStderr());
BufferedReader stdoutReader = new BufferedReader(new InputStreamReader(stdOut)); // shell output
BufferedReader stderrReader = new BufferedReader(new InputStreamReader(stdErr));
PrintWriter out = new PrintWriter(session.getStdin()); // writer for sending commands to the shell
// System.out.println("文件夹是否存在 " + checkhdfsfile);
// out.println(checkhdfsfile);
// System.out.println("文件夹是否存在结果 " + checkhdfsfileres);
// out.println(checkhdfsfileres);
String suhdfs = "su hdfs"; // switch to the hdfs user, which owns the HDFS paths
System.out.println(suhdfs);
out.println(suhdfs);
// "export" is required; a plain assignment is not inherited by the hadoop child process.
String exportHADOOP_USER_NAME = "export HADOOP_USER_NAME=hdfs";
System.out.println(exportHADOOP_USER_NAME);
out.println(exportHADOOP_USER_NAME);
System.out.println("删除已存在文件夹 " + rmdircmd);
out.println(rmdircmd);
// Submit the MapReduce job.
System.out.println("Running: " + runcmd);
out.println(runcmd);
out.println("exit"); // leave the hdfs user's shell (entered via "su hdfs" above)
out.println("exit"); // leave the root login shell so the channel can close
out.close(); // close stdin so the remote shell sees EOF
// Block until the channel closes, EOF is reached, or an exit status arrives,
// with a one-hour timeout (1000 * 3600 ms).
session.waitForCondition(ChannelCondition.CLOSED | ChannelCondition.EOF | ChannelCondition.EXIT_STATUS, 1000 * 3600);
System.out.println("Here is the output from stdout:");
String line;
while ((line = stdoutReader.readLine()) != null) {
System.out.println(line);
}
System.out.println("Here is the output from stderr:");
while ((line = stderrReader.readLine()) != null) {
System.out.println(line);
}
Integer exitstatus = session.getExitStatus(); // may be null if the shell exited without reporting one
System.out.println("Exit status: " + exitstatus);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (stdErr != null) {
stdErr.close();
}
if (stdOut != null) {
stdOut.close();
}
} catch (Exception e) {
e.printStackTrace();
}
if (session != null) {
session.close();
}
if (connection != null) {
connection.close();
}
}
}
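/**
 * A minimal sketch, not part of the original flow: checks whether a path exists on HDFS
 * by running "hadoop fs -test -e" on a one-shot exec channel and reading the exit status
 * directly, instead of echoing 0/1 through the interactive shell as in main() above.
 * The method name and parameters are illustrative.
 */
private static boolean existsOnHdfs(Connection connection, String hadoopHome, String hdfsPath) throws Exception {
Session s = connection.openSession();
try {
s.execCommand(hadoopHome.trim() + "hadoop fs -test -e " + hdfsPath);
InputStream stdout = new StreamGobbler(s.getStdout());
while (stdout.read() != -1) { /* drain output so the command can finish */ }
s.waitForCondition(ChannelCondition.EXIT_STATUS, 30 * 1000);
Integer status = s.getExitStatus();
return status != null && status == 0; // exit code 0 means the path exists
} finally {
s.close();
}
}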
}
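The interactive shell above is needed mainly for the "su hdfs" step. If the login user can run hadoop directly, or setting HADOOP_USER_NAME is enough, ganymed-ssh2's exec channel is simpler: each Session executes exactly one command, and that command's return code becomes the channel's exit status. Below is a minimal sketch under that assumption; the class name DemoHadoopRunShell2 is illustrative, the host, credentials, and paths are the same placeholders as above, and the output directory is assumed not to exist yet.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import ch.ethz.ssh2.ChannelCondition;
import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;

public class DemoHadoopRunShell2 {
public static void main(String[] args) throws Exception {
Connection connection = new Connection("192.168.137.163");
connection.connect();
if (!connection.authenticateWithPassword("root", "111111")) {
throw new IllegalStateException("Authentication failed");
}
Session session = connection.openSession();
try {
// One command per exec channel; the env-variable prefix replaces "su hdfs"
// (assumes the remote account's shell handles POSIX-style assignments).
session.execCommand("HADOOP_USER_NAME=hdfs /usr/bin/hadoop jar "
+ "/home/tony_dir/hadoop-mapreduce-examples-3.1.2.jar wordcount "
+ "/wordcount_data.txt /wordcount_res");
BufferedReader reader = new BufferedReader(new InputStreamReader(new StreamGobbler(session.getStdout())));
String line;
while ((line = reader.readLine()) != null) {
System.out.println(line);
}
session.waitForCondition(ChannelCondition.EXIT_STATUS, 1000 * 3600);
System.out.println("Exit status: " + session.getExitStatus());
} finally {
session.close();
connection.close();
}
}
}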