// Launch the local task as a child process in workDir with the given
// environment, and relay its stdout/stderr to the session console.
// BUG FIX: was "Processor" — Runtime.exec returns java.lang.Process, and
// progressLocal(Process, String) below requires a Process.
Process executor = Runtime.getRuntime().exec(cmdLine, env, new File(workDir));
// Drain stdout and stderr on dedicated threads; without readers the child
// can block once its pipe buffers fill up. A null type means the lines are
// forwarded verbatim (no "type>" prefix).
StreamPrinter outPrinter = new StreamPrinter(
    executor.getInputStream(), null,
    SessionState.getConsole().getChildOutStream());
StreamPrinter errPrinter = new StreamPrinter(
    executor.getErrorStream(), null,
    SessionState.getConsole().getChildErrStream());
outPrinter.start();
errPrinter.start();
// Block until the child exits; progressLocal reports status on the console.
int exitVal = jobExecHelper.progressLocal(executor, getId());
public static class StreamPrinter extends Thread {
InputStream is;
String type;
PrintStream os;
public StreamPrinter(InputStream is, String type, PrintStream os) {
this.is = is;
this.type = type;
this.os = os;
}
@Override
public void run() {
BufferedReader br = null;
try {
InputStreamReader isr = new InputStreamReader(is);
br = new BufferedReader(isr);
String line = null;
if (type != null) {
while ((line = br.readLine()) != null) {
os.println(type + ">" + line);
}
} else {
while ((line = br.readLine()) != null) {
os.println(line);
}
}
br.close();
br=null;
} catch (IOException ioe) {
ioe.printStackTrace();
}finally{
IOUtils.closeStream(br);
}
}
}
/**
 * Waits for a locally-run child process to finish and reports its status
 * on the console, dumping job debug info on failure when configured.
 *
 * @param runningJob the already-started local task process to wait on
 * @param taskId     id of the task, passed to the local job debugger
 * @return the process exit status; -101 if the wait was interrupted
 */
public int progressLocal(Process runningJob, String taskId) {
  int exitVal = -101;
  try {
    exitVal = runningJob.waitFor(); // TODO: poll periodically
  } catch (InterruptedException e) {
    // BUG FIX: was silently swallowed. Restore the interrupt flag so
    // callers can observe the interruption; exitVal stays -101 and is
    // reported as a failure below.
    Thread.currentThread().interrupt();
  }
  if (exitVal != 0) {
    console.printError("Execution failed with exit status: " + exitVal);
    console.printError("Obtaining error information");
    if (HiveConf.getBoolVar(job, HiveConf.ConfVars.SHOW_JOB_FAIL_DEBUG_INFO)) {
      // Since local jobs are run sequentially, all relevant information is already available
      // Therefore, no need to fetch job debug info asynchronously
      localJobDebugger(exitVal, taskId);
    }
  } else {
    console.printInfo("Execution completed successfully");
    console.printInfo("Mapred Local Task Succeeded . Convert the Join into MapJoin");
  }
  return exitVal;
}