请问，有没有人遇到过这个问题？是如何解决的？本人刚接触 Hadoop。我在 Windows 7 上装了 Eclipse，安装好插件后，连接另一台部署了伪分布式集群的机器，现在可以向 HDFS 文件系统中上传和删除文件及目录，但是运行 WordCount 例子时报空指针错误。谢谢。
//代码
// ============================================================
// WordCount driver (run from Eclipse on Windows against a remote
// pseudo-distributed cluster).
//
// NOTE(review): the NullPointerException at Shell.runCommand in the
// stack trace below is an environment problem, not a code bug:
// Hadoop on Windows needs winutils.exe. Set HADOOP_HOME (or
// -Dhadoop.home.dir) to a directory containing bin\winutils.exe
// matching your Hadoop version before running — TODO confirm the
// Hadoop version in use.
// ============================================================
Configuration conf = new Configuration();
// String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
// Hard-coded HDFS input/output paths for testing from the IDE.
String[] otherArgs = new String[] {
    "/input/README.txt",
    "/output"
};
if (otherArgs.length != 2) {
    // Restored the <in> <out> placeholders that were missing from the message.
    System.err.println("Usage: wordcount <in> <out>");
    System.exit(2);
}
// Job.getInstance(conf, name) replaces the deprecated new Job(conf, name).
Job job = Job.getInstance(conf, "WCDemo");
// Ships the jar containing this class to the cluster.
job.setJarByClass(WordCount.class);
job.setMapperClass(TokenizerMapper.class);
// The reducer doubles as a combiner: per-word counts sum associatively.
job.setCombinerClass(IntSumReducer.class);
job.setReducerClass(IntSumReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
// The output directory must NOT already exist, or submission fails.
FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
// Blocks until the job finishes; exit 0 on success, 1 on failure.
System.exit(job.waitForCompletion(true) ? 0 : 1);
//控制台信息
log4j: WARN No appenders could be found
for logger(org.apache.hadoop.metrics2.lib.MutableMetricsFactory).log4j: WARN Please initialize the log4j system properly.log4j: WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Exception in thread "main"java.lang.NullPointerException at java.lang.ProcessBuilder.start(ProcessBuilder.java: 441) at org.apache.hadoop.util.Shell.runCommand(Shell.java: 404) at org.apache.hadoop.util.Shell.run(Shell.java: 379) at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java: 589) at org.apache.hadoop.util.Shell.execCommand(Shell.java: 678) at org.apache.hadoop.util.Shell.execCommand(Shell.java: 661) at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java: 639) at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java: 435) at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java: 277) at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java: 125) at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java: 344) at org.apache.hadoop.mapreduce.Job$10.run(Job.java: 1268) at org.apache.hadoop.mapreduce.Job$10.run(Job.java: 1265) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java: 396) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java: 1491) at org.apache.hadoop.mapreduce.Job.submit(Job.java: 1265) at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java: 1286) at com.hadoop.example.WordCount.main(WordCount.java: 113)