Link: https://pan.baidu.com/s/12b8Ef7Tem7WsMchCJbWFTw
Extraction code: ja19
1. Error
java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Ljava/lang/String;I)V
Cause: your hadoop.dll does not match the Hadoop version you are running.
What happened: my version is Hadoop 2.7.2, but the hadoop.dll I had downloaded was built for hadoop-2.6-x64. The result: start-dfs.cmd ran fine, i.e. HDFS started, but start-yarn.cmd failed with the error above, presumably because createDirectoryWithMode0 has no native implementation in that older hadoop.dll.
Fix: recompile hadoop.dll for your Hadoop version. My first builds failed, probably because the project's Windows SDK 7 dependency was wrong; after upgrading to Windows 10, building f:\hadoop_2.7\src\hadoop-common-project\hadoop-common\src\main\native\native.sln with Visual Studio 2010 Ultimate worked.
If you need it, the 2.7.2 hadoop.dll is available at the link above.
If hadoop.dll is missing, copying the file into C:\Windows\System32 also fixes it.
2. Error
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
This happens because HADOOP_HOME is not set. On Windows, set it and restart so the new environment variable takes effect, or set it in code: System.setProperty("hadoop.home.dir", "E:\\Work\\hadoop-2.7.2");
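A minimal sketch of the in-code fix (the install path is a placeholder; the property has to be set before the first Hadoop API call):
public class HadoopHomeFix {
    public static void main(String[] args) throws Exception {
        // Set hadoop.home.dir before any Hadoop class inspects it;
        // replace the path with your local Hadoop install.
        System.setProperty("hadoop.home.dir", "E:\\Work\\hadoop-2.7.2");
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        System.out.println(conf.get("fs.defaultFS")); // sanity check: prints file:/// by default
    }
}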
3. Error
org.apache.hadoop.security.AccessControlException: Permission denied: user=Administrator, access=WRITE, inode="tmp":root:supergroup:rwxr-xr-x
Workaround: disable permission checking by editing hdfs-site.xml:
<property>
    <name>dfs.permissions</name>
    <value>false</value>
</property>
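In Hadoop 2.x this property was renamed; if the above has no effect, try the newer name as well (this assumes a 2.x-era hdfs-default.xml) and restart the NameNode afterwards:
<property>
    <name>dfs.permissions.enabled</name>
    <value>false</value>
</property>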
4. Hadoop Eclipse plugin
Map/Reduce (V2) Master: the port corresponds to yarn.resourcemanager.scheduler.address in yarn-site.xml.
DFS Master: the port corresponds to fs.defaultFS in core-site.xml.
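For example, with stock port numbers (assumed values; read the real ones from your own config files):
yarn-site.xml:  yarn.resourcemanager.scheduler.address = master:8030  ->  Map/Reduce (V2) Master port: 8030
core-site.xml:  fs.defaultFS = hdfs://master:9000  ->  DFS Master port: 9000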
5. The job fails with no error output
If a run fails silently, copy a log4j.properties file onto the classpath; the suppressed error log then becomes visible.
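A minimal log4j.properties that sends everything to the console (standard log4j 1.2 syntax, not specific to this project):
log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n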
6. Administrator rights
Run IDEA as administrator to avoid permission problems.
Example setup:
First add the local Hadoop 2.7.2 bin and sbin directories to the PATH environment variable,
then put winutils.exe and hadoop.dll in place (see items 1 and 2 above).
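A quick sanity check before submitting jobs (a hypothetical helper, not part of the original setup), verifying that HADOOP_HOME points at a directory containing the native binaries:
import java.io.File;

public class EnvCheck {
    public static void main(String[] args) {
        // HADOOP_HOME should point at the unpacked hadoop-2.7.2 directory
        String home = System.getenv("HADOOP_HOME");
        System.out.println("HADOOP_HOME = " + home);
        if (home != null) {
            // winutils.exe (and usually hadoop.dll) must sit in %HADOOP_HOME%\bin on Windows
            System.out.println("winutils.exe present: " + new File(home, "bin\\winutils.exe").exists());
            System.out.println("hadoop.dll present:   " + new File(home, "bin\\hadoop.dll").exists());
        }
    }
}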
package com.example.demo.mapred;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WCApp {
    public static void main(String[] args) throws Exception {
        // String[] args1 = {"d:\\mr", "d:\\mr\\out"}; // hard-coded input/output paths for local testing
        // System.setProperty("hadoop.home.dir", "D:\\vmbak\\hadoop-2.7.2");
        Configuration conf = new Configuration();
        // conf.set("yarn.resourcemanager.hostname", "0.0.0.0");
        // conf.set("fs.defaultFS", "file:///");
        // conf.set("mapreduce.app-submission.cross-platform", "true");
        Job job = Job.getInstance(conf);

        // Configure the job
        job.setJobName("WCApp");                          // job name
        job.setJarByClass(WCApp.class);                   // class used to locate the job jar
        job.setInputFormatClass(TextInputFormat.class);   // input format

        // Output format class (TextOutputFormat is the default)
        // job.setOutputFormatClass(SequenceFileOutputFormat.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));    // input path
        FileOutputFormat.setOutputPath(job, new Path(args[1]));  // output path (must not exist yet)

        // Split size bounds
        // FileInputFormat.setMaxInputSplitSize(job, 13);
        // FileInputFormat.setMinInputSplitSize(job, 1L);

        job.setPartitionerClass(MyPartitioner.class);  // custom partitioner
        job.setCombinerClass(WCReducer.class);         // combiner (reuses the reducer)
        job.setMapperClass(WCMapper.class);            // mapper class
        job.setReducerClass(WCReducer.class);          // reducer class
        job.setNumReduceTasks(3);                      // number of reducers

        job.setMapOutputKeyClass(Text.class);          // map output key type
        job.setMapOutputValueClass(IntWritable.class); // map output value type
        job.setOutputKeyClass(Text.class);             // final output key type
        job.setOutputValueClass(IntWritable.class);    // final output value type

        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
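Two details worth noting: WCReducer can double as the combiner only because its input and output types are both (Text, IntWritable); a combiner must emit exactly the key/value types the reducer consumes. And with job.setNumReduceTasks(3), the job writes three part-r-* files, even if some end up empty.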
package com.example.demo.mapred;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        Text keyOut = new Text();
        IntWritable valueOut = new IntWritable();
        // Split each line on spaces and emit (word, 1) for every token
        String[] arr = value.toString().split(" ");
        for (String s : arr) {
            keyOut.set(s);
            valueOut.set(1);
            context.write(keyOut, valueOut);
        }
    }
}
package com.example.demo.mapred;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class WCReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    /**
     * Sum the counts for each word and emit (word, total).
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        int count = 0;
        for (IntWritable iw : values) {
            count += iw.get();
        }
        String tno = Thread.currentThread().getName();
        System.out.println(tno + " : WCReducer : " + key.toString() + "=" + count);
        context.write(key, new IntWritable(count));
    }
}
package com.example.demo.mapred;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class MyPartitioner extends Partitioner<Text, IntWritable> {
    @Override
    public int getPartition(Text text, IntWritable intWritable, int numPartitions) {
        System.out.println("MyPartitioner");
        // Send every key to partition 0 (for demonstration only)
        return 0;
    }
}
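Because getPartition always returns 0, every record lands in the first reducer and the other two reducers emit empty files. A more realistic body (a sketch, not part of the original example) would spread keys by hash:
return (text.hashCode() & Integer.MAX_VALUE) % numPartitions;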
Program arguments (Run/Debug configuration):
D:\mr D:\mr\output
Note that D:\mr\output must not exist before the run; Hadoop refuses to overwrite an existing output directory and aborts with FileAlreadyExistsException.