Submitting a job from IDEA to YARN can run into a series of errors; each one can be fixed by setting the parameters shown in the code below.
package com.ruozedata.bigdata.hadoop.mapreduce.wc;
import com.ruozedata.bigdata.hadoop.utils.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
public class WordCountOnYarn {

    public static void main(String[] args) throws Exception {
        // Run as the "hadoop" user to avoid "Permission denied" errors on HDFS
        System.setProperty("HADOOP_USER_NAME", "hadoop");
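        // Note: this has to run before the first FileSystem/UserGroupInformation
        // call in the JVM; once the login user is established the property is ignored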

        // Build the job configuration
        Configuration configuration = new Configuration();

        // Fix "User classes may not be found": build the jar locally and point
        // the client at it so it gets shipped to the cluster
        configuration.set("mapred.jar", "E:\\Maven\\ruoedatahadoop\\target\\ruoedata-hadoop-1.0.jar");

        // Run the job on YARN; the default framework is "local"
        configuration.set("mapreduce.framework.name", "yarn");

        // Tell the client which host the ResourceManager runs on
        configuration.set("yarn.resourcemanager.hostname", "hadoop001");

        // Fix "could only be replicated to 0 nodes instead of minReplication (=1).
        // There are 1 datanode(s) running and 1 node(s) are excluded in this operation":
        // on a cloud host the datanode registers with its internal IP, so have the
        // client reach datanodes by hostname instead
        configuration.set("dfs.client.use.datanode.hostname", "true");

        // Required when submitting from one platform to another (e.g. from a
        // Windows IDE to a Linux cluster)
        configuration.set("mapreduce.app-submission.cross-platform", "true");

        Job job = Job.getInstance(configuration);

        // HDFS input/output paths on the cloud host
        String input = "hdfs://hadoop001:9000/user/hadoop/input1/pk.txt";
        String output = "hdfs://hadoop001:9000/user/hadoop/output1";

        // Clear the output directory so the job can be rerun
        FileUtils.deleteTarget(output, configuration);

        // Jar and driver class
        job.setJarByClass(WordCountOnYarn.class);

        // Custom Mapper and Reducer
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);

        // Key/value types emitted by the map phase
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Key/value types emitted by the reduce phase
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Input and output paths
        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        // Submit the job and wait for completion
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }

    public static class MyMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private final IntWritable ONE = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Input lines are comma-separated; emit (word, 1) for each token
            String[] splits = value.toString().split(",");
            for (String split : splits) {
                context.write(new Text(split), ONE);
            }
        }
    }

    public static class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text word, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // Sum the per-word counts produced by the mappers
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            context.write(word, new IntWritable(count));
        }
    }
}
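
The listing calls FileUtils.deleteTarget to clear the output directory before each run, but that helper lives in the project's utils package and is not shown above. A minimal sketch of what it presumably looks like, assuming it simply deletes the output path recursively when it exists:

package com.ruozedata.bigdata.hadoop.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class FileUtils {

    // Delete the output path recursively if it already exists, so that
    // FileOutputFormat does not abort the job with "Output directory ... already exists"
    public static void deleteTarget(String output, Configuration configuration) throws Exception {
        FileSystem fileSystem = FileSystem.get(URI.create(output), configuration);
        Path outputPath = new Path(output);
        if (fileSystem.exists(outputPath)) {
            fileSystem.delete(outputPath, true);
        }
    }
}

Any implementation that removes the output path before the job is submitted will do; without it, rerunning the job fails as soon as the output directory already exists.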