1. Windows本地测试
项目打成jar包后，在Windows本地直接提交到远程集群运行
/**
 * Word-count job driver for submitting from a local Windows machine to a
 * remote YARN cluster (cross-platform submission with an explicit jar path).
 *
 * <p>Exits with status 0 on job success, 1 on failure.
 */
public class WordCountMain {
    public static void main(String[] args) throws Exception {
        // Impersonate "root" so the remote HDFS accepts reads/writes
        // regardless of the local Windows user name.
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration conf = new Configuration();
        // Default file system: the HDFS NameNode of the cluster.
        conf.set("fs.defaultFS", "hdfs://linux01:9000");
        // Run the MR program on YARN instead of the local runner.
        conf.set("mapreduce.framework.name", "yarn");
        // Host of the YARN ResourceManager.
        conf.set("yarn.resourcemanager.hostname", "linux01");
        // Required when submitting from Windows to a Linux cluster:
        // rewrites classpath/command separators for the remote platform.
        conf.set("mapreduce.app-submission.cross-platform", "true");

        Job job = Job.getInstance(conf);
        // Local path of the job jar that will be shipped to the cluster.
        job.setJar("F:\\WorkSpace\\MapReducePro\\target\\MapReducePro-1.0-SNAPSHOT.jar");

        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReduce.class);
        job.setNumReduceTasks(1);

        // Map-output and final-output key/value types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // HDFS input directory and output directory
        // (output must not already exist, or the job fails).
        FileInputFormat.setInputPaths(job, new Path("/wc/"));
        FileOutputFormat.setOutputPath(job, new Path("/output"));

        // Propagate job success/failure to the process exit code;
        // the original discarded waitForCompletion's boolean result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
2. Linux集群运行
项目打包后上传到Linux磁盘上
命令：hadoop jar <jar包路径> <主类全限定名>
hadoop jar ./MapReducePro-1.0-SNAPSHOT.jar com.demo.mapreduce.wordCount.WordCountMain
配置代码:
/**
 * Word-count job driver for running on the cluster itself via
 * {@code hadoop jar}; the jar is located through setJarByClass, so no
 * explicit jar path or cross-platform settings are needed.
 *
 * <p>Exits with status 0 on job success, 1 on failure.
 */
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Run the MR program on YARN instead of the local runner.
    conf.set("mapreduce.framework.name", "yarn");
    // Host of the YARN ResourceManager.
    conf.set("yarn.resourcemanager.hostname", "linux01");

    Job job = Job.getInstance(conf);
    // Resolve the submitted jar from the class that contains this driver.
    job.setJarByClass(WordCountMain.class);

    job.setMapperClass(WordCountMapper.class);
    job.setReducerClass(WordCountReduce.class);
    job.setNumReduceTasks(1);

    // Map-output and final-output key/value types.
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    // HDFS input directory and output directory
    // (output must not already exist, or the job fails).
    FileInputFormat.setInputPaths(job, new Path("/wc/"));
    FileOutputFormat.setOutputPath(job, new Path("/output"));

    // Propagate job success/failure to the process exit code;
    // the original discarded waitForCompletion's boolean result.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
运行中: