MapReduce 模板
简洁的开发模板，主要对其中的各项设置做了简单整理，并无难点。
package com.ucky.mr;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Skeleton MapReduce driver template.
 *
 * <p>Wires up an identity Mapper/Reducer pair with {@code LongWritable}/{@code Text}
 * key-value types. Copy this class and fill in the {@code map()}/{@code reduce()}
 * bodies for a real job. Expected command line: {@code <input> <output>} plus any
 * Hadoop generic options (which are honored via {@link ToolRunner}).
 */
public class MapreduceModule extends Configured implements Tool {

    /** Identity mapper template — override {@link #map} with real logic. */
    static class Map extends Mapper<LongWritable, Text, LongWritable, Text> {

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            super.setup(context);
        }

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Identity behavior: super.map emits (key, value) unchanged.
            super.map(key, value, context);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            super.cleanup(context);
        }
    }

    /** Identity reducer template — override {@link #reduce} with real logic. */
    static class Reduce extends Reducer<LongWritable, Text, LongWritable, Text> {

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            super.setup(context);
        }

        @Override
        protected void reduce(LongWritable key, Iterable<Text> value, Context context)
                throws IOException, InterruptedException {
            // Identity behavior: super.reduce emits (key, v) for each value.
            super.reduce(key, value, context);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            super.cleanup(context);
        }
    }

    /**
     * Configures and submits the job, waiting for completion.
     *
     * @param args expected: {@code <input> <output>}
     * @return 0 on success, 1 on job failure, -1 on invalid arguments
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        // BUG FIX: use the Configuration injected by ToolRunner (getConf())
        // rather than a fresh Configuration. The original created
        // "new Configuration()", which discarded -D and other generic options
        // parsed by ToolRunner — defeating the purpose of implementing Tool.
        Configuration conf = getConf();
        if (conf == null) {
            // Fallback when run() is invoked directly without ToolRunner.
            conf = new Configuration();
        }

        Job job = praseInputAndOutput(this, conf, args);
        if (job == null) {
            // Invalid arguments — usage message already printed.
            return -1;
        }

        // Jar containing the job classes.
        job.setJarByClass(MapreduceModule.class);
        // Mapper / Reducer implementations.
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        // Map output key/value types.
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        // Reduce (final) output key/value types.
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);

        boolean isSuccess = job.waitForCompletion(true);
        return isSuccess ? 0 : 1;
    }

    /**
     * Validates the command-line arguments and builds a {@link Job} with the
     * input and output paths set.
     *
     * <p>Name intentionally kept as {@code praseInputAndOutput} (sic — "parse")
     * for backward compatibility with existing callers of this public method.
     *
     * @param tool the Tool whose name is shown in the usage message
     * @param conf the job configuration
     * @param args expected: {@code <input> <output>}
     * @return the configured Job, or {@code null} if the arguments are invalid
     * @throws Exception if job creation or path setup fails
     */
    public Job praseInputAndOutput(Tool tool, Configuration conf, String[] args) throws Exception {
        if (args.length != 2) {
            System.err.printf("Usage:%s [generic options <input> <output>]\n",
                    tool.getClass().getSimpleName());
            // BUG FIX: the original fell through after printing usage and
            // crashed on args[0] with ArrayIndexOutOfBoundsException.
            return null;
        }
        // Job.getInstance replaces the deprecated "new Job(conf, name)" constructor.
        Job job = Job.getInstance(conf, MapreduceModule.class.getSimpleName());
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        return job;
    }

    /** Entry point: delegates to ToolRunner so generic options are parsed. */
    public static void main(String[] args) throws Exception {
        int status = ToolRunner.run(new MapreduceModule(), args);
        System.exit(status);
    }
}