1. Driver
package bigdata.hanjiaxiaozhi.cn.mapreduce.model;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class MRDriver extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        // Build the job from the current configuration and give it a name
        Job job = Job.getInstance(this.getConf(), "model");
        job.setJarByClass(MRDriver.class);

        // Input: this template reads two input paths; TextInputFormat is the default
        Path inputPath1 = new Path(args[0]);
        Path inputPath2 = new Path(args[1]);
        TextInputFormat.setInputPaths(job, inputPath1, inputPath2);

        // Map: the key/value classes must match MRMapper's output types
        job.setMapperClass(MRMapper.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(NullWritable.class);

        // Reduce: the key/value classes must match MRReducer's output types
        job.setReducerClass(MRReducer.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(NullWritable.class);
        job.setNumReduceTasks(1);

        // Output: delete the output directory if it already exists,
        // otherwise the job fails with a FileAlreadyExistsException
        Path outputPath = new Path(args[2]);
        FileSystem hdfs = FileSystem.get(this.getConf());
        if (hdfs.exists(outputPath)) {
            hdfs.delete(outputPath, true);
        }
        TextOutputFormat.setOutputPath(job, outputPath);

        // Submit the job and block until it finishes
        return job.waitForCompletion(true) ? 0 : -1;
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        int status = ToolRunner.run(conf, new MRDriver(), args);
        System.exit(status);
    }
}
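Because the driver is launched through ToolRunner, generic options such as -D key=value are parsed into the Configuration before run() is called. A typical submission would look like: hadoop jar model.jar bigdata.hanjiaxiaozhi.cn.mapreduce.model.MRDriver /data/input1 /data/input2 /data/output, where model.jar and the three paths are hypothetical placeholders; the third argument is the output directory the driver deletes if it already exists.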
2. Mapper
package bigdata.hanjiaxiaozhi.cn.mapreduce.model;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
public class MRMapper extends Mapper<LongWritable, Text, NullWritable, NullWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // TODO: implement the map logic and emit results with context.write(...)
    }
}
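The template mapper above deliberately emits nothing. As a minimal sketch of how the empty map() might be filled in for a word-count style job: the WordCountMapper name, the Text/IntWritable output types, and the whitespace tokenization are illustrative assumptions, not part of the template, and the driver's setMapOutputKeyClass/setMapOutputValueClass calls would have to change to match.

package bigdata.hanjiaxiaozhi.cn.mapreduce.model;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;

// Hypothetical example: word-count style mapper built from the template
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Emit (word, 1) for every whitespace-separated token on the line
        for (String token : value.toString().split("\\s+")) {
            if (!token.isEmpty()) {
                word.set(token);
                context.write(word, ONE);
            }
        }
    }
}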
3. Reducer
package bigdata.hanjiaxiaozhi.cn.mapreduce.model;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
public class MRReducer extends Reducer<NullWritable, NullWritable, NullWritable, NullWritable> {

    @Override
    protected void reduce(NullWritable key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        // TODO: implement the reduce logic and emit results with context.write(...)
    }
}
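A matching sketch for the reduce side, under the same hypothetical word-count assumption; WordCountReducer is an illustrative name, and the driver's setOutputKeyClass/setOutputValueClass calls would change to Text.class and IntWritable.class accordingly.

package bigdata.hanjiaxiaozhi.cn.mapreduce.model;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;

// Hypothetical example: word-count style reducer matching WordCountMapper
public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    private final IntWritable total = new IntWritable();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // Sum the counts emitted by the mapper for this word
        int sum = 0;
        for (IntWritable v : values) {
            sum += v.get();
        }
        total.set(sum);
        context.write(key, total);
    }
}

Reusing a single IntWritable instance instead of allocating a new one per key is the usual Hadoop idiom for reducing object churn in hot loops.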