// Solution 1 (解决方案一)
package com.stu.mapred;
import java.io.IOException;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
* @类名 Max3.java
* @作者 jinbanglong
* @版本 V1.0
* @日期 2019年8月7日-下午7:34:23
* @描述
*
*/
//Problem: a file with ~1,000,000 numbers, one per line; find the 3 largest
//with an efficient MapReduce design.
//Key idea: Mapper.cleanup() runs exactly once at the end of each map task,
//so each map task can emit only its local top 3 candidates.
/**
 * MapReduce job that finds the 3 largest numbers in a text file containing
 * one integer per line.
 *
 * <p>Strategy: each map task keeps only its local top-3 candidates and emits
 * them in {@code cleanup()} (runs once per map task). Values are negated so
 * the ascending shuffle sort delivers keys to the reducer in descending order
 * of the original numbers; the single reducer takes the first 3 keys and
 * negates them back.
 */
public class Max3 {

	/** Number of largest values to report. */
	private static final int TOP_N = 3;

	static class Max3Mapper extends Mapper<LongWritable, Text, IntWritable, IntWritable> {

		// Sorted, de-duplicated candidates. Stored negated so natural ascending
		// order corresponds to descending original values.
		private final TreeSet<Integer> candidates = new TreeSet<>();

		private final IntWritable outKey = new IntWritable();
		// Value payload is never inspected by the reducer; any non-null writable works.
		private final IntWritable outVal = new IntWritable(1);

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String line = value.toString().trim();
			if (line.isEmpty()) {
				return; // tolerate blank lines instead of throwing NumberFormatException
			}
			candidates.add(-Integer.parseInt(line));
			// Bound memory: only TOP_N candidates are ever emitted, so anything
			// beyond the TOP_N smallest negated values can be discarded now.
			if (candidates.size() > TOP_N) {
				candidates.pollLast();
			}
		}

		/** Runs once at the end of the map task; emits at most TOP_N records. */
		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// Set is already ascending (== original values descending) and holds
			// at most TOP_N entries; a plain loop avoids swallowing checked
			// exceptions inside a lambda.
			for (Integer negated : candidates) {
				outKey.set(negated);
				context.write(outKey, outVal);
			}
		}
	}

	static class Max3Reducer extends Reducer<IntWritable, IntWritable, IntWritable, NullWritable> {

		// Count of keys emitted so far by this reduce task. Correct globally
		// only when the job runs with a single reducer (set in main()).
		private int emitted = 0;
		private final IntWritable outKey = new IntWritable();

		@Override
		protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			if (emitted < TOP_N) {
				outKey.set(-key.get()); // undo the mapper's negation
				context.write(outKey, NullWritable.get());
				emitted++;
			}
		}
	}

	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);
		job.setJarByClass(Max3.class);
		job.setMapperClass(Max3Mapper.class);
		job.setReducerClass(Max3Reducer.class);
		job.setMapOutputKeyClass(IntWritable.class);
		job.setMapOutputValueClass(IntWritable.class);
		job.setOutputKeyClass(IntWritable.class);
		// BUG FIX: the reducer emits NullWritable values, not IntWritable.
		job.setOutputValueClass(NullWritable.class);
		// A single reduce task is required so that the first TOP_N keys seen
		// are the global top N, not a per-partition top N.
		job.setNumReduceTasks(1);
		// Normalized to a single slash, consistent with the output path below.
		FileInputFormat.addInputPath(job, new Path("D:/text/num_in"));
		FileSystem fs = FileSystem.get(conf);
		Path outPath = new Path("D:/text/num_out");
		if (fs.exists(outPath)) {
			fs.delete(outPath, true); // Hadoop refuses to overwrite an existing output dir
		}
		FileOutputFormat.setOutputPath(job, outPath);
		// Propagate job success/failure as the process exit status.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}