Causes
The root cause lies in the shuffle: different keys carry different amounts of data, so after partitioning, the tasks that receive the hot keys have far more records to process than the rest.
The symptom during a MapReduce run is that most reduce tasks finish quickly, but one or a few nodes crawl along, and everything downstream sits waiting on them, so the whole job takes much longer than it should.
Solutions
1: Avoid the shuffle altogether.
2: Merge on the map side with a Combiner (a wiring sketch follows).
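For the Combiner route, the wiring is one line on the job: the framework then runs the reduce logic on each map task's local output before anything crosses the network, so hot keys contribute far fewer records to the shuffle. A minimal sketch, reusing the SkewStressCombiner class defined further down (this driver is illustrative, not a complete job, and assumes it sits in the same package):

package com._yzq55iou.dataskew;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class CombinerWiringSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration());
        // Safe here because summing counts is associative and commutative;
        // the combiner merges (word, 1) pairs on the map side first.
        job.setCombinerClass(SkewStressCombiner.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // ... mapper, reducer, and input/output paths as in the full jobs below
    }
}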
Code idea: take keys A, B, C, D, E, F, G,
where A and G carry far more data than the others.
Run as-is, the partitions holding the small keys finish while A and G are still going.
Partitioning is normally hashCode modulo the number of reducers; instead we append a random number to A and G, so the hot keys land in different partitions,
and no single reduce task ends up digesting all of the hot data.
After salting, A appears as keys like A1 and A2 (the code below simply salts every word).
A second MapReduce pass then strips the appended suffix
and sums the counts to finish the statistics.
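As a standalone toy (the class name and the two-partition setup are illustrative, not part of the jobs below), the two halves of the trick look like this: salt a hot key so its records scatter across reducers, then strip the salt so the partial counts can merge again.

import java.util.Random;

public class SaltingDemo {
    public static void main(String[] args) {
        Random r = new Random();
        int numReduceTasks = 2;
        // Phase 1: append a random salt so "A" spreads over the partitions.
        String salted = "A" + "--" + r.nextInt(numReduceTasks); // "A--0" or "A--1"
        // Phase 2: strip the salt so the partial counts for "A" merge again.
        String original = salted.split("--")[0]; // back to "A"
        System.out.println(salted + " -> " + original);
    }
}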
package com._yzq55iou.dataskew;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import java.io.IOException;
import java.util.Random;
/**
 * Given data such as a, b, c, d, e where a and b are very frequent and c, d, e
 * are rare, data skew occurs.
 * Root cause: the shuffle. Different keys have different record counts, so the
 * tasks carry uneven loads; the light ones finish and then sit idle until the
 * heavy ones complete.
 * Remedies: avoid the shuffle, or merge on the map side with a Combiner.
 */
public class SkewStress {

    public static class SkewStressMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        int numReduceTasks = 0;
        Text k = new Text();
        IntWritable v = new IntWritable(1);
        Random r = new Random();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Ask the framework how many reduce tasks there are, so the salt
            // range matches the number of partitions.
            numReduceTasks = context.getNumReduceTasks();
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String words = value.toString();
            String[] split = words.split("\\s+");
            for (String word : split) {
                // Salt the key: "A" becomes "A--0" or "A--1", so a hot key is
                // spread over several reduce tasks instead of piling onto one.
                k.set(word + "--" + r.nextInt(numReduceTasks));
                context.write(k, v);
            }
        }
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setMapperClass(SkewStressMapper.class);
        // The sum-reducer below doubles as job 1's reducer: it totals the
        // counts per *salted* key, e.g. (A--0, 2) and (A--1, 1) separately.
        job.setReducerClass(SkewStressCombiner.class);
        job.setNumReduceTasks(2);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Write a SequenceFile so job 2 can read the (Text, IntWritable) pairs back.
        job.setOutputFormatClass(SequenceFileOutputFormat.class);
        // job.setCombinerClass(SkewStressCombiner.class); // optional map-side merge
        FileInputFormat.setInputPaths(job, new Path("E:\\anliwenjianjia\\mrdata\\skew\\input"));
        FileOutputFormat.setOutputPath(job, new Path("E:\\anliwenjianjia\\mrdata\\skew\\out5"));
        job.waitForCompletion(true);
    }
}
package com._yzq55iou.dataskew;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
public class SkewStressCombiner extends Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        // Sum the 1s (or partial counts) for one salted key.
        int count = 0;
        for (IntWritable value : values) {
            count += value.get();
        }
        context.write(key, new IntWritable(count));
    }
}
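Because summing is associative and commutative, this one class can be registered both as job 1's reducer and, optionally, as its map-side combiner; running it early just merges partial counts sooner without changing the result.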
package com._yzq55iou.dataskew;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
public class SkewStress2 {

    // Job 1 wrote a SequenceFile of (Text, IntWritable) pairs, so the mapper's
    // input types must be Text/IntWritable, not the TextInputFormat defaults.
    static class SkewStress2Mapper extends Mapper<Text, IntWritable, Text, IntWritable> {
        Text k = new Text();

        @Override
        protected void map(Text key, IntWritable value, Context context) throws IOException, InterruptedException {
            // Keys look like "word--salt"; strip the salt so all partial
            // counts for the same word meet in a single reduce call.
            String word = key.toString().split("--")[0];
            k.set(word);
            context.write(k, value);
        }
    }
    static class SkewStress2Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // Add up the per-salt partial counts to get the true total per word.
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            context.write(key, new IntWritable(count));
        }
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setMapperClass(SkewStress2Mapper.class);
        job.setReducerClass(SkewStress2Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Read job 1's SequenceFile output directly as (Text, IntWritable) pairs.
        job.setInputFormatClass(SequenceFileInputFormat.class);
        FileInputFormat.setInputPaths(job, new Path("E:\\anliwenjianjia\\mrdata\\skew\\out5"));
        FileOutputFormat.setOutputPath(job, new Path("E:\\anliwenjianjia\\mrdata\\skew\\out6"));
        job.waitForCompletion(true);
    }
}
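As a sanity check, here is the flow one might see for a tiny input with two reduce tasks (which salt each word draws is random, so the split between A--0 and A--1 will vary):

input text:      A A A B
job 1 output:    (A--0, 2), (A--1, 1), (B--1, 1)   salted partial counts
job 2 output:    (A, 3), (B, 1)                    salts stripped, totals merged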