数据倾斜含义:少数几个reduce节点处理的数据量比其他reduce节点多很多,拖慢整个MR作业的进度。
下面是解决办法(个人跟着老师学习时理解,如果雷同,纯属巧合)
第一步:让数据倾斜出现
原始的数据:
1.txt
a b1
a b2
a b3
a b4
a b5
a b6
a b7
a b8
a b9
a b10
a b11
a b12
a b13
a b14
a b15
2.txt
a b16
a b17
a b18
a b19
a b20
a b21
a b22
a b23
a b24
a b25
a b26
a b27
a b28
a b29
a b30
3.txt
a b31
a b32
a b33
a b34
a b35
a b36
a b37
a b38
a b39
a b40
a b41
a b42
a b43
a b44
a b45
使用MR程序复现数据倾斜(程序如下):
package com.gsd.skew;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
public class SkewMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reuse Writable instances across map() calls instead of allocating new
    // ones per record — the standard Hadoop idiom to cut per-record garbage.
    private final Text keyOut = new Text();
    // Every token is counted as 1, so the value never changes after construction.
    private final IntWritable one = new IntWritable(1);

    /**
     * Splits each input line on single spaces and emits a {@code (token, 1)}
     * pair for every token, word-count style. With the sample inputs (every
     * line keyed by "a"), this drives all records to one reducer and makes
     * the data skew visible.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   one line of input text
     * @param context MR context used to emit the output pairs
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        for (String token : value.toString().split(" ")) {
            keyOut.set(token);
            context.write(keyOut, one);
        }
    }
}
package com.gsd.skew;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
public class SkewReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    /**
     * Sums all partial counts received for a single key and writes the
     * aggregated {@code (key, total)} pair.
     *
     * @param key     the token emitted by the mapper
     * @param values  the per-record 1-counts accumulated for this key
     * @param context MR context used to emit the aggregated pair
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int total = 0;
        for (IntWritable partial : values) {
            total += partial.get();
        }
        context.write(key, new IntWritable(total));
    }
}