Hadoop TopN MapReduce Programming

1. Write the first mapper, which splits each input line into words.

package com.wwei.had241.mr.topn;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class TopnMap1 extends Mapper<LongWritable, Text, Text, LongWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // read one line of input
        String line = value.toString();
        // split it into words
        String[] words = line.split(" ");
        // emit (word, 1) for every word
        for (String w : words) {
            context.write(new Text(w), new LongWritable(1));
        }
    }
}
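
For example, given the input line

hello world hello

this mapper emits (hello, 1), (world, 1), (hello, 1). Pairs sharing the same word are grouped together for the reducer in the next step.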


2. Write the first reducer, which counts the occurrences of each word.

package com.wwei.had241.mr.topn;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class TopnRed1 extends Reducer<Text, LongWritable, Text, LongWritable> {

    @Override
    protected void reduce(Text key, Iterable<LongWritable> values,
            Context context) throws IOException, InterruptedException {
        // sum all the 1s emitted for this word
        long count = 0L;
        for (LongWritable l : values) {
            count += l.get();
        }
        context.write(key, new LongWritable(count));
    }
}
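
Continuing the example, the reducer receives (hello, [1, 1]) and (world, [1]) and writes one tab-separated line per word: hello 2 and world 1. This "word TAB count" format is exactly what the second job's mapper parses in the next step.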

3. Write the second mapper, which emits a custom object as the key so that records reach the reducer already sorted.

package com.wwei.had241.mr.topn;

import java.io.IOException;
import java.util.Arrays;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class TopnMap2 extends Mapper<LongWritable, Text, WordEntity, NullWritable> {

    private static final Log logger = LogFactory.getLog(TopnMap2.class);

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // each input line is "word \t count", as written by job1
        String line = value.toString();
        String[] fields = line.split("\t");
        logger.info("fields=" + Arrays.toString(fields));

        // guard against malformed lines: both the word and its count are needed
        if (fields.length >= 2) {
            String word = fields[0];
            long count = Long.parseLong(fields[1]);
            WordEntity wEntity = new WordEntity();
            wEntity.set(word, count);
            context.write(wEntity, NullWritable.get());
        }
    }
}
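
The trick here is that the WordEntity itself is the map output key: during the shuffle, Hadoop sorts map output keys using WordEntity.compareTo (defined in the next step), so the records arrive at the reducer already in descending count order and no explicit sorting code is needed.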


4. Write the entity class, which implements the WritableComparable interface.

package com.wwei.had241.mr.topn;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

public class WordEntity implements WritableComparable<WordEntity> {

    private String word;  // the word
    private long count;   // the word's frequency

    public WordEntity() {
        // no-arg constructor required by Writable deserialization
    }

    public WordEntity(String word, long count) {
        this.word = word;
        this.count = count;
    }

    public void set(String word, long count) {
        this.word = word;
        this.count = count;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(word);
        out.writeLong(count);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        this.word = in.readUTF();
        this.count = in.readLong();
    }

    @Override
    public int compareTo(WordEntity o) {
        // sort by count in descending order; break ties on the word itself
        // so that compareTo returns 0 only for genuinely equal keys
        int cmp = Long.compare(o.count, this.count);
        return cmp != 0 ? cmp : this.word.compareTo(o.word);
    }

    public String getWord() {
        return word;
    }

    public void setWord(String word) {
        this.word = word;
    }

    public long getCount() {
        return count;
    }

    public void setCount(long count) {
        this.count = count;
    }

    @Override
    public String toString() {
        return word + "\t" + count;
    }
}
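
Because compareTo orders by count descending, a quick local check confirms the behavior. The harness class below is hypothetical and not part of the jobs; it just sorts a few entities in memory:

package com.wwei.had241.mr.topn;

import java.util.Arrays;

// Hypothetical local test harness: verifies that sorting WordEntity
// instances yields counts in descending order.
public class WordEntityOrderCheck {
    public static void main(String[] args) {
        WordEntity[] entries = {
            new WordEntity("hadoop", 3),
            new WordEntity("hello", 7),
            new WordEntity("world", 5),
        };
        Arrays.sort(entries); // uses WordEntity.compareTo
        for (WordEntity e : entries) {
            System.out.println(e); // prints: hello 7, world 5, hadoop 3
        }
    }
}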



5. Write the second reducer, which outputs the first N records.

package com.wwei.had241.mr.topn;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;

public class TopnRed2 extends Reducer<WordEntity, NullWritable, WordEntity, NullWritable> {

    private static final Log logger = LogFactory.getLog(TopnRed2.class);

    private int n;

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        // read N from the job configuration; default to 10 if it is not set
        n = context.getConfiguration().getInt("N", 10);
        logger.info("n=" + n);
    }

    @Override
    protected void reduce(WordEntity bean, Iterable<NullWritable> values,
            Context context) throws IOException, InterruptedException {
        // keys arrive sorted by WordEntity.compareTo (count descending),
        // so emitting the first N groups yields the top N words
        if (n > 0) {
            n--;
            context.write(bean, NullWritable.get());
        }
    }
}
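
If you prefer to do all the writing in one place, a variant (a sketch, not the version used above) buffers the first N keys and emits them in cleanup(). Note that Hadoop reuses the key object between reduce() calls, so each buffered entity must be a copy:

    // Sketch of a buffering variant: replace the field and methods of
    // TopnRed2 above. Also requires: import java.util.ArrayList;
    // import java.util.List;
    private int n;
    private List<WordEntity> list;

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        n = context.getConfiguration().getInt("N", 10);
        list = new ArrayList<WordEntity>(n);
    }

    @Override
    protected void reduce(WordEntity bean, Iterable<NullWritable> values,
            Context context) throws IOException, InterruptedException {
        // the framework reuses 'bean' across calls, so store a copy
        if (list.size() < n) {
            list.add(new WordEntity(bean.getWord(), bean.getCount()));
        }
    }

    @Override
    protected void cleanup(Context context)
            throws IOException, InterruptedException {
        // emit the buffered top N once all input has been seen
        for (WordEntity e : list) {
            context.write(e, NullWritable.get());
        }
    }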


6. Assemble and chain the MapReduce jobs.


package com.wwei.had241.mr.topn;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TopnMain {

    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        conf.setInt("N", Integer.parseInt(args[0]));

        // job1: word count
        Job job1 = Job.getInstance(conf);
        job1.setJarByClass(TopnMain.class);

        // configure the mapper
        job1.setMapperClass(TopnMap1.class);
        job1.setMapOutputKeyClass(Text.class);
        job1.setMapOutputValueClass(LongWritable.class);
        FileInputFormat.setInputPaths(job1, new Path("/word.txt"));

        // configure the reducer; the output path is a directory,
        // despite its ".txt"-style name
        job1.setReducerClass(TopnRed1.class);
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job1, new Path("/topn/wcount.txt"));

        // job2: sort by count and take the top N
        Job job2 = Job.getInstance(conf);
        job2.setJarByClass(TopnMain.class);

        // configure the mapper
        job2.setMapperClass(TopnMap2.class);
        job2.setMapOutputKeyClass(WordEntity.class);
        job2.setMapOutputValueClass(NullWritable.class);
        FileInputFormat.setInputPaths(job2, new Path("/topn/wcount.txt"));

        // configure the reducer; a single reduce task is required so the
        // top N is global rather than per-reducer
        job2.setReducerClass(TopnRed2.class);
        job2.setNumReduceTasks(1);
        job2.setOutputKeyClass(WordEntity.class);
        job2.setOutputValueClass(NullWritable.class);
        FileOutputFormat.setOutputPath(job2, new Path("/topn/result.txt"));

        ControlledJob conJob1 = new ControlledJob(conf);
        conJob1.setJob(job1);

        ControlledJob conJob2 = new ControlledJob(conf);
        conJob2.setJob(job2);
        conJob2.addDependingJob(conJob1); // job2 depends on job1

        JobControl jc = new JobControl("xyzjob");
        jc.addJob(conJob1);
        jc.addJob(conJob2);
        Thread th = new Thread(jc);
        th.start();

        // poll until both jobs finish; sleep to avoid a busy-wait loop
        while (!jc.allFinished()) {
            Thread.sleep(500);
        }
        jc.stop();
    }
}
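
Assuming the classes are packaged into a jar (the jar name below is hypothetical) and /word.txt already exists in HDFS, the chained jobs can be launched with N as the first argument:

hadoop jar topn.jar com.wwei.had241.mr.topn.TopnMain 5

The top-5 words then appear under /topn/result.txt (a directory, despite the name), in the part-r-00000 file.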

