整体思路:在eclipse里开发计算学生平均成绩,打成jar包后在Hadoop中运行。
学生成绩纯文本文件如下:
we 90
sdf 89
sf 87
sf 90
sdf 78
sdf 67
we 98
注意地方:运行Hadoop程序一定要用root把防火墙关掉:service iptables stop。不然运行会提示PriviledgedActionException异常。
怎么运行Hadoop程序,此处不表,在wordcount中有介绍。
源码:
package com.apache.hadoop.examples;
import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//实现统计学生平均成绩功能,源文件中学生姓名和成绩空格相连,每行只有一门科目;若某个学生有多门成绩,则分多行。
//实现Tool工具类,是为了初始化一个hadoop配置实例
//Computes each student's average score. Input lines look like "name score",
//whitespace-separated, one subject per line; a student with several subjects
//appears on several lines and the reducer averages them.
//Implements Tool so ToolRunner can parse generic options and inject the Configuration.
public class AvgSore implements Tool {
    Configuration configuration;
    public static final Logger log = LoggerFactory.getLogger(AvgSore.class);

    //Mapper: emits (studentName, score) for every valid input line.
    public static class MyMap extends Mapper<Object, Text, Text, IntWritable> {
        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            // TextInputFormat already delivers exactly one line per call, so the
            // old re-split on '\n' was redundant; tokenize the single line directly.
            String line = value.toString().trim();
            if (line.isEmpty()) {
                // Skip blank lines instead of crashing on nextToken().
                return;
            }
            log.info("MapStudentInfo:{}", line);
            StringTokenizer tokenizer = new StringTokenizer(line);
            String name = tokenizer.nextToken(); // student name
            if (!tokenizer.hasMoreTokens()) {
                // Malformed record (name without a score) — log and skip rather
                // than throw NoSuchElementException and kill the task.
                log.warn("Malformed record, missing score: {}", line);
                return;
            }
            int score = Integer.parseInt(tokenizer.nextToken()); // student score
            log.info("MapStu:{} {}", name, score);
            context.write(new Text(name), new IntWritable(score)); // emit name -> score
        }
    }

    //Reducer: averages all scores seen for one student.
    public static class MyReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            int count = 0;
            for (IntWritable value : values) {
                sum += value.get();
                count++;
            }
            // Integer average, matching the IntWritable output type.
            context.write(key, new IntWritable(sum / count)); // emit name -> average
        }
    }

    @Override
    public Configuration getConf() {
        return configuration;
    }

    @Override
    public void setConf(Configuration conf) {
        // Keep the Configuration ToolRunner hands us. The previous version
        // replaced it with "new Configuration()", which silently discarded
        // every generic option parsed from the command line (-D, -files, ...).
        configuration = conf;
    }

    @Override
    public int run(String[] args) throws Exception {
        // Job.getInstance replaces the deprecated "new Job(conf, name)" constructor.
        Job job = Job.getInstance(getConf(), "AvgScore");
        job.setJarByClass(AvgSore.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setMapperClass(MyMap.class);
        job.setReducerClass(MyReduce.class);
        // NOTE: MyReduce must NOT be used as a combiner. Averaging is not
        // associative — a combiner would emit per-mapper averages, and the
        // reducer would then compute an average of averages, which is wrong
        // whenever a student's records are split across mappers.
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));   // input path
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // output path
        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // ToolRunner parses generic options into a Configuration and calls run().
        int ret = ToolRunner.run(new AvgSore(), args);
        System.exit(ret);
    }
}