1. App.java
package com.qst.wordcount;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class App {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance();
        job.setJarByClass(App.class);
        job.setJobName("wordcount");
        // Set the input and output paths from the command-line arguments
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Set the output key/value types (Hadoop's Text, not the Jersey Text class
        // that the IDE auto-imported in the original)
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Specify the mapper and reducer classes
        job.setMapperClass(WCMapper.class);
        job.setReducerClass(WCReducer.class);
        // Submit the job and block until it finishes; without this the job never runs
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
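Since the reduction here is a pure, key-local sum (associative and commutative), the same reducer class can also be registered as a combiner to pre-aggregate map output and shrink the shuffle. A minimal sketch of the one extra driver line, added before job submission:

job.setCombinerClass(WCReducer.class); // optional: map-side pre-aggregation, safe for a pure sum

The packaged job can then be launched with, for example, hadoop jar wordcount.jar com.qst.wordcount.App /input /output, where the two paths (placeholders here) become args[0] and args[1].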
2. The WCMapper class
package com.qst.wordcount;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // The key is the line's byte offset in the file; the value is one line of text
        String line = value.toString();
        // Split on runs of whitespace so repeated spaces do not yield empty tokens
        String[] arr = line.split("\\s+");
        for (String word : arr) {
            if (!word.isEmpty()) {
                // Emit (word, 1) for every token; the framework groups by word
                context.write(new Text(word), new IntWritable(1));
            }
        }
    }
}
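Allocating a new Text and IntWritable for every token creates avoidable garbage on large inputs. Below is a sketch of the usual Hadoop writable-reuse idiom for the map body above; it is safe because context.write serializes the key and value immediately:

private final Text word = new Text();
private final IntWritable one = new IntWritable(1);

@Override
protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
    for (String token : value.toString().split("\\s+")) {
        if (!token.isEmpty()) {
            word.set(token);          // reuse the same Text object for every record
            context.write(word, one); // serialized on write, so reuse is safe
        }
    }
}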
3. The WCReducer class
package com.qst.wordcount;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WCReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // The counter must be a local variable: reduce() runs once per key on the
        // same Reducer instance, so an instance field would carry totals across keys
        int num = 0;
        for (IntWritable value : values) {
            num += value.get();
        }
        context.write(key, new IntWritable(num));
    }
}
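To see why the accumulator must be reset per key, here is a minimal plain-Java sketch (no Hadoop required; the class and variable names are hypothetical) that replays two reduce calls on one "reducer instance", comparing a shared field against a local counter:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ReduceScopeDemo {
    public static void main(String[] args) {
        // Grouped input, as the framework hands it to reduce(): one call per key
        Map<String, List<Integer>> grouped = new LinkedHashMap<>();
        grouped.put("hello", Arrays.asList(1, 1, 1));
        grouped.put("world", Arrays.asList(1));

        int fieldNum = 0; // simulates the buggy instance field, never reset
        for (Map.Entry<String, List<Integer>> e : grouped.entrySet()) {
            int localNum = 0; // correct: reset at the start of each reduce call
            for (int v : e.getValue()) {
                fieldNum += v;
                localNum += v;
            }
            System.out.printf("%s -> field total %d, local total %d%n",
                    e.getKey(), fieldNum, localNum);
        }
        // Prints: hello -> field total 3, local total 3
        //         world -> field total 4, local total 1  (the field over-counts)
    }
}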