package cn.AAA.demo;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * MapReduce job that counts how many times each character occurs in the input.
 *
 * <p>NOTE(review): despite the class name {@code MaxTemperature}, this job never
 * parses temperatures — the mapper emits {@code (character, 1)} per input
 * character and the reducer sums the counts (a character-frequency variant of
 * word count). The public class name (and the nested class names, including the
 * existing "Temperture" spelling) are kept unchanged to avoid breaking the
 * file/class contract and any external references; consider renaming them
 * together in a follow-up.
 *
 * @author DELL
 */
public class MaxTemperature {

    /** Mapper: emits {@code (character, 1)} for every character of each line. */
    static class MaxTempertureMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        // Reused across map() calls — avoids one Text/IntWritable allocation
        // per input character (the standard Hadoop writable-reuse idiom).
        private final Text outKey = new Text();
        private final IntWritable one = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            for (int i = 0; i < line.length(); i++) {
                outKey.set(String.valueOf(line.charAt(i)));
                context.write(outKey, one);
            }
        }
    }

    /** Reducer: sums the per-character counts emitted by the mapper. */
    static class MaxTempertureReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

        // Reused output value, same writable-reuse idiom as the mapper.
        private final IntWritable result = new IntWritable();

        @Override
        protected void reduce(Text text, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int count = 0;
            for (IntWritable value : values) {
                // Read the int directly; the original round-tripped through a
                // String via Integer.parseInt(value + "").
                count += value.get();
            }
            result.set(count);
            context.write(text, result);
        }
    }

    /**
     * Configures and submits the job, then exits with the job's status.
     *
     * @param args optional overrides: {@code args[0]} = input path,
     *             {@code args[1]} = output path; the original hard-coded paths
     *             are used as defaults when the arguments are absent, so
     *             existing invocations keep working.
     * @throws Exception if job setup or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Job.getInstance() replaces the deprecated new Job() constructor.
        Job job = Job.getInstance();
        job.setJarByClass(MaxTemperature.class);

        String input = args.length > 0 ? args[0] : "/workspace/rsync/map/in2.txt";
        String output = args.length > 1 ? args[1] : "/workspace/rsync/map/out2";
        FileInputFormat.addInputPath(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        job.setMapperClass(MaxTempertureMapper.class);
        // Summation is associative and commutative, so the reducer can safely
        // double as a combiner to cut shuffle traffic; output is unchanged.
        job.setCombinerClass(MaxTempertureReduce.class);
        job.setReducerClass(MaxTempertureReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}