在 Eclipse 中运行 Hadoop MapReduce 示例程序 WordCount

package org.apache.hadoop.example;

import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class WordCount {
//输入键类型、输入值类型、输出键类型和输出值类型,在本例中分别为普通对象(Object)、字符串(Text)、字符串和整数型(IntWritable)
      public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {
            private final static IntWritable one = new IntWritable(1);
            private Text word = new Text();
            //输入键key、输入值value和环境变量context。输入键默认为行号,输入值为每一行的文本字符串
            public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
                  StringTokenizer itr = new StringTokenizer(value.toString());
                  while (itr.hasMoreTokens()) {
                        word.set(itr.nextToken());//设置键的值
                        context.write(word, one);//函数context.write()输出键为单词值为1

                  }
            }
      }
      //输入键类型、输入值类型、输出键类型和输出值类型,在本例中分别为字符串、整数型、字符串和整数型
      public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
            private IntWritable result = new IntWritable();
            //输入键key、输入值列表values和环境变量context
            public void reduce(Text key, Iterable<IntWritable> values, Context context)throws IOException, InterruptedException {
                  int sum = 0;
                  for (IntWritable val : values) {
                        sum += val.get();
                  }
                  result.set(sum);
                  context.write(key, result);//输出键为单词值为单词出现次数的键值对
            }
      }
      public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();  //初始化hadoop集群组件配置
            String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();//解析命令行参数
            if (otherArgs.length < 2) {
                  System.err.println("Usage: wordcount <in> [<in>...] <out>");
                  System.exit(2);
            }
            @SuppressWarnings("deprecation")
            Job job = new Job(conf, "word count"); //Job命名为"word count"
            job.setJarByClass(WordCount.class);    //设置主类
            job.setMapperClass(TokenizerMapper.class);//设置mapper类
            job.setReducerClass(IntSumReducer.class);//设置reducer类
            job.setOutputKeyClass(Text.class);//设置键输出类
            job.setOutputValueClass(IntWritable.class);//设置值输出类
            for (int i = 0; i < otherArgs.length - 1; ++i) {
                  FileInputFormat.addInputPath(job, new Path(otherArgs[i]));//设置作业输入路径
            }
            FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));//设置作业输出路径
            System.exit(job.waitForCompletion(true) ? 0 : 1);//调用函数System.exit()等待作业退出,并给出退出状态。
      }
}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值