Getting Started with Hadoop MapReduce

Basic Writable types

BooleanWritable: a standard boolean value
ByteWritable: a single byte
DoubleWritable: a double-precision floating-point number
FloatWritable: a single-precision floating-point number
IntWritable: an int
LongWritable: a long
ObjectWritable: a generic wrapper that can carry an arbitrary Object (primitives, Strings, other Writables)
Text: text stored as UTF-8
NullWritable: a zero-length placeholder, used when a key or value is not needed
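
These classes all implement Hadoop's Writable interface (and, for the key types, WritableComparable), which defines how a value is written to and read back from a binary stream. The following standalone sketch is only an illustration of that contract, not part of the jobs below: it round-trips an IntWritable and a Text through an in-memory byte buffer.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class WritableDemo {

    public static void main(String[] args) throws IOException {
        // serialize an IntWritable and a Text into a byte buffer
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        new IntWritable(42).write(out);
        new Text("hadoop").write(out);
        out.close();

        // read them back in the same order they were written
        DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(buffer.toByteArray()));
        IntWritable number = new IntWritable();
        Text word = new Text();
        number.readFields(in);
        word.readFields(in);
        System.out.println(number.get() + " " + word.toString()); // prints: 42 hadoop
    }
}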

Source code:

package org.bigdata.util;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Counts how many times each word occurs in the input.
 * 
 * @author wwhhf
 * 
 */
public class WordCountMapReduce {

    public static class WordCountMapper extends
            Mapper<LongWritable, Text, Text, LongWritable> {

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String values[] = value.toString().split(" ");
            for (int i = 0, len = values.length; i < len; i++) {
                context.write(new Text(values[i]), new LongWritable(1));
            }
        }

    }

    public static class WordCountCombiner extends
            Reducer<Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values,
                Context context) throws IOException, InterruptedException {
            Long sum = 0L;
            for (LongWritable value : values) {
                sum = sum + value.get();
            }
            context.write(key, new LongWritable(sum));
        }

    }

    public static class WordCountReducer extends
            Reducer<Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values,
                Context context) throws IOException, InterruptedException {
            Long sum = 0L;
            for (LongWritable value : values) {
                sum = sum + value.get();
            }
            context.write(key, new LongWritable(sum));
        }

    }

    public static class TextDescComparator extends WritableComparator {

        protected TextDescComparator() {
            super(Text.class, true);
        }

        @Override
        public int compare(WritableComparable a, WritableComparable b) {
            return -super.compare(a, b);
        }

    }

    public static void main(String[] args) {
        try {

            Configuration cfg = HadoopCfg.getConfiguration();
            Job job = Job.getInstance(cfg);
            job.setJobName("Word Count");
            job.setJarByClass(WordCountMapReduce.class);

            // mapper
            job.setMapperClass(WordCountMapper.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(LongWritable.class);

            // combiner
            job.setCombinerClass(WordCountCombiner.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);

            // reducer
            job.setReducerClass(WordCountReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);

            job.setSortComparatorClass(TextDescComparator.class);

            FileInputFormat.addInputPath(job, new Path("/input/input.txt"));
            FileOutputFormat.setOutputPath(job, new Path("/output/"));

            System.exit(job.waitForCompletion(true) ? 0 : 1);

        } catch (IllegalStateException | IllegalArgumentException
                | ClassNotFoundException | IOException | InterruptedException e) {
            e.printStackTrace();
        }
    }

}
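
Both jobs obtain their Configuration from HadoopCfg.getConfiguration(), a small project-specific helper whose source is not included in this post. A minimal sketch of what such a helper might look like is shown below; the commented-out resource path is only an assumption and should point at your own cluster's configuration files if they are not already on the classpath.

package org.bigdata.util;

import org.apache.hadoop.conf.Configuration;

/**
 * Hypothetical configuration helper (sketch only; not the original HadoopCfg source).
 */
public class HadoopCfg {

    public static Configuration getConfiguration() {
        // Configuration picks up core-site.xml, hdfs-site.xml, etc. from the classpath.
        Configuration cfg = new Configuration();
        // Extra resources could be added explicitly if needed, for example:
        // cfg.addResource(new org.apache.hadoop.fs.Path("/etc/hadoop/conf/core-site.xml"));
        return cfg;
    }
}

The second example counts which character names occur most often in Dream of the Red Chamber, using the ansj segmenter to extract person names from the text. Source code: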
package org.bigdata.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.ansj.domain.Term;
import org.ansj.splitWord.analysis.NlpAnalysis;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Counts the most frequently occurring character names in Dream of the Red Chamber.
 * 
 * @author wwhhf
 * 
 */
public class RedHouseMapReduce {

    /**
     * Tokenizes a line of text and keeps only the terms tagged as person names.
     * 
     * @param s the input line
     * @return the person names found in the line
     */
    private static List<String> parse(String s) {
        List<Term> terms = NlpAnalysis.parse(s);
        List<String> words = new ArrayList<String>();
        for (Term term : terms) {
            // keep terms whose part-of-speech tag is "nr" (person name)
            if ("nr".equals(term.getNatureStr())) {
                words.add(term.getName());
            }
        }
        return words;
    }

    public static class RedHouseMapper extends
            Mapper<LongWritable, Text, Text, LongWritable> {

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            List<String> names = parse(value.toString());
            for (String name : names) {
                context.write(new Text(name), new LongWritable(1));
            }
        }
    }

    public static class RedHouseReducer extends
            Reducer<Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values,
                Context context) throws IOException, InterruptedException {
            Long sum = 0L;
            for (LongWritable value : values) {
                sum = sum + value.get();
            }
            context.write(key, new LongWritable(sum));
        }

    }

    public static void main(String[] args) {
        try {

            Configuration cfg = HadoopCfg.getConfiguration();
            Job job = Job.getInstance(cfg);
            job.setJobName("RedHourse");
            job.setJarByClass(RedHouseMapReduce.class);

            // mapper: RedHouseMapper emits Text keys and LongWritable values
            job.setMapperClass(RedHouseMapper.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(LongWritable.class);

            // reducer
            job.setReducerClass(RedHouseReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);

            FileInputFormat.addInputPath(job, new Path("/input/red.txt"));
            FileOutputFormat.setOutputPath(job, new Path("/red_out/"));

            System.exit(job.waitForCompletion(true) ? 0 : 1);

        } catch (IllegalStateException | IllegalArgumentException
                | ClassNotFoundException | IOException | InterruptedException e) {
            e.printStackTrace();
        }
    }
}
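
Before submitting this job to a cluster, it can help to sanity-check the ansj segmentation locally. The sketch below assumes the same ansj_seg version used in the job above (where NlpAnalysis.parse returns a List<Term>), and the sample sentence is arbitrary. When the job is actually submitted, the ansj_seg jar and its dependencies also have to be bundled into the job jar or otherwise placed on the task classpath, or the map tasks will not be able to load the ansj classes.

package org.bigdata.util;

import java.util.List;

import org.ansj.domain.Term;
import org.ansj.splitWord.analysis.NlpAnalysis;

/**
 * Local sanity check for the ansj segmenter (sketch only).
 */
public class NlpAnalysisDemo {

    public static void main(String[] args) {
        String line = "宝玉和黛玉在大观园里说话。";
        List<Term> terms = NlpAnalysis.parse(line);
        for (Term term : terms) {
            // print every term with its part-of-speech tag;
            // person names are the terms tagged "nr"
            System.out.println(term.getName() + "\t" + term.getNatureStr());
        }
    }
}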