黑猴子的家: MapReduce Inverted Index (Chaining Multiple Jobs)

1. Data

https://www.jianshu.com/p/e33ff7e367db
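The linked data is not reproduced here. Judging from the expected counts below, the input is presumably three small text files of space-separated words. A hypothetical reconstruction consistent with those counts (file names and contents are assumptions, not the original data):

a.txt
victor pingping
victor ss
victor ss

b.txt
victor pingping
victor pingping
pingping ss

c.txt
victor ss
victor pingping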

2. Expected Output

(1) Expected output of the first job

victor--a.txt	3
victor--b.txt	2
victor--c.txt	2
pingping--a.txt	1
pingping--b.txt	3
pingping--c.txt	1
ss--a.txt	2
ss--b.txt	1
ss--c.txt	1

(2) Expected output of the second job

victor  c.txt-->2   b.txt-->2   a.txt-->3   
pingping    c.txt-->1   b.txt-->3   a.txt-->1   
ss  c.txt-->1   b.txt-->1   a.txt-->2   


3. First Job

The first job counts how many times each word appears in each file, using word--filename as the key.

OneIndexMapper

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

// key:   word--filename, e.g. victor--a.txt
// value: occurrence count
public class OneIndexMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    String name;
    Text k = new Text();
    IntWritable v = new IntWritable(1);

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {

        // Get the name of the file this split comes from
        FileSplit inputSplit = (FileSplit) context.getInputSplit();
        name = inputSplit.getPath().getName();
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {

        // 1 Read one line, e.g. "victor pingping"
        String line = value.toString();

        // 2 Split on spaces:
        // victor
        // pingping
        String[] words = line.split(" ");

        // 3 Emit (word--filename, 1) for each word
        for (String word : words) {
            // victor--a.txt
            k.set(word + "--" + name);

            context.write(k, v);
        }
    }
}

OneIndexReducer

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class OneIndexReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {

        // Sum the counts
        int count = 0;
        for (IntWritable value : values) {
            count += value.get();
        }
        
        // Emit the total count for this key
        context.write(key, new IntWritable(count));
    }
}

OneIndexDriver

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class OneIndexDriver {

    public static void main(String[] args) throws Exception {

        // Hard-coded local input/output paths; adjust to your own environment
        args = new String[] { "e:/inputoneindex", "e:/output5" };

        Configuration conf = new Configuration();

        Job job = Job.getInstance(conf);
        job.setJarByClass(OneIndexDriver.class);

        job.setMapperClass(OneIndexMapper.class);
        job.setReducerClass(OneIndexReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }
}
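One optional tweak that is not in the original post: because OneIndexReducer simply sums integers (an associative, commutative operation), the same class can double as a combiner, so partial sums are computed on the map side before the shuffle. A minimal sketch, one line added to the driver alongside the other job settings:

        // Optional: map-side pre-aggregation; safe because integer summing is associative and commutative
        job.setCombinerClass(OneIndexReducer.class);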

Output of the first job

victor--a.txt	3
victor--b.txt	2
victor--c.txt	2
pingping--a.txt	1
pingping--b.txt	3
pingping--c.txt	1
ss--a.txt	2
ss--b.txt	1
ss--c.txt	1

4. Second Job

The second job reads the first job's output. The default TextOutputFormat separates key and value with a tab, so each input line looks like victor--a.txt\t3. The mapper splits on "--", leaving the tab inside the value, which is what the reducer later rewrites as "-->".

TwoIndexMapper

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class TwoIndexMapper extends Mapper<LongWritable, Text, Text, Text> {

    Text k = new Text();
    Text v = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {

        // Input line, e.g.: victor--a.txt\t3

        // 1 Read one line
        String line = value.toString();

        // 2 Split on "--":
        // fields[0] = "victor"
        // fields[1] = "a.txt\t3" (the tab from the first job's output is kept)
        String[] fields = line.split("--");

        // 3 Set key and value
        k.set(fields[0]);
        v.set(fields[1]);

        // 4 Emit (word, "filename\tcount")
        context.write(k, v);
    }
}

TwoIndexReducer

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class TwoIndexReducer extends Reducer<Text, Text, Text, Text> {

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {

        // Incoming values for key "victor":
        // a.txt\t3
        // b.txt\t2
        // c.txt\t2

        // StringBuilder: no thread-safety is needed here, so prefer it over StringBuffer
        StringBuilder sb = new StringBuilder();

        for (Text value : values) {
            // Turn "a.txt\t3" into "a.txt-->3", then append a tab separator
            sb.append(value.toString().replace("\t", "-->")).append("\t");
        }

        context.write(key, new Text(sb.toString()));
    }
}

TwoIndexDriver

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TwoIndexDriver {

    public static void main(String[] args) throws Exception {

        // Hard-coded local paths; the input directory should hold the first job's output
        args = new String[] { "e:/inputtwoindex", "e:/output6" };

        Configuration config = new Configuration();
        Job job = Job.getInstance(config);
        job.setJarByClass(TwoIndexDriver.class);
        
        job.setMapperClass(TwoIndexMapper.class);
        job.setReducerClass(TwoIndexReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }
}

Output of the second job

victor  c.txt-->2   b.txt-->2   a.txt-->3   
pingping    c.txt-->1   b.txt-->3   a.txt-->1   
ss  c.txt-->1   b.txt-->1   a.txt-->2   
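The two drivers above run as separate programs, with the intermediate result moved by hand. Since the point of this post is chaining multiple jobs, they can also be merged into one driver that runs the jobs back to back, pointing job 2's input at job 1's output directory. A minimal sketch; IndexChainDriver and the local paths are hypothetical, and the mapper/reducer classes are the ones defined above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class IndexChainDriver {

    public static void main(String[] args) throws Exception {

        // Hypothetical paths: raw input, intermediate directory, final output
        Path input = new Path("e:/inputoneindex");
        Path temp = new Path("e:/index_tmp");
        Path output = new Path("e:/index_out");

        Configuration conf = new Configuration();

        // Job 1: count word occurrences per file, keyed by word--filename
        Job job1 = Job.getInstance(conf, "one-index");
        job1.setJarByClass(IndexChainDriver.class);
        job1.setMapperClass(OneIndexMapper.class);
        job1.setReducerClass(OneIndexReducer.class);
        job1.setMapOutputKeyClass(Text.class);
        job1.setMapOutputValueClass(IntWritable.class);
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(IntWritable.class);
        FileInputFormat.setInputPaths(job1, input);
        FileOutputFormat.setOutputPath(job1, temp);

        // Run job 1 and stop the chain if it fails
        if (!job1.waitForCompletion(true)) {
            System.exit(1);
        }

        // Job 2: regroup by word, reading job 1's output from the intermediate directory
        Job job2 = Job.getInstance(conf, "two-index");
        job2.setJarByClass(IndexChainDriver.class);
        job2.setMapperClass(TwoIndexMapper.class);
        job2.setReducerClass(TwoIndexReducer.class);
        job2.setMapOutputKeyClass(Text.class);
        job2.setMapOutputValueClass(Text.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job2, temp);
        FileOutputFormat.setOutputPath(job2, output);

        System.exit(job2.waitForCompletion(true) ? 0 : 1);
    }
}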

5. Code -> GitHub

https://github.com/liufengji/hadoop_mapreduce.git
