The LORD is near to the brokenhearted and saves those who are crushed in spirit.

Even Jesus had people who bore grudges against him, so I don't need to explain anything~

SortedPriceName sorting code (based on MapReduce processing logic)


---

mapper.java

---


package com.doggie.test;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Created by root on 5/25/16.
 */
public class mapper extends Mapper<Object,Text,LongWritable,Text> {
    @Override
    public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
        // The input file's path tells the two record types apart.
        String fileName = ((FileSplit) context.getInputSplit()).getPath().toString();
        String valueString = value.toString();
        // Each line is assumed to hold two space-separated fields.
        String[] items = valueString.split(" ");

        LongWritable outputKey;
        Text outputValue;

        if (fileName.contains("price")) {
            // Price file (layout inferred from this parsing: "id price"):
            // key = id, value = the price as text.
            outputKey = new LongWritable(Long.valueOf(items[0]));
            outputValue = new Text(items[1]);
        } else {
            // Name file (inferred layout: "name id"): key = id, value = the
            // name prefixed with "name" so the reducer can tell it from a price.
            outputKey = new LongWritable(Long.valueOf(items[1]));
            outputValue = new Text("name" + items[0]);
        }
        context.write(outputKey, outputValue);
    }
}
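The field layout below is inferred from the parsing logic above (the linked sample files are behind a password, so this is an assumption, not a quote): the file whose name contains "price" holds "id price" lines, the other file holds "name id" lines. For example:

    price.txt:          name.txt:
    1 300               apple 1
    1 100               banana 2
    2 250

For these inputs the mapper emits (1, "300"), (1, "100"), (2, "250") from the price file and (1, "nameapple"), (2, "namebanana") from the name file, so the shuffle delivers each item's name together with all of its prices to a single reduce call.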


---

reducer.java

---

package com.doggie.test;


import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.TreeSet;

/**
 * Created by root on 5/25/16.
 */
public class reducer extends Reducer<LongWritable,Text,Text,LongWritable> {
    @Override
    public void reduce(LongWritable key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {

        Text itemName = null;
        // TreeSet keeps the prices in ascending order; note that it also
        // silently drops duplicate prices for the same item.
        TreeSet<LongWritable> queue = new TreeSet<LongWritable>();

        for (Text val : values){
            if (val.toString().startsWith("name")){
                // Strip the "name" tag the mapper added.
                String realName = val.toString().substring(4);
                itemName = new Text(realName);
            } else {
                LongWritable price = new LongWritable(Long.valueOf(val.toString()));
                queue.add(price);
            }
        }
        // Guard against ids that have no matching name record: writing a
        // null key would make the job fail.
        if (itemName == null) {
            return;
        }
        for (LongWritable val : queue) {
            context.write(itemName, val);
        }
    }
}
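One caveat with the TreeSet: equal prices for the same item collapse into a single output line. If duplicates should be preserved, an ArrayList plus an explicit sort is a drop-in alternative. A minimal sketch, assuming the same class as above plus imports of java.util.ArrayList and java.util.Collections:

    // Alternative reduce: keeps duplicate prices (TreeSet would drop them).
    public void reduce(LongWritable key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        Text itemName = null;
        ArrayList<Long> prices = new ArrayList<Long>();
        for (Text val : values) {
            if (val.toString().startsWith("name")) {
                itemName = new Text(val.toString().substring(4));
            } else {
                prices.add(Long.valueOf(val.toString()));
            }
        }
        if (itemName == null) {
            return; // no name record for this id
        }
        Collections.sort(prices); // ascending, duplicates preserved
        for (Long p : prices) {
            context.write(itemName, new LongWritable(p));
        }
    }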



---

Homework.java (main)

---
package com.doggie.mtest;


import com.doggie.test.mapper;
import com.doggie.test.reducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Created by root on 5/25/16.
 */
public class Homework {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args)
                .getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: homework <input dir> <output dir>");
            System.exit(2);
        }
        //conf.setInt("mapred.task.timeout",100);
        // Job.getInstance replaces the deprecated new Job(conf, ...) constructor.
        Job job = Job.getInstance(conf, "homework");
        job.setInputFormatClass(TextInputFormat.class);
        job.setJarByClass(Homework.class);
        job.setMapperClass(mapper.class);
        job.setReducerClass(reducer.class);
        // The map output types (id -> tagged text) differ from the final
        // output types (name -> price), so both pairs must be declared.
        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // A single reducer produces one output file, ordered by the id key.
        job.setNumReduceTasks(1);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
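To run the job, an invocation along these lines should work; the jar name and HDFS paths are placeholders, not from the original post:

    hadoop jar homework.jar com.doggie.mtest.Homework /user/root/input /user/root/output
    hdfs dfs -cat /user/root/output/part-r-00000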

[Sample of the two input files](https://yunpan.cn/cSDYkqREN9N3H  access password: ff1b)