MapReduce with only a Map phase, writing the output to the local PC. The product table is loaded into a HashMap in setup() and each order record is joined against it in map(), so no Reduce phase is needed (a map-side join).

package ProOrder2;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class ProOrdMapper2 extends Mapper<LongWritable, Text, Text, NullWritable>{

    // Product table cache: product id -> product name, loaded once per map task.
    HashMap<String, String> map = new HashMap<String, String>();

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        // Read the cached product file (distributed via job.addCacheFile in the driver;
        // the absolute path works here because the job runs in local mode).
        BufferedReader br = new BufferedReader(new InputStreamReader(
                new FileInputStream("E:/study/bigdata/hadoop/第8天/product/product.txt")));
        String line = "";
        while ((line = br.readLine()) != null) {
            String[] strs = line.split("\t");
            String pId = strs[0];    // product id
            String pName = strs[3];  // product name
            map.put(pId, pName);
        }
        br.close();
    }
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] split = value.toString().split("\t");
        // The third field of each order record is the product id, not an order id.
        String pId = split[2];
        // Append the matched product name to the original order line.
        Text text = new Text(value.toString() + "\t" + map.get(pId));
        context.write(text, NullWritable.get());
    }
}
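
For reference, the split indices in the code above imply tab-separated input files shaped roughly as follows. The exact column layout is an assumption reconstructed from the code (strs[0] and strs[3] for product.txt, split[2] for the order file); the other columns are only illustrative:

product.txt (product id in column 0, product name in column 3):
p0001	c01	19.9	xiaomi
order file (product id in column 2):
o1001	20200315	p0001	2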

A job with only a Map phase can also write its output directly; the driver below just sets the number of reduce tasks to 0, so each mapper's output goes straight to the output format.

package ProOrder2;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ProOrdDriver2 {

    public static void main(String[] args) throws Exception  {
        String path1="";
        String path2="";
        if(args.length==2){
            path1 = args[0];
            path2 = args[1];
        }else{
            return;
        }


        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(ProOrdDriver2.class);
        job.setMapperClass(ProOrdMapper2.class);
        // Map-only job: with zero reduce tasks, map output is written out directly.
        job.setNumReduceTasks(0);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path(path1));
        FileOutputFormat.setOutputPath(job, new Path(path2));

        // Make product.txt available to every map task; the file:/// URI assumes local mode.
        job.addCacheFile(new URI("file:///E:/study/bigdata/hadoop/第8天/product/product.txt"));

        boolean res = job.waitForCompletion(true);
        System.exit(res?0:1);
    }
}
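
A minimal sketch of how the job might be launched, assuming the two classes are packaged into a jar named proorder2.jar (the jar name and the input/output paths are illustrative, not from the original post):

hadoop jar proorder2.jar ProOrder2.ProOrdDriver2 E:/input/order E:/output/join

The output directory must not exist beforehand, and because the job is map-only the joined records end up in part-m-00000 style files under the second path.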