MapReduce Demo: Reading and Writing Parquet-Format Data

This demo shows how a MapReduce job reads and writes Parquet-format data with Parquet's example Group API (GroupReadSupport / GroupWriteSupport). The third command-line argument selects one of four input/output combinations; the input data has two columns, city and ip.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetInputFormat;
import org.apache.parquet.hadoop.ParquetOutputFormat;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Types;
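
// Note: these classes live in the parquet-hadoop and parquet-column artifacts.
// Newer parquet-mr releases use the org.apache.parquet.* package prefix shown
// here; older pre-Apache releases used the parquet.* prefix instead.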

/**
 * MapReduce demo for reading and writing Parquet-format data
 */
public class ParquetReaderAndWriteMRDemo {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherargs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherargs.length != 3) {
            System.err.println("Usage: <in> <out> <mode>");
            System.err.println("  mode 1: <in> <out>                  text in, text out");
            System.err.println("  mode 2: <parquet-in> <out>          Parquet in, text out");
            System.err.println("  mode 3: <in> <parquet-out>          text in, Parquet out");
            System.err.println("  mode 4: <parquet-in> <parquet-out>  Parquet in, Parquet out");
            System.exit(2);
        }
        // This demo's input data has two columns: city and ip
        
        // Build the Parquet message schema:
        //   message pair { required binary city (UTF8); required binary ip (UTF8); }
        MessageType schema = Types.buildMessage()
                .required(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named("city")
                .required(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named("ip")
                .named("pair");
        System.out.println("[schema]==" + schema);
        GroupWriteSupport.setSchema(schema, conf);
        
        Job job = Job.getInstance(conf, "ParquetReadMR");
        job.setJarByClass(ParquetReaderAndWriteMRDemo.class);
        
        if(otherargs[2].equals("1")){
            job.setMapperClass(NormalMapper.class);
            job.setReducerClass(NormalReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            FileInputFormat.setInputPaths(job,otherargs[0] );
            FileOutputFormat.setOutputPath(job, new Path(otherargs[1]));
            if (!job.waitForCompletion(true))
                return;
        }
        if(otherargs[2].equals("3")){
            job.setMapperClass(ParquetWriteMapper.class);
            job.setNumReduceTasks(0);
            FileInputFormat.setInputPaths(job,otherargs[0] );
            
            //parquet输出
            job.setOutputFormatClass(ParquetOutputFormat.class);
            ParquetOutputFormat.setWriteSupportClass(job, GroupWriteSupport.class);
//            ParquetOutputFormat.setOutputPath(job, new Path(otherargs[1]));
            FileOutputFormat.setOutputPath(job, new Path(otherargs[1]));
            if (!job.waitForCompletion(true))
                return;
        }
        
        if(otherargs[2].equals("2")){
            //parquet输入
            job.setMapperClass(ParquetReadMapper.class);
            job.setNumReduceTasks(0);
            job.setInputFormatClass(ParquetInputFormat.class);
            ParquetInputFormat.setReadSupportClass(job, GroupReadSupport.class);
            
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            FileInputFormat.setInputPaths(job,otherargs[0] );
            FileOutputFormat.setOutputPath(job, new Path(otherargs[1]));
            if (!job.waitForCompletion(true))
                return;
        }
        if(otherargs[2].equals("4")){
            //TODO 不想写了
        }
    }
    
    public static class ParquetWriteMapper extends Mapper<LongWritable, Text, Void, Group> {
        SimpleGroupFactory factory = null;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Recover the schema the driver stored in the Configuration
            factory = new SimpleGroupFactory(GroupWriteSupport.getSchema(context.getConfiguration()));
        }

        public void map(LongWritable _key, Text ivalue, Context context) throws IOException, InterruptedException {
            // Each input line holds two whitespace-separated columns: city and ip
            Group pair = factory.newGroup();
            String[] strs = ivalue.toString().split("\\s+");
            pair.append("city", strs[0]);
            pair.append("ip", strs[1]);
            // ParquetOutputFormat ignores the key, so pass null with the Group value
            context.write(null, pair);
        }
    }
    
    public static class ParquetReadMapper extends Mapper<Void, Group, Text, Text> {
        public void map(Void _key, Group group, Context context) throws IOException, InterruptedException {
            // getString(fieldIndex, index): fields 0 and 1 are "city" and "ip" in the schema
            String city = group.getString(0, 0);
            String ip = group.getString(1, 0);
            context.write(new Text(city), new Text(ip));
        }
    }
    
    public static class NormalMapper extends Mapper<LongWritable, Text, Text, Text> {

        public void map(LongWritable ikey, Text ivalue, Context context) throws IOException, InterruptedException {
            String[] strs = ivalue.toString().split("\\s+");
            context.write(new Text(strs[0]), new Text(strs[1]));
        }
    }
    public static class NormalReducer extends Reducer<Text, Text, Text, Text> {

        public void reduce(Text _key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            for (Text text : values) {
                context.write(_key, text);
            }
        }
    }

}
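
The mode-4 branch (Parquet in, Parquet out) is left as a TODO in the listing. Below is a minimal sketch of one way to complete it, assuming the code is added inside ParquetReaderAndWriteMRDemo with the same imports; PassThroughMapper is a name introduced here, not part of the original demo:

    // Hypothetical pass-through mapper for mode 4: each Group read from the
    // Parquet input is emitted unchanged and re-serialized with the same schema
    public static class PassThroughMapper extends Mapper<Void, Group, Void, Group> {
        public void map(Void _key, Group group, Context context) throws IOException, InterruptedException {
            context.write(null, group);
        }
    }

The corresponding driver branch mirrors modes 2 and 3, combining the Parquet input and output setup:

        if (otherargs[2].equals("4")) {
            job.setMapperClass(PassThroughMapper.class);
            job.setNumReduceTasks(0);
            // Parquet input
            job.setInputFormatClass(ParquetInputFormat.class);
            ParquetInputFormat.setReadSupportClass(job, GroupReadSupport.class);
            FileInputFormat.setInputPaths(job, otherargs[0]);
            // Parquet output; the schema was already registered via GroupWriteSupport.setSchema
            job.setOutputFormatClass(ParquetOutputFormat.class);
            ParquetOutputFormat.setWriteSupportClass(job, GroupWriteSupport.class);
            FileOutputFormat.setOutputPath(job, new Path(otherargs[1]));
            if (!job.waitForCompletion(true))
                return;
        }

With that branch in place, mode 4 runs like the others, e.g. hadoop jar <demo-jar> ParquetReaderAndWriteMRDemo <parquet-in> <parquet-out> 4 (jar name and paths are placeholders).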

 

Reposted from: https://www.cnblogs.com/yanghaolie/p/7389543.html
