Reading data with MapReduce and writing it into HBase


1. First, the map class:

package com.hbase.maptohbase;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class Hmap extends Mapper<LongWritable, Text, Text, Text> {
    // Identity-style mapper: the key is the line's byte offset (later used
    // as the HBase row key) and the value is the whole input line.
    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        context.write(new Text(key.toString()), new Text(value));
    }
}
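With TextInputFormat, the map input key is the line's byte offset within the file, so for the first sample line the mapper simply re-emits the line keyed by its offset (offsets here are illustrative):

// input:  (0,   "D11,12:20:22,13:22:29,100")
// output: ("0", "D11,12:20:22,13:22:29,100")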
2. Next, the reduce class:

package com.hbase.maptohbase;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;

public class Hreduce extends TableReducer<Text, Text, ImmutableBytesWritable> {
    @Override
    public void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // The map output key (the line's byte offset) becomes the row key.
        byte[] rowKey = Bytes.toBytes(key.toString());
        // Column qualifiers for the four comma-separated fields of a line.
        String[] qual = {"name", "starttime", "endtime", "price"};

        Put putrow = new Put(rowKey);
        for (Text t : values) {
            String[] fields = t.toString().split(",");
            // Guard against lines with more fields than qualifiers.
            for (int i = 0; i < Math.min(fields.length, qual.length); i++) {
                putrow.add(Bytes.toBytes("f"), Bytes.toBytes(qual[i]),
                        Bytes.toBytes(fields[i]));
            }
        }
        context.write(new ImmutableBytesWritable(rowKey), putrow);
    }
}
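Note that Put.add(family, qualifier, value) is the pre-1.0 HBase API; on HBase 1.0 and later it is deprecated in favor of addColumn. A minimal sketch of the equivalent call inside the loop:

// HBase 1.0+ equivalent of the deprecated Put.add():
putrow.addColumn(Bytes.toBytes("f"), Bytes.toBytes(qual[i]),
        Bytes.toBytes(fields[i]));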
3. Finally, the driver class:

package com.hbase.maptohbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class HmapDriver {
    public static void main(String[] args) throws Exception {
        // HBase configuration; point the client at the ZooKeeper quorum.
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "localhost");

        Job job = Job.getInstance(conf, "hmap");
        job.setJarByClass(HmapDriver.class);
        Path in = new Path("in/");
        job.setInputFormatClass(TextInputFormat.class);
        FileInputFormat.addInputPath(job, in);

        job.setMapperClass(Hmap.class);
        job.setReducerClass(Hreduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Which table to write to: this wires the reducer to "schedule"
        // and sets TableOutputFormat plus the output key/value classes.
        TableMapReduceUtil.initTableReducerJob("schedule", Hreduce.class, job);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
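If the job is submitted with the plain hadoop command, the HBase client jars must also be on the classpath. One common approach (assuming the hbase launcher script is on the PATH) is:

export HADOOP_CLASSPATH=$(hbase classpath)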

Finally, the sample data (two files placed under the in/ input directory):

1.data:

D11,12:20:22,13:22:29,100
G22,12:22:11,23:00:00,230

2.data:

T11,12:20:22,13:22:29,100
t22,12:22:11,23:00:00,230
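One caveat: because the row key is the byte offset within each file, lines at the same offset in 1.data and 2.data (for example, both first lines, at offset 0) share a row key, so their fields overwrite one another in the table. A minimal sketch of a collision-free alternative, assuming the first field (the train number) is unique, would key the map output by that field instead:

// Inside Hmap.map(): use the first comma-separated field as the key.
String line = value.toString();
String trainId = line.substring(0, line.indexOf(','));
context.write(new Text(trainId), new Text(line));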

----------------------------------

Before running the job, you also need to create the target table in HBase:

hbase(main):033:0> create 'schedule','f'

The column family f must exist, since it is what the reducer writes into.

Then you can run the program:
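For example, from the command line (the jar name here is just an assumption; running the driver directly from an IDE also works):

hadoop jar maptohbase.jar com.hbase.maptohbase.HmapDriver

Afterwards, scan 'schedule' in the HBase shell should show the imported rows.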
