Reading data from HBase into a file on HDFS

import java.io.IOException;
import java.util.Date;
import java.util.List;
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
 
//Read data from an HBase table and write it out to a file on HDFS
public class WordCountHbaseMapreduce02 {

    public static void main(String[] args) throws Exception {
        System.exit(run());
    }

    //Map-only job: emit one (row key, cell value) pair per row read from HBase
    public static class MyHBaseMap02 extends TableMapper<Text, Text> {

        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context)
                        throws IOException, InterruptedException {

            String word = null;
            String num = null;
            //The scan below requests a single column (wordcount:num), so each
            //Result carries one cell: the row key is the word, the value its count
            List<Cell> cs = value.listCells();
            for (Cell cell : cs) {
                word = Bytes.toString(CellUtil.cloneRow(cell));
                num = Bytes.toString(CellUtil.cloneValue(cell));
            }

            context.write(new Text(word), new Text(num));
        }
    }

    public static int run() throws Exception {
        Configuration conf = new Configuration();
        conf = HBaseConfiguration.create(conf);
        conf.set("hbase.zookeeper.quorum", "192.168.52.140");

        Job job = Job.getInstance(conf, "wordcount2");
        job.setJarByClass(WordCountHbaseMapreduce02.class);

        Scan scan = new Scan();
        //Fetch only the column the job actually needs
        scan.addColumn(Bytes.toBytes("wordcount"), Bytes.toBytes("num"));

        //Input source: the HBase table "word".
        //ImmutableBytesWritable is the key type HBase hands to the mapper.
        TableMapReduceUtil.initTableMapperJob("word", scan, MyHBaseMap02.class,
                Text.class, Text.class, job);

        //Map-only job: skip the reduce phase so the mapper output is
        //written to HDFS unchanged
        job.setNumReduceTasks(0);
        FileOutputFormat.setOutputPath(job, new Path(
                "hdfs://192.168.52.140:9000/hadoop_hbase_out" + new Date().getTime()));

        return job.waitForCompletion(true) ? 0 : 1;
    }
}
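
The job reads from an HBase table named word, whose rows hold word counts: the row key is the word and the single column wordcount:num holds its count. If you need sample input to try the job against, the sketch below seeds a few rows with the standard HBase client API. This helper is not part of the original post: the class name SeedWordTable and the sample data are hypothetical, and it assumes the table and its column family already exist (e.g. created in the HBase shell with create 'word', 'wordcount').

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

//Hypothetical helper, not from the original post: seeds the "word" table
//with a few sample counts so the MapReduce job above has input to read.
//Assumes the table and its "wordcount" family were created beforehand.
public class SeedWordTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.52.140");

        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("word"))) {
            String[][] samples = { {"hadoop", "3"}, {"hbase", "5"}, {"hdfs", "2"} };
            for (String[] s : samples) {
                //Row key = the word; wordcount:num = its count as a string,
                //matching what the mapper above reads back
                Put put = new Put(Bytes.toBytes(s[0]));
                put.addColumn(Bytes.toBytes("wordcount"), Bytes.toBytes("num"),
                        Bytes.toBytes(s[1]));
                table.put(put);
            }
        }
    }
}

Once the table has data, package the job class into a jar and submit it with hadoop jar; when it finishes, the hadoop_hbase_out<timestamp> directory on HDFS should contain part files with one word and count per line, tab-separated (the default TextOutputFormat layout).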