Reading a file from HDFS and storing it into HBase
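The job below assumes that the HDFS input (the path hdfs://192.168.52.140:9000/myhbase1463572723056 in the code) already holds word-count output as tab-separated lines, e.g. a hypothetical line "hello<TAB>3". KeyValueTextInputFormat splits each line at the first tab into the Text key (the word) and the Text value (the count), and since no Mapper is configured, the default identity Mapper forwards these pairs straight to the reducer, which writes them into HBase.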

import java.io.IOException;
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
 
// Read a file from HDFS and store the word counts into HBase
public class WordCountHbaseMapreduce01 {
 
     public static void main(String[] args) throws Exception {
         System.exit(run());
     }
 
     public static int run() throws Exception {
         Configuration conf = new Configuration();
         conf = HBaseConfiguration.create(conf);
         conf.set("hbase.zookeeper.quorum", "192.168.52.140");

         Job job = Job.getInstance(conf, "wordcount");
         job.setJarByClass(WordCountHbaseMapreduce01.class);

         // Each input line is split at the first tab into a Text key and a Text value;
         // no Mapper is set, so the default identity Mapper passes the pairs through unchanged.
         job.setInputFormatClass(KeyValueTextInputFormat.class);

         job.setMapOutputKeyClass(Text.class);
         job.setMapOutputValueClass(Text.class);

         FileInputFormat.addInputPath(job, new Path(
                 "hdfs://192.168.52.140:9000/myhbase1463572723056"));

         // Write the reducer output into the HBase table "word"
         TableMapReduceUtil.initTableReducerJob("word",
                 MyHbaseReducer.class, job);
         checkTable(conf);
         return job.waitForCompletion(true) ? 0 : 1;
     }
     // Create the target table if it does not exist yet
     private static void checkTable(Configuration conf) throws Exception {
         Connection con = ConnectionFactory.createConnection(conf);
         Admin admin = con.getAdmin();
         TableName tn = TableName.valueOf("word");
         if (!admin.tableExists(tn)) {
             HTableDescriptor htd = new HTableDescriptor(tn);
             HColumnDescriptor hcd = new HColumnDescriptor("wordcount");
             htd.addFamily(hcd);
             admin.createTable(htd);
             System.out.println("Table did not exist; created it successfully....");
         }
         admin.close();
         con.close();
     }
     // Reducer that stores each word and its count into HBase
     public static class MyHbaseReducer extends TableReducer<Text, Text, ImmutableBytesWritable> {

         @Override
         protected void reduce(Text key, Iterable<Text> values,
                 Reducer<Text, Text, ImmutableBytesWritable, Mutation>.Context context)
                         throws IOException, InterruptedException {
             // Convert the Text to a String first and then to bytes; Text.getBytes()
             // returns the backing buffer, which can contain trailing stale bytes and
             // make the stored words slightly wrong.
             Put put = new Put(Bytes.toBytes(key.toString()));

             put.addColumn(Bytes.toBytes("wordcount"), Bytes.toBytes("num"),
                     Bytes.toBytes(values.iterator().next().toString()));

             context.write(new ImmutableBytesWritable(Bytes.toBytes(key.toString())), put);
         }
     }
 
}
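To confirm the job actually wrote the counts, here is a minimal read-back sketch. It assumes the table name "word", column family "wordcount", and qualifier "num" used above; the row key "hello" is a hypothetical word assumed to appear in the input data.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class WordCountVerify {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.52.140");
        try (Connection con = ConnectionFactory.createConnection(conf);
             Table table = con.getTable(TableName.valueOf("word"))) {
            // "hello" is a hypothetical row key; replace it with a word from your input
            Get get = new Get(Bytes.toBytes("hello"));
            Result result = table.get(get);
            byte[] num = result.getValue(Bytes.toBytes("wordcount"), Bytes.toBytes("num"));
            System.out.println("hello -> " + (num == null ? "not found" : Bytes.toString(num)));
        }
    }
}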