Java: reading HDFS in HA mode, and converting data between HBase and HDFS

1. Read data from HBase table 1 and store the word-count result in table 2

First create the source table (table 1) in HBase:

create 'hello','cf'

put 'hello','1','cf:hui','hello world'
put 'hello','2','cf:hui','hello hadoop'
put 'hello','3','cf:hui','hello hive'
put 'hello','4','cf:hui','hello hadoop'
put 'hello','5','cf:hui','hello world'
put 'hello','6','cf:hui','hello world'
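The same rows can also be written from Java instead of the shell. Below is a minimal sketch (not part of the original article) that assumes the same pre-1.0 HBase client API (HTable, Put.add) used by the MapReduce job further down; the class name PutHello is just illustrative:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutHello {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Old-style (pre-1.0) client API, matching the style of the job below.
        HTable table = new HTable(conf, "hello");
        String[] lines = {"hello world", "hello hadoop", "hello hive",
                          "hello hadoop", "hello world", "hello world"};
        for (int i = 0; i < lines.length; i++) {
            Put put = new Put(Bytes.toBytes(String.valueOf(i + 1)));   // row keys '1'..'6'
            put.add(Bytes.toBytes("cf"), Bytes.toBytes("hui"), Bytes.toBytes(lines[i]));
            table.put(put);
        }
        table.close();
    }
}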

The Java code:

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class HBaseToHbase {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        String hbaseTableName1 = "hello";   // source table
        String hbaseTableName2 = "mytb2";   // target table for the word counts

        prepareTB2(hbaseTableName2);        // (re)create the target table

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(HBaseToHbase.class);
        job.setJobName("mrreadwritehbase");

        Scan scan = new Scan();
        scan.setCaching(500);        // fetch 500 rows per RPC to the region servers
        scan.setCacheBlocks(false);  // do not pollute the block cache with a full MR scan

        TableMapReduceUtil.initTableMapperJob(hbaseTableName1, scan, doMapper.class, Text.class, IntWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(hbaseTableName2, doReducer.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    public static class doMapper extends TableMapper<Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);

        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // Each row holds a single cell (cf:hui); emit its value with a count of 1.
            String rowValue = Bytes.toString(value.list().get(0).getValue());
            context.write(new Text(rowValue), one);
        }
    }

    public static class doReducer extends TableReducer<Text, IntWritable, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            System.out.println(key.toString());
            int sum = 0;
            Iterator<IntWritable> haha = values.iterator();
            while (haha.hasNext()) {
                sum += haha.next().get();
            }
            // The row key of the target table is the word itself; the count goes into mycolumnfamily:count.
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.add(Bytes.toBytes("mycolumnfamily"), Bytes.toBytes("count"), Bytes.toBytes(String.valueOf(sum)));
            context.write(NullWritable.get(), put);
        }
    }

    public static void prepareTB2(String hbaseTableName) throws IOException {
        HTableDescriptor tableDesc = new HTableDescriptor(hbaseTableName);
        HColumnDescriptor columnDesc = new HColumnDescriptor("mycolumnfamily");
        tableDesc.addFamily(columnDesc);
        Configuration cfg = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(cfg);
        if (admin.tableExists(hbaseTableName)) {
            System.out.println("Table exists, trying drop and create!");
            admin.disableTable(hbaseTableName);
            admin.deleteTable(hbaseTableName);
            admin.createTable(tableDesc);
        } else {
            System.out.println("create table: " + hbaseTableName);
            admin.createTable(tableDesc);
        }
    }
}
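Note that prepareTB2 uses the pre-1.0 admin API (HBaseAdmin, HTableDescriptor, HColumnDescriptor), which was removed in later releases. As a rough equivalent on HBase 2.x (my own sketch, not from the article), the same drop-and-recreate logic would go through Connection/Admin and the descriptor builders:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class PrepareTB2New {
    // Sketch of prepareTB2 for the HBase 2.x client API (assumed equivalent, not from the original code).
    public static void prepareTB2(String hbaseTableName) throws IOException {
        Configuration cfg = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(cfg);
             Admin admin = conn.getAdmin()) {
            TableName name = TableName.valueOf(hbaseTableName);
            TableDescriptor desc = TableDescriptorBuilder.newBuilder(name)
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("mycolumnfamily"))
                    .build();
            if (admin.tableExists(name)) {   // drop and recreate, as in the original
                admin.disableTable(name);
                admin.deleteTable(name);
            }
            admin.createTable(desc);
        }
    }
}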

Compile and run the code on Linux (the Hadoop and HBase client jars must be on the classpath):

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/javac HBaseToHbase.java

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/jar cvf xx.jar HBaseToHbase*class

[hadoop@h71 q1]$ hadoop jar xx.jar HBaseToHbase

Scan the mytb2 table to check the result:

hbase(main):009:0> scan 'mytb2'
ROW                                                          COLUMN+CELL
 hello hadoop                                                column=mycolumnfamily:count, timestamp=1489817182454, value=2
 hello hive                                                  column=mycolumnfamily:count, timestamp=1489817182454, value=1
 hello world                                                 column=mycolumnfamily:count, timestamp=1489817182454, value=3
3 row(s) in 0.0260 seconds
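The counts can also be read back from Java with a simple Get against mytb2. Here is a minimal sketch (not in the original article) using the same pre-1.0 client API; the class name ReadCount is just illustrative:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class ReadCount {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "mytb2");
        Get get = new Get(Bytes.toBytes("hello world"));   // the row key is the word emitted by the reducer
        Result result = table.get(get);
        byte[] v = result.getValue(Bytes.toBytes("mycolumnfamily"), Bytes.toBytes("count"));
        // The reducer stored the count as a string, so decode it with Bytes.toString.
        System.out.println("hello world -> " + Bytes.toString(v));
        table.close();
    }
}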

Author: 清风_d587

Source: https://www.jianshu.com/p/01411078c449
