HDFS 与 HBase 互导示例 7:从 HBase 中读取数据写入 HDFS(HbaseReader),以及从 HDFS 读取数据写入 HBase(HbaseSinker)

/**
 * Reference: the HBase {@code TableMapper} base class is declared as
 *
 *   public abstract class TableMapper<KEYOUT, VALUEOUT>
 *       extends Mapper<ImmutableBytesWritable, Result, KEYOUT, VALUEOUT> {}
 *
 * i.e. the map input key/value are fixed to the HBase row key and Result.
 *
 * @author duanhaitao@itcast.cn
 */

public class HbaseReader {

publicstatic String flow_fields_import = "flow_fields_import";

staticclass HdfsSinkMapper extends TableMapper{

@Override

protectedvoid map(ImmutableBytesWritable key, Result value, Context context) throwsIOException, InterruptedException {

byte[]bytes = key.copyBytes();

Stringphone = new String(bytes);

byte[]urlbytes = value.getValue("f1".getBytes(),"url".getBytes());

Stringurl = new String(urlbytes);

context.write(newText(phone + "\t" + url), NullWritable.get());

}

}

staticclass HdfsSinkReducer extends Reducer{

@Override

protectedvoid reduce(Text key, Iterable values, Context context)throws IOException, InterruptedException {

context.write(key,NullWritable.get());

}

}

publicstatic void main(String[] args) throws Exception {

Configurationconf = HBaseConfiguration.create();

conf.set("hbase.zookeeper.quorum","spark01");

Jobjob = Job.getInstance(conf);

job.setJarByClass(HbaseReader.class);

//            job.setMapperClass(HdfsSinkMapper.class);

Scanscan = new Scan();

TableMapReduceUtil.initTableMapperJob(flow_fields_import,scan, HdfsSinkMapper.class, Text.class, NullWritable.class, job);

job.setReducerClass(HdfsSinkReducer.class);

FileOutputFormat.setOutputPath(job,new Path("c:/hbasetest/output"));

job.setOutputKeyClass(Text.class);

job.setOutputValueClass(NullWritable.class);

job.waitForCompletion(true);

}

}

/**
 * Reference: the HBase {@code TableReducer} base class is declared as
 *
 *   public abstract class TableReducer<KEYIN, VALUEIN, KEYOUT>
 *       extends Reducer<KEYIN, VALUEIN, KEYOUT, Mutation> {}
 *
 * i.e. the reduce output value is fixed to an HBase Mutation (e.g. Put).
 *
 * @author duanhaitao@itcast.cn
 */

public class HbaseSinker {

publicstatic String flow_fields_import = "flow_fields_import";

staticclass HbaseSinkMrMapper extends Mapper{

@Override

protectedvoid map(LongWritable key, Text value, Context context) throws IOException,InterruptedException {

Stringline = value.toString();

String[] fields =line.split("\t");

Stringphone = fields[0];

Stringurl = fields[1];

FlowBeanbean = new FlowBean(phone,url);

context.write(bean,NullWritable.get());

}

}

staticclass HbaseSinkMrReducer extends TableReducer{

@Override

protectedvoid reduce(FlowBean key, Iterable values, Context context)throws IOException, InterruptedException {

Putput = new Put(key.getPhone().getBytes());

put.add("f1".getBytes(),"url".getBytes(), key.getUrl().getBytes());

context.write(newImmutableBytesWritable(key.getPhone().getBytes()), put);

}

}

publicstatic void main(String[] args) throws Exception {

Configurationconf = HBaseConfiguration.create();

conf.set("hbase.zookeeper.quorum","spark01");

HBaseAdminhBaseAdmin = new HBaseAdmin(conf);

booleantableExists = hBaseAdmin.tableExists(flow_fields_import);

if(tableExists){

hBaseAdmin.disableTable(flow_fields_import);

hBaseAdmin.deleteTable(flow_fields_import);

}

HTableDescriptordesc = new HTableDescriptor(TableName.valueOf(flow_fields_import));

HColumnDescriptorhColumnDescriptor = new HColumnDescriptor ("f1".getBytes());

desc.addFamily(hColumnDescriptor);

hBaseAdmin.createTable(desc);

Jobjob = Job.getInstance(conf);

job.setJarByClass(HbaseSinker.class);

job.setMapperClass(HbaseSinkMrMapper.class);

TableMapReduceUtil.initTableReducerJob(flow_fields_import,HbaseSinkMrReducer.class, job);

FileInputFormat.setInputPaths(job,new Path("c:/hbasetest/data"));

job.setMapOutputKeyClass(FlowBean.class);

job.setMapOutputValueClass(NullWritable.class);

job.setOutputKeyClass(ImmutableBytesWritable.class);

job.setOutputValueClass(Mutation.class);

job.waitForCompletion(true);

}

}

本文出自 “为了手指那个方向” 博客,谢绝转载!

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值