Transferring data between HBase and HDFS

https://www.cnblogs.com/dongdone/p/5687786.html
###### Exporting HBase table data to HDFS (map-only job, no reduce phase)
package com.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class HbaseToHdfs {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		Job job = Job.getInstance(conf, HbaseToHdfs.class.getSimpleName());
		job.setJarByClass(HbaseToHdfs.class);
		job.setMapperClass(HbaseMapper.class);
		job.setNumReduceTasks(0); // map-only job: no reduce phase
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		// Full scan over table "t1"; each Result is handed to HbaseMapper
		TableMapReduceUtil.initTableMapperJob("t1", new Scan(), HbaseMapper.class, Text.class, Text.class, job);
		job.setOutputFormatClass(TextOutputFormat.class);
		FileOutputFormat.setOutputPath(job, new Path("hdfs://192.168.17.128:9000/t1"));
		job.waitForCompletion(true);
	}
	public static class HbaseMapper extends TableMapper<Text, Text> {
		private Text outkey = new Text();
		private Text outvalue = new Text();

		// key (ImmutableBytesWritable) is the rowkey; value (Result) holds the row's cells
		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context)
				throws IOException, InterruptedException {
			// Each field is read from its own qualifier under column family "f1"
			byte[] name = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("name"));
			byte[] age = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("age"));
			byte[] gender = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("gender"));
			byte[] birthday = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("birthday"));
			outkey.set(key.get());
			// Emit "NULL" for any missing cell so the output stays tab-aligned
			String temp = ((name == null || name.length == 0) ? "NULL" : new String(name)) + "\t"
					+ ((age == null || age.length == 0) ? "NULL" : new String(age)) + "\t"
					+ ((gender == null || gender.length == 0) ? "NULL" : new String(gender)) + "\t"
					+ ((birthday == null || birthday.length == 0) ? "NULL" : new String(birthday));
			outvalue.set(temp);
			context.write(outkey, outvalue);
		}
	}

}
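The job assumes a table t1 already exists with a column family f1 holding the name, age, gender and birthday qualifiers. As a usage sketch, the table can be prepared from the hbase shell and the packaged job launched with hadoop jar (the jar name and sample values below are hypothetical):

create 't1', 'f1'
put 't1', 'row1', 'f1:name', 'zhangsan'
put 't1', 'row1', 'f1:age', '20'
put 't1', 'row1', 'f1:gender', 'male'
put 't1', 'row1', 'f1:birthday', '1998-01-01'

hadoop jar hbase-to-hdfs.jar com.test.HbaseToHdfs

Note that FileOutputFormat refuses to run if the output path already exists, so /t1 on HDFS must be removed between runs.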
################### Importing data from HDFS into HBase
https://blog.csdn.net/qq_26091271/article/details/52586953
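The linked post follows the same pattern in reverse. For reference, here is a minimal sketch (not the linked author's exact code): a map-only job that parses the tab-separated lines produced by the export above and writes Puts through TableOutputFormat. Passing a null reducer to TableMapReduceUtil.initTableReducerJob just configures the output side for the target table. Put.addColumn assumes an HBase 1.x+ client (on 0.9x it is Put.add); the column family, qualifiers and paths mirror the export job and should be adjusted to the actual schema.

package com.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class HdfsToHbase {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		Job job = Job.getInstance(conf, HdfsToHbase.class.getSimpleName());
		job.setJarByClass(HdfsToHbase.class);
		job.setMapperClass(HdfsMapper.class);
		job.setNumReduceTasks(0); // map-only: the mapper writes Puts directly
		FileInputFormat.addInputPath(job, new Path("hdfs://192.168.17.128:9000/t1"));
		// null reducer: this call only wires up TableOutputFormat for table "t1"
		TableMapReduceUtil.initTableReducerJob("t1", null, job);
		job.waitForCompletion(true);
	}

	public static class HdfsMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Expected line layout: rowkey \t name \t age \t gender \t birthday
			String[] fields = value.toString().split("\t");
			Put put = new Put(Bytes.toBytes(fields[0]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes(fields[1]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"), Bytes.toBytes(fields[2]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("gender"), Bytes.toBytes(fields[3]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("birthday"), Bytes.toBytes(fields[4]));
			context.write(new ImmutableBytesWritable(Bytes.toBytes(fields[0])), put);
		}
	}
}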
