Generating HFiles and Bulk Loading Them into HBase

1. Generating HFiles with MapReduce
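
The job below reads plain-text input in which each line holds a row key and a value separated by a tab. The mapper turns each line into a KeyValue in the PROTOCOLID column family, KeyValueSortReducer sorts the KeyValues within each row (HFileOutputFormat requires sorted input), and the output is written directly as HFiles, HBase's native storage format.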

package insert.tools.hfile;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TestHFileToHBase {

	public static class TestHFileToHBaseMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> {

		@Override
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String[] values = value.toString().split("\t", 2); // row key and value are tab-separated
			byte[] row = Bytes.toBytes(values[0]);
			ImmutableBytesWritable k = new ImmutableBytesWritable(row);
			KeyValue kvProtocol = new KeyValue(row, "PROTOCOLID".getBytes(), "PROTOCOLID".getBytes(), values[1]
					.getBytes());
			context.write(k, kvProtocol);

			// A second KeyValue could be emitted here for another column
			// family, e.g.:
			// KeyValue kvSrcip = new KeyValue(row, "SRCIP".getBytes(),
			//		"SRCIP".getBytes(), values[1].getBytes());
			// context.write(k, kvSrcip);
			// Note that early versions of HFileOutputFormat can only write a
			// single column family per job.
		}

	}

	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
		Configuration conf = HBaseConfiguration.create();
		Job job = new Job(conf, "TestHFileToHBase");
		job.setJarByClass(TestHFileToHBase.class);

		job.setOutputKeyClass(ImmutableBytesWritable.class);
		job.setOutputValueClass(KeyValue.class);

		job.setMapperClass(TestHFileToHBaseMapper.class);
		job.setReducerClass(KeyValueSortReducer.class);
		job.setOutputFormatClass(HFileOutputFormat.class);

		// configureIncrementalLoad() inspects the target table's regions and
		// configures the partitioner, the number of reduce tasks, and the
		// reducer for the job, so none of these need to be set by hand.
		HTable table = new HTable(conf, "hua");
		HFileOutputFormat.configureIncrementalLoad(job, table);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
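
HFileOutputFormat.configureIncrementalLoad() reads the region boundaries of the target table to set up total-order partitioning, so the table and its column family must already exist when the job is submitted. A minimal sketch of creating it with the same old-style client API (the class name CreateTargetTable is arbitrary; the table name "hua" is carried over from the code above):

package insert.tools.hfile;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTargetTable {

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		HBaseAdmin admin = new HBaseAdmin(conf);
		// Describe table "hua" with the PROTOCOLID family the mapper writes to.
		HTableDescriptor desc = new HTableDescriptor("hua");
		desc.addFamily(new HColumnDescriptor("PROTOCOLID"));
		if (!admin.tableExists("hua")) {
			admin.createTable(desc);
		}
	}

}

The job itself is submitted like any other MapReduce job, e.g. hadoop jar <your-jar> insert.tools.hfile.TestHFileToHBase <input-dir> <hfile-output-dir>.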

2. Bulk Loading the HFiles into HBase
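
LoadIncrementalHFiles hands the HFiles produced above to the region servers, which adopt each file into the region owning its key range (files that straddle a region boundary are split first). Since the data bypasses the normal write path, the load is fast and puts very little pressure on the cluster.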

package insert.tools.hfile;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;

public class TestLoadIncrementalHFileToHBase {

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		// args[0] is the table name, args[1] the HFile directory produced by
		// the MapReduce job above.
		byte[] TABLE = Bytes.toBytes(args[0]);
		HTable table = new HTable(conf, TABLE);
		LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
		loader.doBulkLoad(new Path(args[1]), table);
	}

}
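
Depending on the HBase version, the same class also exposes a command-line entry point, so the HFiles can alternatively be loaded with the stock tool: hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles <hfile-dir> <table-name> (also available as the completebulkload command).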

Source: http://blog.csdn.net/dajuezhao/article/details/6365053
