Several ways to write to HBase from Java
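
This is a small test class that exercises several ways of writing data into HBase from Java: a single Table.put(List<Put>) call, client-side write buffering via the deprecated HTable.setAutoFlush/flushCommits pair, Table.batch(), a BufferedMutator, and finally writing an HFile directly to HDFS and bulk-loading it with LoadIncrementalHFiles. The code targets the HBase 1.x client API; several of the HTable methods used here are deprecated in 1.x and gone in 2.x.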


package com.txj.iov.kafka_to_hbase.hbase.test;

import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import javax.annotation.PostConstruct;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileWriterV3;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import com.txj.iov.kafka_to_hbase.hbase.HBasePoolUtils;
import com.txj.iov.kafka_to_hbase.util.PropertiesUtil;

@Component
public class WriteHbaseTest {

	public WriteHbaseTest() {
		initData();
	}

	private static int totalRow = 10000 * 20; // 200,000 rows
	// batch() writes one result slot per operation, so the array must match
	// the size of the action list (a null array is not safe to pass).
	private static Object[] results = new Object[totalRow];

	private static List<Put> rows = new ArrayList<Put>();

	private String tname = "test_11";

	@PostConstruct
	public void test() {
		try {
			// Uncomment one test at a time to benchmark it in isolation.
//			test1();
//			test2();
//			test3();
//			test4();
//			test5();
//			test6();
			test7();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

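	// Approach 1: a single Table.put(List<Put>) call. The client splits the
	// list into RPCs internally; there is no client-side buffering beyond
	// this one call.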
	@Async
	public void test1() throws Exception {
		Table table = getConnection().getTable(TableName.valueOf(tname));
		long t1 = System.currentTimeMillis();
		table.put(rows);
		long t2 = System.currentTimeMillis();
		System.out.println("Test1 耗时{" + (t2 - t1) + "}");
		table.close();
	}

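	// Approach 2: disable auto-flush so Puts accumulate in the client-side
	// write buffer, then push them out in one go with flushCommits(). Both
	// calls are deprecated in HBase 1.x; BufferedMutator (test6) is the
	// replacement.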
	@Async
	public void test2() throws Exception {
		HTable table = (HTable) getConnection().getTable(TableName.valueOf(tname));
		long t1 = System.currentTimeMillis();
		table.setAutoFlush(false, true);
		table.put(rows);
		table.flushCommits();
		long t2 = System.currentTimeMillis();
		System.out.println("Test2 耗时{" + (t2 - t1) + "}");
		table.close();
	}

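	// Approach 3: same as test2, but with the write buffer enlarged to 50 MB
	// so the 200,000 Puts need fewer intermediate flushes.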
	@Async
	public void test3() throws Exception {
		HTable table = (HTable) getConnection().getTable(TableName.valueOf(tname));
		long t1 = System.currentTimeMillis();
		table.setAutoFlush(false, true);
		table.setWriteBufferSize(1024 * 1024 * 50);
		table.put(rows);
		table.flushCommits();
		long t2 = System.currentTimeMillis();
		System.out.println("Test3 耗时{" + (t2 - t1) + "}");
		table.close();
	}

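	// Approach 4: same buffering as test3, but issuing the Puts one at a
	// time; with auto-flush off, the write buffer should make this roughly
	// comparable to the list form.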
	@Async
	public void test4() throws Exception {
		HTable table = (HTable) getConnection().getTable(TableName.valueOf(tname));
		long t1 = System.currentTimeMillis();
		table.setAutoFlush(false, true);
		table.setWriteBufferSize(1024 * 1024 * 50);
		for (Put row : rows) {
			table.put(row);
		}
		table.flushCommits();
		long t2 = System.currentTimeMillis();
		System.out.println("Test4 耗时{" + (t2 - t1) + "}");
		table.close();
	}

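	// Approach 5: Table.batch(), which reports a per-operation result into
	// the supplied array. batch() submits through its own path rather than
	// the client write buffer, so the setAutoFlush/setWriteBufferSize calls
	// below likely have no effect on it.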
	@Async
	public void test5() throws Exception {
		HTable table = (HTable) getConnection().getTable(TableName.valueOf(tname));
		long t1 = System.currentTimeMillis();
		table.setAutoFlush(false, true);
		table.setWriteBufferSize(1024 * 1024 * 50);
		table.batch(rows, results);
		table.flushCommits();
		long t2 = System.currentTimeMillis();
		System.out.println("Test5 耗时{" + (t2 - t1) + "}");
		table.close();
	}

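	// Approach 6: BufferedMutator, the non-deprecated replacement for the
	// setAutoFlush/flushCommits pattern. mutate() queues the Puts and
	// flush() pushes out whatever is still buffered.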
	@Async
	public void test6() throws Exception {
		BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(tname));
		params.writeBufferSize(50 * 1024 * 1024);
		BufferedMutator table = getConnection().getBufferedMutator(params);
		long t1 = System.currentTimeMillis();
		table.mutate(rows);
		table.flush(); // flush inside the timed section so buffered Puts are counted
		long t2 = System.currentTimeMillis();
		System.out.println("Test6 took " + (t2 - t1) + " ms");
		table.close();

	}

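	// Approach 7: bypass the normal write path by generating an HFile on
	// HDFS and bulk-loading it. This is typically the fastest option for
	// large volumes because it skips the WAL and the MemStore entirely.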
	public void test7() {
		try {
			Properties hbaseConfig = PropertiesUtil.getConfigByFileName("hbase.properties");
			System.setProperty("HADOOP_USER_NAME", "hdfs");
			Compression.Algorithm compressAlgo = Compression.Algorithm.NONE;
			Configuration conf = HBasePoolUtils.getConfiguration();
			// The HFile must sit in a directory named after the column family
			// ("f1" here) for doBulkLoad to pick it up.
			String pathString = hbaseConfig.getProperty("path") + hbaseConfig.getProperty("family");
			Path hfilePath = new Path(pathString);
			FileSystem fs = FileSystem.get(new URI(pathString), conf, "hdfs");
			HFileContext context = new HFileContextBuilder()
					.withBlockSize(4096)
					.withIncludesTags(true)
					.withCompression(compressAlgo)
					.build();
			KVComparator comparator = new KVComparator();
			HFile.Writer writer = new HFileWriterV3(conf, new CacheConfig(conf),
					fs, hfilePath, null, comparator, context);
			// KeyValues must be appended in sorted rowkey order; the fixed-width
			// numeric suffix (100000..599999) keeps the lexicographic order correct.
			for (int i = 100000; i < 600000; i++) {
				KeyValue keyValue = new KeyValue(Bytes.toBytes("rowkey_" + i),
						Bytes.toBytes("f1"), Bytes.toBytes("val"),
						Bytes.toBytes(RandomStringUtils.random(36)));
				writer.append(keyValue);
			}
			writer.close();
			LoadIncrementalHFiles load = new LoadIncrementalHFiles(conf);
			long l1 = System.currentTimeMillis();
			HTable table = (HTable) HBasePoolUtils.getConn().getTable(TableName.valueOf(tname));
			// Bulk-load from the parent directory, which contains the family subdirectory.
			hfilePath = new Path(hbaseConfig.getProperty("path"));
			load.doBulkLoad(hfilePath, table);
			long l2 = System.currentTimeMillis();
			System.out.println("Bulk load succeeded, took " + (l2 - l1) + " ms");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

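	// Generates the 200,000 Puts shared by tests 1-6, each against column
	// family "f1" with a 50-character random value.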
	private static void initData() {
		for (int i = 0; i < totalRow; i++) {
			Put put = new Put(Bytes.toBytes("row_" + i));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("val"), Bytes.toBytes(RandomStringUtils.random(50)));
			rows.add(put);
		}
	}

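	// Builds a Connection from ZooKeeper coordinates. Note that each test
	// opens a fresh Connection and never closes it, which is tolerable for a
	// throwaway benchmark but would leak in real code.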
	private static Connection getConnection() throws Exception {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", "ip");
		conf.set("hbase.zookeeper.property.clientPort", "port");
		return ConnectionFactory.createConnection(conf);
	}
}
