Java 访问 HBase

1.kerberos验证

a.下载对应版本JCE(Java Cryptography Extension),解压并拷贝local_policy.jar/US_export_policy.jar到$JAVA_HOME/jre/lib/security

b.在resource即classes路径下添加hbase-site.xml

<property>
  <name>hbase.cluster.name</name>
  <value>${hbase.cluster.name}</value>
</property>
<property>
  <name>hbase.rpc.timeout</name>
  <value>200</value>
</property>
<property>
  <name>hbase.client.operation.timeout</name>
  <value>200</value>
  <description>in ms</description>
</property>
<property>
  <name>hbase.client.retries.number</name>
  <value>2</value>
</property>
<property>
  <name>hbase.client.tablepool.maxsize</name>
  <value>30</value>
</property>

c.加JVM启动参数设置验证参数

-Dhadoop.property.hadoop.security.authentication=kerberos

-Djava.security.krb5.conf=${conf_path}/krb5-hadoop.conf

-Dhadoop.property.hadoop.client.keytab.file=${conf_path}/${kerberos_principal}.keytab

-Dhadoop.property.hadoop.client.kerberos.principal=${kerberos_principal}@XIAOMI.HADOOP

具体的读写代码就不列了,网上例子比较多。

2.MapReduce批量写数据到Hbase

import org.apache.commons.lang.StringUtils;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.hbase.HBaseConfiguration;

import org.apache.hadoop.hbase.client.Put;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;

import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

import org.apache.hadoop.hbase.mapreduce.TableReducer;

import org.apache.hadoop.hbase.util.Bytes;

import org.apache.hadoop.io.IntWritable;

import org.apache.hadoop.io.LongWritable;

import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;

import org.apache.hadoop.mapreduce.Mapper;

import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import java.io.IOException;

/**
 * MapReduce job that bulk-loads data from HDFS into an HBase table.
 *
 * <p>Mappers read the source file and emit (Text, IntWritable) pairs; the
 * reducer joins each key's values into a comma-separated string and writes
 * one Put per row key, throttled to 800 rows/s.
 */
public class WriteToHbase {

    // HDFS input path; must be filled in before submitting the job.
    private static final String INPUT = "";

    // HBase cluster address and table name (cluster URI + namespace:table).
    private static final String TABLE = "hbase://hytst-staging/namespace:tablename";

    /** Reads the source file and emits (Text key, IntWritable value) pairs. */
    public static class SourceMapper
            extends Mapper<LongWritable, Text, Text, IntWritable> {
        //something
    }

    /** Writes one HBase row per reduce key. */
    public static class WriteReducer
            extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

        // Use Bytes.toBytes rather than String.getBytes(): the latter depends
        // on the platform default charset.
        private byte[] family = Bytes.toBytes("W");    // column family
        private byte[] qualifier = Bytes.toBytes("i"); // column qualifier

        private int rowDone;     // rows written so far, used for rate limiting
        private long startTime;  // reducer start time in ms, used for rate limiting

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            rowDone = 0;
            startTime = System.currentTimeMillis();
            super.setup(context);
        }

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Text.getBytes() returns the backing array, which may be longer
            // than the logical content; copyBytes() returns exactly
            // getLength() bytes, avoiding corrupt row keys.
            byte[] rowkey = key.copyBytes();
            Put put = new Put(rowkey);
            put.add(family, qualifier, Bytes.toBytes(StringUtils.join(values.iterator(), ",")));
            context.write(new ImmutableBytesWritable(rowkey), put);
            // Alternative: write directly via HTable instead of the job's
            // output format:
            //   HTable table = new HTable(context.getConfiguration(), TABLE);
            //   table.put(put);
            //   table.close();
            ++rowDone;
            // Throttle writes to at most 800 rows per second.
            TableMapReduceUtil.limitScanRate(800, rowDone, System.currentTimeMillis() - startTime);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "HdfsToHbase");
        job.setJarByClass(WriteToHbase.class);
        // Turn off speculative execution to avoid writing the same row to
        // HBase more than once.
        job.setSpeculativeExecution(false);

        job.setMapperClass(SourceMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(INPUT));

        // Wires up the reducer, output format, and HBase connection config.
        TableMapReduceUtil.initTableReducerJob(TABLE, WriteReducer.class, job);
        job.setNumReduceTasks(2);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值