1、创建Maven项目并添加依赖
要使用 Java API 操作 HBase,需要引入 hbase-client。这里选取的 HBase Client 的版本为 1.2.0。
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>1.2.0</version>
</dependency>
2、使用Java API开发代码
可先将连接方式写入静态方法,方便调用。
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import java.io.IOException;
/**
 * Static factory utility for HBase {@link Connection} and {@link Admin} handles.
 * Not instantiable.
 */
public class HBaseConfs {
    private HBaseConfs(){}

    /**
     * Builds a Configuration seeded from the cluster's hbase-site.xml and
     * core-site.xml. Paths are fixed to the node's install layout.
     */
    private static Configuration getConf(){
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/opt/hbase/conf/hbase-site.xml"));
        conf.addResource(new Path("/opt/hadoop/etc/hadoop/core-site.xml"));
        return conf;
    }

    /**
     * Opens a new HBase connection. The caller owns it and must close it.
     *
     * @throws IllegalStateException if the connection cannot be established;
     *         failing fast here beats returning null, which would only
     *         surface later as an NPE far from the real cause.
     */
    public static Connection getConn(){
        try {
            return ConnectionFactory.createConnection(getConf());
        } catch (IOException e) {
            throw new IllegalStateException("Unable to create HBase connection", e);
        }
    }

    /**
     * Returns an Admin backed by a freshly opened connection.
     *
     * NOTE(review): the Connection created here is not reachable by the
     * caller, so closing the Admin does not release it. Prefer
     * {@code getConn().getAdmin()} and close both when done.
     *
     * @throws IllegalStateException if the Admin cannot be obtained.
     */
    public static Admin getAdmin(){
        try {
            return getConn().getAdmin();
        } catch (IOException e) {
            throw new IllegalStateException("Unable to obtain HBase Admin", e);
        }
    }
}
2.1创建表
import cn.kgc.kb09.test.util.HBaseConfs;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import java.io.IOException;
/**
* @Author
* @Date 2020/9/25
* @Description
*/
/**
 * Command-line tool that creates an HBase table.
 *
 * Usage: CreateTable &lt;tableName&gt; &lt;columnFamily&gt; [&lt;columnFamily&gt; ...]
 */
public class CreateTable {
    public static void main(String[] args) {
        // Guard against missing arguments instead of failing with an
        // ArrayIndexOutOfBoundsException deep inside the HBase client.
        if (args.length < 2) {
            System.err.println("Usage: CreateTable <tableName> <columnFamily> [<columnFamily> ...]");
            System.exit(1);
        }
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(args[0]));
        // Every argument after the table name is a column family.
        for (int i = 1; i < args.length; i++) {
            htd.addFamily(new HColumnDescriptor(args[i]));
        }
        // Admin is Closeable; try-with-resources guarantees release even if
        // createTable throws. (The Connection behind HBaseConfs.getAdmin()
        // is not reachable from here — see note on HBaseConfs.)
        try (Admin admin = HBaseConfs.getAdmin()) {
            admin.createTable(htd);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
2.2插入数据
import cn.kgc.kb09.test.util.HBaseConfs;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
/**
 * Command-line tool that lists existing tables, then inserts four sample
 * rows into a table (column families "name" and "time").
 *
 * Usage: InsertTable [&lt;tableName&gt;]  — defaults to "abc" when no
 * argument is given, preserving the original behavior.
 */
public class InsertTable {
    public static void main(String[] args) throws Exception {
        // Optional first argument overrides the hard-coded default table.
        String tableName = args.length > 0 ? args[0] : "abc";
        // try-with-resources closes Table, Admin and Connection even on
        // failure; deriving Admin from conn avoids the second, leaked
        // Connection that HBaseConfs.getAdmin() would open.
        try (Connection conn = HBaseConfs.getConn();
             Admin admin = conn.getAdmin();
             Table table = conn.getTable(TableName.valueOf(tableName))) {
            for (TableName tn : admin.listTableNames()) {
                System.out.println(tn.getNameAsString());
            }
            // {rowKey, first name, last name, address, city}
            String[][] values = {
                {"1","娜娜","李","1st Red House","WDC"}
                ,{"2","明明","王","10th 唐宁街","London"}
                ,{"3","多多","黄","111th 东关街","Yangzhou"}
                ,{"4","紫紫","杨","12th xiaohang","Nanjing"}
            };
            for (String[] row : values) {
                // Explicit UTF-8: bare getBytes() uses the platform charset
                // and would corrupt the Chinese values on non-UTF-8 JVMs.
                Put put = new Put(row[0].getBytes(StandardCharsets.UTF_8));
                put.addColumn("name".getBytes(StandardCharsets.UTF_8),
                        "fname".getBytes(StandardCharsets.UTF_8),
                        row[1].getBytes(StandardCharsets.UTF_8));
                put.addColumn("name".getBytes(StandardCharsets.UTF_8),
                        "lname".getBytes(StandardCharsets.UTF_8),
                        row[2].getBytes(StandardCharsets.UTF_8));
                put.addColumn("time".getBytes(StandardCharsets.UTF_8),
                        "address".getBytes(StandardCharsets.UTF_8),
                        row[3].getBytes(StandardCharsets.UTF_8));
                put.addColumn("time".getBytes(StandardCharsets.UTF_8),
                        "city".getBytes(StandardCharsets.UTF_8),
                        row[4].getBytes(StandardCharsets.UTF_8));
                table.put(put);
            }
        }
    }
}
3、打包、上传Jar包并测试
- 打包并上传jar包
- 测试建表
hadoop jar testhbase.jar 'cn.kgc.kb09.test.CreateTable' 'abc' 'name' 'time'
- 测试插入数据
hadoop jar testhbase.jar 'cn.kgc.kb09.test.InsertTable'