Java连接Hbase
- 版本:jdk 1.8 、Hbase 1.4.0 、 Maven3.5.2
- 此示例没采用 Kerberos 认证等,只做基础连接、写入测试
1. 配置Maven依赖
- Maven依赖 — pom.xml
<dependencies>
    <!-- JUnit 4.x is required: TestHbase.java uses the @Test annotation
         (org.junit.Test), which does not exist in junit 3.8.1. -->
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-auth</artifactId>
        <version>2.6.0</version>
    </dependency>
    <!-- HBase client artifacts aligned with the HBase 1.4.0 server version
         stated at the top of this document (previously 1.3.1). -->
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.4.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-common</artifactId>
        <version>1.4.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-protocol</artifactId>
        <version>1.4.0</version>
    </dependency>
</dependencies>
2. 连接hbase
连接hbase代码 ---- ConnectHbase.java
package example.Hbase.demo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import java.io.IOException;
public class ConnectHbase {

    private static Connection connection; // cached connection, created lazily and reused
    private static String host;           // ZooKeeper quorum address(es)
    private static String port = "2181";  // ZooKeeper client port (HBase default)

    /**
     * Sets the ZooKeeper quorum host used for the HBase connection.
     * Must be called before the first {@link #getConnect()}.
     *
     * @param host hostname or comma-separated hostnames of the ZooKeeper quorum
     */
    public static void setHost(String host) {
        ConnectHbase.host = host;
    }

    /**
     * Returns a connection to HBase, creating it on first use and reusing it
     * afterwards. {@code Connection} objects are heavyweight and intended to be
     * shared, so a cached instance is returned as long as it is still open.
     *
     * @return an open HBase {@link Connection}, never {@code null}
     * @throws RuntimeException if the connection cannot be established
     *         (previously the error was swallowed and the JVM terminated via
     *         {@code System.exit(1)}, which also kills any embedding test runner)
     */
    public static Connection getConnect() {
        if (connection != null && !connection.isClosed()) {
            return connection; // reuse the existing open connection
        }
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", host);
        config.set("hbase.zookeeper.property.clientPort", port);
        try {
            connection = ConnectionFactory.createConnection(config);
        } catch (IOException ex) {
            // Fail fast with context instead of printStackTrace() + System.exit(1).
            throw new RuntimeException(
                    "Failed to connect to HBase via ZooKeeper at " + host + ":" + port, ex);
        }
        return connection;
    }
}
3. 写入Hbase表数据
- 写入数据到hbase表 ---- TestHbase.java
- 示例代码:
- Hbase —> DEMO命名空间 —> MY_TABLE1表 —> f1 列族
package example.Hbase.demo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import java.io.IOException;
public class TestHbase {

    /**
     * list ---- prints the name of every table in the HBase instance.
     * Resources are managed with try-with-resources so Admin and Connection
     * are closed even when listing fails (the original leaked them on error).
     *
     * @throws IOException if the HBase connection or listing fails
     */
    @Test
    public void getAllTable() throws IOException {
        ConnectHbase.setHost("localhost"); // change to the HBase host to connect to
        try (Connection connection = ConnectHbase.getConnect();
             Admin admin = connection.getAdmin()) {
            for (HTableDescriptor table : admin.listTables()) {
                System.out.println(table.getNameAsString());
            }
        }
    }

    /**
     * put ---- writes one row ("nick") with four cells into DEMO:MY_TABLE1.
     * Uses the {@code Table} interface instead of casting to {@code HTable},
     * and closes the table (the original never closed it) and connection
     * via try-with-resources.
     *
     * @throws IOException if the HBase connection or the put fails
     */
    @Test
    public void WriteToHbase() throws IOException {
        ConnectHbase.setHost("localhost"); // change to the HBase host to connect to
        TableName tn = TableName.valueOf("DEMO:MY_TABLE1"); // namespace:table
        try (Connection connection = ConnectHbase.getConnect();
             Table table = connection.getTable(tn)) {
            Put p = new Put(Bytes.toBytes("nick")); // row key in the HBase table
            // addColumn(family, qualifier, value)
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("ff1"), Bytes.toBytes("test1"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("ff2"), Bytes.toBytes("test2"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("ff3"), Bytes.toBytes("test3"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("ff4"), Bytes.toBytes("test4"));
            table.put(p);
        }
    }
}
查看写入结果