1. mvn 依赖
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>hbaseTest</groupId>
<artifactId>hbasetest</artifactId>
<version>1.0-SNAPSHOT</version>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-core -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>2.6.0-mr1-cdh5.12.0</version>
<type>pom</type>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase</artifactId>
<version>2.0.0-alpha-1</version>
<type>pom</type>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>2.0.0-alpha-1</version>
</dependency>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.8</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
</dependencies>
<repositories>
<repository>
<id>hadoop-cdh</id>
<name>cloudera</name>
<url>https://repository.cloudera.com/content/repositories/releases/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
</project>
2. java 代码
package com.clean.hbase;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
public class hbaseTest {

    // Table and column-family names used by this demo.
    private static final String TABLE_NAME = "clean";
    private static final String CF_DEFAULT = "cf";

    /**
     * Connects to HBase via ZooKeeper, creates the {@code clean} table if it
     * does not exist yet, writes one cell ({@code rowkey1/cf:col1}) and reads
     * it back, printing the value to stdout.
     *
     * <p>Fixes over the original: the logic used to live in a {@code static}
     * initializer (it ran on class load and any failure surfaced as an
     * {@code ExceptionInInitializerError}); the {@code Connection} and
     * {@code Admin} leaked on the exception path; {@code createTable} threw
     * {@code TableExistsException} on every run after the first; and byte
     * conversions used the platform-default charset.
     */
    public static void main(String[] args) {
        byte[] family = TABLE_NAME.isEmpty() ? new byte[0] : CF_DEFAULT.getBytes(StandardCharsets.UTF_8);
        byte[] qualifier = "col1".getBytes(StandardCharsets.UTF_8);
        byte[] rowKey = "rowkey1".getBytes(StandardCharsets.UTF_8);

        Configuration config = HBaseConfiguration.create();
        // ZooKeeper quorum of the HBase cluster (the VM's IP — see the hosts
        // setup in the notes below).
        config.set("hbase.zookeeper.quorum", "192.168.199.128");
        // config.set("hbase.zookeeper.property.clientPort", "2181");

        // try-with-resources guarantees connection/admin/table are closed even
        // when an HBase operation throws.
        try (Connection connection = ConnectionFactory.createConnection(config);
             Admin admin = connection.getAdmin()) {

            TableName tableName = TableName.valueOf(TABLE_NAME);
            // Only create the table when absent; repeat runs used to fail here.
            if (!admin.tableExists(tableName)) {
                HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
                tableDescriptor.addFamily(new HColumnDescriptor(CF_DEFAULT));
                admin.createTable(tableDescriptor);
            }

            try (Table table = connection.getTable(tableName)) {
                Put put = new Put(rowKey);
                put.addColumn(family, qualifier, "this is value".getBytes(StandardCharsets.UTF_8));
                table.put(put);

                Get get = new Get(rowKey);
                Result r = table.get(get);
                byte[] b = r.getValue(family, qualifier); // current version of the cell
                System.out.println(new String(b, StandardCharsets.UTF_8));
            }
        } catch (IOException e) {
            // Demo program: report the failure and exit.
            e.printStackTrace();
        }
    }
}
3. 遇到的问题
a. log4j:WARN No appenders could be found for logger (org.apache.hadoop.security.Groups).
将 log4j.properties 文件放入到 src/main/resources 下, 文件内容
log4j.rootLogger=DEBUG, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
b. 在使用java 连接 hbase 之前, 一定要执行命令 start-hbase.sh 否则就会报 connection refused ....
c. 运行程序过程中报 unknown host 错误
在window 下 C:\Windows\System32\drivers\etc\hosts文件中加入 ip 虚拟机名称 如:
192.168.199.128 server.clean.com
d. 2017-08-21 15:03:46,696 WARN [org.apache.zookeeper.ClientCnxn] - Session 0x0 for server null, unexpected error, closing socket connection and attempting reconnect
java.net.ConnectException: Connection timed out: no further information
关闭linux 防火墙
1) 临时生效,重启后复原
开启: service iptables start
关闭: service iptables stop
e. linux 下hosts 配置
vim /etc/hosts
192.168.199.128 server.clean.com localhost
ip linux主机名 localhost
f. Hbase:This server is in the failed servers list: server.clean.com/192.168.199.128:38812
在 hbase 安装目录下 conf/
vim regionservers
localhost
192.168.199.128
加入虚拟机 ip
g. 如果还不行, 禁用 linux 下的 Ipv6
http://blog.csdn.net/suplxj/article/details/7773423
h. http://你的虚拟机的ip:60010/master-status 查看hbase 服务器状态, 404
修改 hbase 安装目录 conf/ hbase-site.xml 文件内容
<configuration>
<property>
<name>hbase.rootdir</name>
<value>file:///home/hadoop/java/hbase-2.0.0-alpha-1/tmp</value>
</property>
<property>
<name>hbase.master.info.port</name>
<value>60010</value>
</property>
</configuration>
需要手动添加端口