1. 准备条件:
# linux centos7(已安装JDK8,且JAVA_HOME已配置),ip为172.20.10.9
# Win10, eclipse(已装maven插件)ip为172.20.10.10
2. 配置IP映射:
# Win10下进入C:\Windows\System32\drivers\etc,修改hosts文件,在文件后追加,如下:
172.20.10.9 hbase-master
#linux机器root用户:
vi /etc/hosts
追加:
本机ip 本机hostname (注:如果不配置此项,启动hbase shell 时会报zookeeper异常)
3. 安装HBase:
3.2 将hbase-1.2.5-bin.tar.gz拷贝至 /hbase/soft (注:/hbase目录是hbase用户根目录)
3.3 cd /hbase/soft
tar -xzvf hbase-1.2.5-bin.tar.gz
mkdir /hbase/soft/hbase-1.2.5/data (这个目录是配置时用到)
3.4 配置hbase:
vi /hbase/soft/hbase-1.2.5/conf/hbase-site.xml
配置内容为:
<configuration>
<!-- Standalone mode: hbase.rootdir points at the local filesystem
     (file:// scheme) instead of HDFS, so no Hadoop cluster is needed.
     The directory was created in step 3.3 above. -->
<property>
<name>hbase.rootdir</name>
<value>file:///hbase/soft/hbase-1.2.5/data</value>
</property>
</configuration>
3.5 切换到root用户,关闭防火墙 systemctl stop firewalld.service(如果后续测试时还说无法连接则:iptables -F)
3.6 切换回hbase用户,启动hbase:
sh /hbase/soft/hbase-1.2.5/bin/start-hbase.sh
进入hbase的shell模式:sh /hbase/soft/hbase-1.2.5/bin/hbase shell ,在该模式下输入命令:list ,输出以下内容则表示可用:
[hbase@hbase-master bin]$ ./hbase shell SLF4J: Class path contains multiple SLF4J bindings. SLF4J: Found binding in [jar:file:/hbase/soft/hbase-1.2.5/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/hbase/soft/hadoop-2.6.5/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] HBase Shell; enter 'help<RETURN>' for list of supported commands. Type "exit<RETURN>" to leave the HBase Shell Version 1.2.5, rd7b05f79dee10e0ada614765bb354b93d615a157, Wed Mar 1 00:34:48 CST 2017 hbase(main):001:0> list TABLE 0 row(s) in 0.8460 seconds => [] hbase(main):002:0> |
4.测试
4.1 创建maven项目,项目pom.xml依赖
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.3</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.2.4</version>
    </dependency>
    <dependency>
        <groupId>jdk.tools</groupId>
        <artifactId>jdk.tools</artifactId>
        <version>1.8</version>
        <scope>system</scope>
        <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>
</dependencies>
4.2 示例代码
package com.wyiwei.cf_hbase; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; public class Test { static final String rowKey = "row1"; static HBaseAdmin hBaseAdmin; static Configuration conf; static { conf = HBaseConfiguration.create(); conf.set("hbase.zookeeper.quorum", "hbase-master"); try { hBaseAdmin = new HBaseAdmin(conf); } catch (IOException e) { e.printStackTrace(); } } public static void createTable(String tableName, String[] columns) throws Exception { dropTable(tableName); HTableDescriptor hTableDescriptor = new HTableDescriptor(tableName); for (String columnName : columns) { HColumnDescriptor column = new HColumnDescriptor(columnName); hTableDescriptor.addFamily(column); } hBaseAdmin.createTable(hTableDescriptor); System.out.println("create table successed"); } public static void dropTable(String tableName) throws Exception { if (hBaseAdmin.tableExists(tableName)) { hBaseAdmin.disableTable(tableName); hBaseAdmin.deleteTable(tableName); } System.out.println("drop table successed"); } public static HTable getHTable(String tableName) throws Exception { return new HTable(conf, tableName); } public static void insert(String 
tableName, Map<String, String> map) throws Exception { HTable hTable = getHTable(tableName); byte[] row1 = Bytes.toBytes(rowKey); Put p1 = new Put(row1); for (String columnName : map.keySet()) { byte[] value = Bytes.toBytes(map.get(columnName)); String[] str = columnName.split(":"); byte[] family = Bytes.toBytes(str[0]); byte[] qualifier = null; if (str.length > 1) { qualifier = Bytes.toBytes(str[1]); } p1.add(family, qualifier, value); } hTable.put(p1); Get g1 = new Get(row1); Result result = hTable.get(g1); System.out.println("Get: " + result); System.out.println("insert successed"); } public static void delete(String tableName, String rowKey) throws Exception { HTable hTable = getHTable(tableName); List<Delete> list = new ArrayList<Delete>(); Delete d1 = new Delete(Bytes.toBytes(rowKey)); list.add(d1); hTable.delete(list); Get g1 = new Get(Bytes.toBytes(rowKey)); Result result = hTable.get(g1); System.out.println("Get: " + result); System.out.println("delete successed"); } public static void selectOne(String tableName, String rowKey) throws Exception { HTable hTable = getHTable(tableName); Get g1 = new Get(Bytes.toBytes(rowKey)); Result result = hTable.get(g1); foreach(result); System.out.println("selectOne end"); } private static void foreach(Result result) throws Exception { for (KeyValue keyValue : result.raw()) { StringBuilder sb = new StringBuilder(); sb.append(Bytes.toString(keyValue.getRow())).append("\t"); sb.append(Bytes.toString(keyValue.getFamily())).append("\t"); sb.append(Bytes.toString(keyValue.getQualifier())).append("\t"); sb.append(keyValue.getTimestamp()).append("\t"); sb.append(Bytes.toString(keyValue.getValue())).append("\t"); System.out.println(sb.toString()); } } public static void selectAll(String tableName) throws Exception { HTable hTable = getHTable(tableName); Scan scan = new Scan(); ResultScanner resultScanner = null; try { resultScanner = hTable.getScanner(scan); for (Result result : resultScanner) { foreach(result); } } catch 
(Exception e) { e.printStackTrace(); } finally { if (resultScanner != null) { resultScanner.close(); } } System.out.println("selectAll end"); } public static void main(String[] args) throws Exception { String tableName = "tableTest"; String[] columns = new String[] { "column_A", "column_B" }; createTable(tableName, columns); Map<String, String> map = new HashMap<String, String>(); map.put("column_A", "AAA"); map.put("column_B:1", "b1"); map.put("column_B:2", "b2"); insert(tableName, map); selectOne(tableName, rowKey); selectAll(tableName); delete(tableName, rowKey); dropTable(tableName); } } |
4.3 运行输出:
log4j:WARN No appenders could be found for logger (org.apache.hadoop.security.Groups). log4j:WARN Please initialize the log4j system properly. log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. drop table successed create table successed Get: keyvalues={row1/column_A:/1523590852523/Put/vlen=3/seqid=0, row1/column_B:1/1523590852523/Put/vlen=2/seqid=0, row1/column_B:2/1523590852523/Put/vlen=2/seqid=0} insert successed row1 column_A 1523590852523 AAA row1 column_B 1 1523590852523 b1 row1 column_B 2 1523590852523 b2 selectOne end row1 column_A 1523590852523 AAA row1 column_B 1 1523590852523 b1 row1 column_B 2 1523590852523 b2 selectAll end Get: keyvalues=NONE delete successed |