当前工作中遇到一个场景,我们提供接口服务被其它系统调用.调用比较频繁,需要将调用数据库保存起来,以备以后数据分析使用. 中间碰到了很多坑,最终实现了通过java来实现操作HBASE数据库.直接贴代码吧.
一.代码
先列出最终代码吧.然后在记录下填坑的过程(开发工程为gradle,为通用性转成maven本代码为精简后的代码,实际中比这复杂,所以其中部分问题是实际工程中碰到的).
1.1 pom.xml中的依赖
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>3.8.1</version>
        <scope>test</scope>
    </dependency>
    <!-- hbase-client 1.1.3 pulls in guava 12.x transitively; exclude it and
         pin guava 15.0 below — guava 18+ removed APIs the HBase client needs
         (see section 2.4: Stopwatch IllegalAccessError). -->
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>1.1.3</version>
        <exclusions>
            <exclusion>
                <artifactId>guava</artifactId>
                <groupId>com.google.guava</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>com.google.guava</groupId>
        <artifactId>guava</artifactId>
        <version>15.0</version>
    </dependency>
    <!-- system-scoped tools.jar works around "Missing artifact jdk.tools"
         (see section 2.1); requires JAVA_HOME to point at a JDK, not a JRE. -->
    <dependency>
        <groupId>jdk.tools</groupId>
        <artifactId>jdk.tools</artifactId>
        <version>1.7</version>
        <scope>system</scope>
        <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>
</dependencies>
1.2 java代码
package cn.bob.hbase;
import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry;
import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes;
import cn.htmlv5.bob.hbase.ObjectAndByte;
public class HbaseUtil { private static String SERIES = "s"; private static String TABLENAME = "AF_TABLE"; private static Connection conn; private static String hbaseIp = "192.168.0.1,192.168.0.2,192.168.0.3,192.168.0.4,192.168.0.5";
public static void init() { Configuration config = HBaseConfiguration.create(); config.set("hbase.zookeeper.quorum", hbaseIp); try { conn = ConnectionFactory.createConnection(config); createTable(TABLENAME, SERIES); } catch (IOException e) { e.printStackTrace(); } }
//创建表 public static void createTable(String tableName, String seriesStr) throws IllegalArgumentException, IOException { Admin admin = null; TableName table = TableName.valueOf(tableName); try { admin = conn.getAdmin(); if (!admin.tableExists(table)) { System.out.println(tableName + " table not Exists"); HTableDescriptor descriptor = new HTableDescriptor(table); String[] series = seriesStr.split(","); for (String s : series) { descriptor.addFamily(new HColumnDescriptor(s.getBytes())); } admin.createTable(descriptor); } } finally { IOUtils.closeQuietly(admin); } }
//添加数据 public static void add(String rowKey, Map<String, Object> columns) throws IOException { Table table = null; try { table = conn.getTable(TableName.valueOf(TABLENAME)); Put put = new Put(Bytes.toBytes(rowKey)); for (Map.Entry<String, Object> entry : columns.entrySet()) { put.addColumn(SERIES.getBytes(), Bytes.toBytes(entry.getKey()), new ObjectAndByte().toByteArray(entry.getValue())); } table.put(put); } finally { IOUtils.closeQuietly(table); } }
//根据rowkey获取数据 public static Map<String, String> getAllValue(String rowKey) throws IllegalArgumentException, IOException { Table table = null; Map<String, String> resultMap = null; try { table = conn.getTable(TableName.valueOf(TABLENAME)); Get get = new Get(Bytes.toBytes(rowKey)); get.addFamily(SERIES.getBytes()); Result res = table.get(get); Map<byte[], byte[]> result = res.getFamilyMap(SERIES.getBytes()); Iterator<Entry<byte[], byte[]>> it = result.entrySet().iterator(); resultMap = new HashMap<String, String>(); while (it.hasNext()) { Entry<byte[], byte[]> entry = it.next(); resultMap.put(Bytes.toString(entry.getKey()), Bytes.toString(entry.getValue())); } } finally { IOUtils.closeQuietly(table); } return resultMap; }
//根据rowkey和column获取数据 public static String getValueBySeries(String rowKey, String column) throws IllegalArgumentException, IOException { Table table = null; String resultStr = null; try { table = conn.getTable(TableName.valueOf(TABLENAME)); Get get = new Get(Bytes.toBytes(rowKey)); get.addColumn(Bytes.toBytes(SERIES), Bytes.toBytes(column)); Result res = table.get(get); byte[] result = res.getValue(Bytes.toBytes(SERIES), Bytes.toBytes(column)); resultStr = Bytes.toString(result); } finally { IOUtils.closeQuietly(table); } return resultStr; }
//根据table查询所有数据 public static void getValueByTable() throws Exception { Map<String, String> resultMap = null; Table table = null; try { table = conn.getTable(TableName.valueOf(TABLENAME)); ResultScanner rs = table.getScanner(new Scan()); for (Result r : rs) { System.out.println("获得到rowkey:" + new String(r.getRow())); for (KeyValue keyValue : r.raw()) { System.out.println( "列:" + new String(keyValue.getFamily()) + "====值:" + new String(keyValue.getValue())); } } } finally { IOUtils.closeQuietly(table); } } //删除表 public static void dropTable(String tableName) throws IOException { Admin admin = null; TableName table = TableName.valueOf(tableName); try { admin = conn.getAdmin(); if (admin.tableExists(table)) { admin.disableTable(table); admin.deleteTable(table); } } finally { IOUtils.closeQuietly(admin); } }
public static void main(String[] args) throws Exception { init();
//创建表 createTable(TABLENAME,"");
//添加数据1 String rowKey1 = "u1001c1001"; Map<String, Object> columns = new HashMap<String, Object>(); columns.put("original_data", "original_data_u1001c1001_1"); columns.put("original_data", "original_data_u1001c1001_2"); add(rowKey1,columns); //添加数据2 String rowKey2 = "u1001c1001"; Map<String, Object> columns2 = new HashMap<String, Object>(); columns2.put("original_data", "original_data_u1001c1002_1"); columns2.put("original_data", "original_data_u1001c1002_2"); add(rowKey2,columns2);
//查询数据1-1 Map<String, String> map1= getAllValue(rowKey1); for (Map.Entry<String, String> entry : map1.entrySet()) { System.out.println("map1-"+entry.getKey()+":"+entry.getValue()); } //查询数据1-2 Map<String, String> map2= getAllValue(rowKey2); for (Map.Entry<String, String> entry : map2.entrySet()) { System.out.println("map2-"+entry.getKey()+":"+entry.getValue()); }
//查询数据2 String original_data_value = getValueBySeries(rowKey1,"original_data"); System.out.println("original_data_value->"+original_data_value);
//查看表中所有数据 getValueByTable(); } } |
二. 中间碰到的坑:
2.1 Maven错误信息: Missing artifact jdk.tools:jdk.tools:jar:1.6
2.1.1 问题原因
读取不到tools.jar
2.1.2 解决方法
在pom.xml中增加如下依赖:
<dependency> <groupId>jdk.tools</groupId> <artifactId>jdk.tools</artifactId> <version>1.7</version> <scope>system</scope> <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath> </dependency> |
参考文章:
http://blog.csdn.net/huoyunshen88/article/details/40657895
2.2 启动报错 NoSuchMethodError
Caused by: java.lang.NoSuchMethodError:com.google.common.collect.MapMaker.expireAfterWrite(JLjava/util/concurrent/TimeUnit;)Lcom/google/common/collect/MapMaker;
2.2.1 问题原因
包冲突. ctrl+shift+T 查找com.google.common.collect.MapMaker会发现有2个包有该类. 一个在google-collections-1.0.jar中,一个在guava18.jar中.都是google的jar包,但是包名类名是一样的,需要删掉google-collections-1.0.jar,但是经查看gradle中没有用到google-collections-1.0.jar,是其它系统依赖的.
2.2.2 解决方法
2.2.2.1 gradle
查看gradle的包依赖
$ gradle projects
:projects
------------------------------------------------------------ Root project ------------------------------------------------------------
Root project 'projecttest' +--- Project ':projecttest-api' +--- Project ':projecttest-boss' +--- Project ':projecttest-common' +--- Project ':projecttest-consumer' +--- Project ':projecttest-domain' \--- Project ':projecttest-service'
To see a list of the tasks of a project, run gradle <project-path>:tasks For example, try running gradle :projecttest-api:tasks
BUILD SUCCESSFUL
$ gradle :projecttest-consumer:dependencies
$ gradle :projecttest-consumer:dependencies |grep 'google' +--- com.google.guava:guava:18.0 | +--- com.google.guava:guava:18.0 | | +--- com.google.guava:guava:18.0 | +--- com.google.guava:guava:18.0 | +--- com.google.guava:guava:18.0 | +--- com.google.guava:guava:18.0 | +--- com.google.guava:guava:18.0 |
在build.gradle中增加如下排除配置(写法与2.4.2.2节一致):
compile(group: 'org.apache.hbase', name: 'hbase-client', version: '1.1.3') { exclude(module: 'guava') }
参考文章: http://www.tuicool.com/articles/fQjimm3
2.2.2.2 maven
举例写法: <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-client</artifactId> <version>1.1.3</version> <exclusions> <exclusion> <artifactId>guava</artifactId> <groupId>com.google.guava</groupId> </exclusion> </exclusions> </dependency>
|
参考文章: http://fufeng.iteye.com/blog/1755167
2.2.2.3 maven转gradle
先保证本机安装了gradle 2.0以上的版本 然后在maven根目录下运行 gradle init --type pom 安装gradle之后配置gradle环境变量
变量名: GRADLE_HOME 变量值: D:\Program Files2\gradle-2.13
|
2.3.启动报错java.io.IOException
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
2.3.1 问题原因
缺少winutils.exe
2.3.2 解决方法
2.3.2.1 配置HADOOP_HOME
变量名: HADOOP_HOME
变量值: D:\Program Files2\hadoop-2.7.2
2.3.2.2 下载并安装winutils.exe
下载地址如下: https://github.com/srccodes/hadoop-common-2.2.0-bin
将解压后的文件放置到前面配置的HADOOP_HOME所在的bin目录下.
参考文章如下: http://www.cnblogs.com/hyl8218/p/5492450.html
2.4.启动报错org.apache.zookeeper.KeeperException
2.4.1 问题原因
报错详细内容如下:
2016-12-22 11:02:07.155 WARN [main] [RecoverableZooKeeper.java:275] - Possibly transient ZooKeeper, quorum=192.168.0.1:2181,1962.168.0.2:2181,192.168.0.3:2181,192.168.0.4:2181, exception=org.apache.zookeeper.KeeperException$ConnectionLossException: KeeperErrorCode = ConnectionLoss for /hbase/hbaseid 2016-12-22 11:02:07.202 INFO [main-SendThread(192.168.0.1:2181)] [ClientCnxn.java:852] - Socket connection established to 192.168.0.1/192.168.0.2:2181, initiating session 2016-12-22 11:02:07.280 INFO [main-SendThread(192.168.0.2:2181)] [ClientCnxn.java:1235] - Session establishment complete on server 192.168.0.1/192.168.0.2:2181, sessionid = 0x158d28b2bce77fe, negotiated timeout = 60000 org.apache.hadoop.hbase.DoNotRetryIOException: java.lang.IllegalAccessError: tried to access method com.google.common.base.Stopwatch.<init>()V from class org.apache.hadoop.hbase.zookeeper.MetaTableLocator at org.apache.hadoop.hbase.client.RpcRetryingCaller.translateException(RpcRetryingCaller.java:229) at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:140) at org.apache.hadoop.hbase.client.HTable.get(HTable.java:889) at org.apache.hadoop.hbase.client.HTable.get(HTable.java:855) at com.service.util.hbase.HbaseUtil.getAllValue(HbaseUtil.java:125) at com.service.util.hbase.HbaseUtil.main(HbaseUtil.java:191) Caused by: java.lang.IllegalAccessError: tried to access method |
具体原因: 需使用guava 15,不能使用guava 18(hbase-client 1.1.3与guava 18不兼容).
2.4.2 解决方法
2.4.2.1 maven
<dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-client</artifactId> <version>1.1.3</version> <exclusions> <exclusion> <artifactId>guava</artifactId> <groupId>com.google.guava</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>15.0</version> </dependency> |
2.4.2.2 gradle
compile(group: 'org.apache.hbase', name: 'hbase-client', version:'1.1.3') { exclude(module: 'guava') } compile "com.google.guava:guava:15.0" |
参考文章: http://blog.csdn.net/ludonqin/article/details/52387769
2.5.启动报错 RetriesExhaustedException
报错内容:
2016-12-22 19:42:30.836 INFO [main-SendThread(192.168.0.1:2181)] [ClientCnxn.java:1235] - Session establishment complete on server 192.168.0.1/192.168.0.2:2181, sessionid = 0x158d28b2bce7bf0, negotiated timeout = 60000 org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions: Thu Dec 22 19:43:40 CST 2016, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=80695: row 'stdproc_approval_info,,' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=slave3,16020,1480997709183, seqNum=0
at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.throwEnrichedException(RpcRetryingCallerWithReadReplicas.java:271) at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:199) |
2.5.1 问题原因
没有配置host映射.
2.5.2 解决方法
文件路径: C:\Windows\System32\drivers\etc\hosts
在配置中增加hbase服务映射(如果因为权限不能直接修改,则复制到其它位置然后修改后替换该位置文件),添加的配置信息如下:
192.168.0.1 master
192.168.0.2 slave1
192.168.0.3 slave2
192.168.0.4 slave3
192.168.0.5 slave4
2.6 启动报错NoClassDefFoundError
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'freeMarkerConfigurer' defined in class path resource [org/springframework/boot/autoconfigure/freemarker/FreeMarkerAutoConfiguration$FreeMarkerWebConfiguration.class]: Initialization of bean failed; nested exception is java.lang.NoClassDefFoundError: Could not initialize class |
2.6.1 问题原因
log4j-over-slf4j.jar 和slf4j-log4j12.jar冲突
2.6.2 解决方法
去掉slf4j-log4j12
configurations { all*.exclude group: 'org.slf4j', module: 'slf4j-log4j12' } |
参考文章: http://blog.csdn.net/ouyang111222/article/details/49700733
三. 其它:
3.1 Hbase增删改查例子
http://javacrazyer.iteye.com/blog/1186881
3.2 Table设计
http://blog.csdn.net/yfkiss/article/details/26380467
3.3 rowkey设计
http://www.open-open.com/lib/view/open1417612091323.html
难点是环境的搭建呀!