前几天搭好了flume+hbase+hadoop的环境,今天准备从hbase查询数据做展现测试,所以先解决一下win8本地开发环境的问题。网上也看了不少大神的资料,但都不完全靠谱,所以自己根据本机的实际配置整理一份,留档备查!
下面我们就来搞一搞,首先写下查询hbase的代码,但是发现很多api都过期了,所以找了下最新的api,搞了最新的,最后封装成POJO
package com.pactera.mq;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
public class NewApiHbase {
private static Configuration conf = null;
static {
System.setProperty("hadoop.home.dir", "E:\\hadoop-2.5.1");
Configuration HBASE_CONFIG = new Configuration();
// 与hbase/conf/hbase-site.xml中hbase.zookeeper.quorum配置的值相同
HBASE_CONFIG.set("hbase.zookeeper.quorum", "192.168.1.116");
// 与hbase/conf/hbase-site.xml中hbase.zookeeper.property.clientPort配置的值相同
HBASE_CONFIG.set("hbase.zookeeper.property.clientPort", "2281");
HBASE_CONFIG.set("hbase.master", "192.168.1.116:60010");
HBASE_CONFIG.set("hbase.rootdir", "hdfs://192.168.1.116:9000/hbase");
conf = HBaseConfiguration.create(HBASE_CONFIG);
}
// 创建数据库表
public static void createTable(String tableName, String[] columnFamilys)
throws Exception {
// 新建一个数据库管理员
HBaseAdmin hAdmin = new HBaseAdmin(conf);
if (hAdmin.tableExists(tableName)) {
System.out.println("表已经存在");
System.exit(0);
} else {
// 新建一个 scores 表的描述
HTableDescriptor tableDesc = new HTableDescriptor(
TableName.valueOf(tableName));
// 在描述里添加列族
for (String columnFamily : columnFamilys) {
tableDesc.addFamily(new HColumnDescriptor(columnFamily));
}
// 根据配置好的描述建表
hAdmin.createTable(tableDesc);
System.out.println("创建表成功");
}
hAdmin.close();
}
// 删除数据库表
public static void deleteTable(String tableName) throws Exception {
// 新建一个数据库管理员
HBaseAdmin hAdmin = new HBaseAdmin(conf);
if (hAdmin.tableExists(tableName)) {
// 关闭一个表
hAdmin.disableTable(tableName);
// 删除一个表
hAdmin.deleteTable(tableName);
System.out.println("删除表成功");
} else {
System.out.println("删除的表不存在");
System.exit(0);
}
hAdmin.close();
}
// 添加一条数据
public static void addRow(String tableName, String row,
String columnFamily, String column, String value) throws Exception {
HTable table = new HTable(conf, tableName);
Put put = new Put(Bytes.toBytes(row));
// 参数出分别:列族、列、值
put.add(Bytes.toBytes(columnFamily), Bytes.toBytes(column),
Bytes.toBytes(value));
table.put(put);
table.close();
}
// 删除一条数据
public static void delRow(String tableName, String row) throws Exception {
HTable table = new HTable(conf, tableName);
Delete del = new Delete(Bytes.toBytes(row));
table.delete(del);
table.close();
}
// 删除多条数据
public static void delMultiRows(String tableName, String[] rows)
throws Exception {
HTable table = new HTable(conf, tableName);
List<Delete> list = new ArrayList<Delete>();
for (String row : rows) {
Delete del = new Delete(Bytes.toBytes(row));
list.add(del);
}
table.delete(list);
table.close();
}
// get row
public static void getRow(String tableName, String row) throws Exception {
HTable table = new HTable(conf, tableName);
Get get = new Get(Bytes.toBytes(row));
Result result = table.get(get);
// 输出结果
for (Cell rowKV : result.rawCells()) {
System.out.print("Row Name: "
+ new String(CellUtil.cloneRow(rowKV)) + " ");
System.out.print("Timestamp: " + rowKV.getTimestamp() + " ");
System.out.print("column Family: "
+ new String(CellUtil.cloneFamily(rowKV)) + " ");
System.out.print("column Name: "
+ new String(CellUtil.cloneQualifier(rowKV)) + " ");
System.out.println("Value: "
+ new String(CellUtil.cloneValue(rowKV)) + " ");
}
table.close();
}
// get all records
public static void getAllRows(String tableName) throws Exception {
HTable table = new HTable(conf, tableName);
Scan scan = new Scan();
ResultScanner results = table.getScanner(scan);
// 输出结果
for (Result result : results) {
for (Cell rowKV : result.rawCells()) {
System.out.println("GOGOGO=========================");
System.out.println("Row Name: "
+ new String(CellUtil.cloneRow(rowKV)) + " ");
System.out.println("Timestamp: " + rowKV.getTimestamp() + " ");
System.out.println("column Family: "
+ new String(CellUtil.cloneFamily(rowKV)) + " ");
System.out.println("column Name: "
+ new String(CellUtil.cloneQualifier(rowKV)) + " ");
System.out.println("Value: "
+ new String(CellUtil.cloneValue(rowKV)) + " ");
}
}
table.close();
}
/**
* 获取所有数据的key
*
* @param tableName
* @throws Exception
*/
public static List<String> getAllRowsKey(String tableName) throws Exception {
HTable table = new HTable(conf, tableName);
Scan scan = new Scan();
ResultScanner results = table.getScanner(scan);
List<String> list = new ArrayList<String>();
// 输出结果
for (Result result : results) {
for (Cell rowKV : result.rawCells()) {
list.add(new String(CellUtil.cloneRow(rowKV)));
}
}
table.close();
return list;
}
/**
* 获取行数据并封装
*
* @param tableName
* @param row
* @throws Exception
*/
public static List<sysLogs> getRow2Model(String tableName, List<String> row)
throws Exception {
long a = System.currentTimeMillis();
HTable table = new HTable(conf, tableName);
List<sysLogs> list = new ArrayList<sysLogs>();
for (String r : row) {
Get get = new Get(Bytes.toBytes(r));
Result result = table.get(get);
sysLogs logs = new sysLogs();
// 输出结果
for (Cell rowKV : result.rawCells()) {
String name = new String(CellUtil.cloneQualifier(rowKV));
String value = new String(CellUtil.cloneValue(rowKV));
if ("machineName".equals(name))
logs.setMachineName(value);
if ("funcName".equals(name))
logs.setFuncName(value);
if ("serveName".equals(name))
logs.setServeName(value);
logs.setUuid(r);
logs.setTime(rowKV.getTimestamp());
list.add(logs);
}
}
table.close();
System.out.println("data is " + row.size());
System.out
.println("\r<br>执行耗时 : "
+ (System.currentTimeMillis() - a)
/ 1000f
+ " 秒 =========================================================");
return list;
}
// main
public static void main(String[] args) {
try {
String tableName = "logs";
long a = System.currentTimeMillis();
// // 第一步:创建数据库表:“users2”
// String[] columnFamilys = { "info", "course" };
// NewApiHbase.createTable(tableName, columnFamilys);
//
// // 第二步:向数据表的添加数据
// // 添加第一行数据
// NewApiHbase.addRow(tableName, "tht", "info", "age", "20");
// NewApiHbase.addRow(tableName, "tht", "info", "sex", "boy");
// NewApiHbase.addRow(tableName, "tht", "course", "china", "97");
// NewApiHbase.addRow(tableName, "tht", "course", "math", "128");
// NewApiHbase.addRow(tableName, "tht", "course", "english", "85");
// // 添加第二行数据
// NewApiHbase.addRow(tableName, "xiaoxue", "info", "age", "19");
// NewApiHbase.addRow(tableName, "xiaoxue", "info", "sex", "boy");
// NewApiHbase.addRow(tableName, "xiaoxue", "course", "china",
// "90");
// NewApiHbase.addRow(tableName, "xiaoxue", "course", "math",
// "120");
// NewApiHbase.addRow(tableName, "xiaoxue", "course", "english",
// "90");
// // 添加第三行数据
// NewApiHbase.addRow(tableName, "qingqing", "info", "age", "18");
// NewApiHbase.addRow(tableName, "qingqing", "info", "sex", "girl");
// NewApiHbase.addRow(tableName, "qingqing", "course", "china",
// "100");
// NewApiHbase.addRow(tableName, "qingqing", "course", "math",
// "100");
// NewApiHbase
// .addRow(tableName, "qingqing", "course", "english", "99");
// // 第三步:获取一条数据
// System.out.println("获取一条数据");
// NewApiHbase.getRow(tableName, "tht");
// 第四步:获取所有数据
// System.out.println("获取所有数据");
// NewApiHbase.getAllRows(tableName);
// // 第五步:删除一条数据
// System.out.println("删除一条数据");
// NewApiHbase.delRow(tableName, "tht");
// NewApiHbase.getAllRows(tableName);
// // 第六步:删除多条数据
// System.out.println("删除多条数据");
// String[] rows = { "xiaoxue", "qingqing" };
// NewApiHbase.delMultiRows(tableName, rows);
// NewApiHbase.getAllRows(tableName);
// // 第八步:删除数据库
// System.out.println("删除数据库");
// NewApiHbase.deleteTable(tableName);
// 获取所有的key
List<String> row = NewApiHbase.getAllRowsKey(tableName);
// 封装数据
List<sysLogs> list = NewApiHbase.getRow2Model(tableName, row);
for (sysLogs log : list) {
System.out.println("GOGOGOG==================================");
System.out.print("MachineName: " + log.getMachineName() + " ");
System.out.print("Timestamp: " + log.getTime() + " ");
System.out.print("FuncName: " + log.getFuncName() + " ");
System.out.print("ServeName: " + log.getServeName() + " ");
System.out.println("uuid: " + log.getUuid() + " ");
}
System.out.println("\r<br>执行耗时 : "
+ (System.currentTimeMillis() - a) / 1000f + " 秒 ");
} catch (Exception err) {
err.printStackTrace();
}
}
}
记得一定要配置下以下属性
System.setProperty("hadoop.home.dir", "E:\\hadoop-2.5.1"); 这个属性很关键:在代码里设置它之后,就不用再配置 HADOOP_HOME 环境变量并重启电脑,客户端就能找到本地的 Hadoop 目录
// 与hbase/conf/hbase-site.xml中hbase.zookeeper.quorum配置的值相同
HBASE_CONFIG.set("hbase.zookeeper.quorum", "192.168.1.116"); zoo的节点没得说加上
// 与hbase/conf/hbase-site.xml中hbase.zookeeper.property.clientPort配置的值相同
HBASE_CONFIG.set("hbase.zookeeper.property.clientPort", "2281");端口一定要和你hbase的一样
HBASE_CONFIG.set("hbase.master", "192.168.1.116:60010");这个是你hbase master的地址,不知道的话可以打开hbase管理页看看,当然如果上边显示localhost那你就得改下hbase的配置文件咯,改成地址OK
HBASE_CONFIG.set("hbase.rootdir", "hdfs://192.168.1.116:9000/hbase");
把你hadoop的包从服务器拷下来放到 E:\\hadoop-2.5.1 下直接用就可以了。如果你的bin目录里面没有winutils.exe这个文件,去https://github.com/srccodes/hadoop-common-2.2.0-bin下载一个,替换下bin目录就OK啦,然后配置环境变量HADOOP_HOME
还有一个比较重要的,要配置下hosts,不然客户端启动时会报主机名解析不到的错误。必须把hbase管理页上master一栏显示的主机名配置进hosts哦
比如
Master hadoop
你就要把你的hosts配置为 192.168.1.116 hadoop,因为客户端找的是hadoop这个主机名,这个hadoop其实就是你在linux上边配置的主机名
后边就一帆风顺没啥了