Linux系统安装了Hadoop2.5.2和Hbase-0.98.12
两者都是采用伪分布的方式进行安装。
package paic.hbase.com;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
/**
 * Minimal HBase 0.98 client demo: connects to a pseudo-distributed cluster
 * over ZooKeeper and (re)creates a table with four column families.
 */
public class HbaseTest {

    /** Shared client configuration, built once when the class is loaded. */
    private static Configuration conf;

    static {
        conf = HBaseConfiguration.create();
        conf.set("hbase.rootdir", "hdfs://192.168.56.2:9000/hbase");
        // Required when running from Eclipse on a remote client machine,
        // otherwise the ZooKeeper ensemble cannot be located.
        conf.set("hbase.zookeeper.quorum", "192.168.56.2");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
    }

    /**
     * Creates a table with column families cf1..cf4. If a table with the
     * same name already exists, it is disabled and deleted first so the
     * method always ends with a fresh table.
     *
     * @param tableName name of the table to (re)create
     */
    public static void createTable(String tableName) {
        System.out.println("start create table ......");
        HBaseAdmin hbAdmin = null;
        try {
            hbAdmin = new HBaseAdmin(conf);
            if (hbAdmin.tableExists(tableName)) {
                // A table must be disabled before HBase will let us delete it.
                hbAdmin.disableTable(tableName);
                hbAdmin.deleteTable(tableName);
                System.out.println(tableName + " exists, deleted ....");
            }
            // TableName.valueOf(...) replaces the deprecated
            // HTableDescriptor(String) constructor in the 0.98 API.
            HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
            htd.addFamily(new HColumnDescriptor("cf1"));
            htd.addFamily(new HColumnDescriptor("cf2"));
            htd.addFamily(new HColumnDescriptor("cf3"));
            htd.addFamily(new HColumnDescriptor("cf4"));
            hbAdmin.createTable(htd);
        } catch (IOException e) {
            // MasterNotRunningException and ZooKeeperConnectionException are
            // both subclasses of IOException, so this one catch covers all
            // three failure modes the original code handled separately.
            e.printStackTrace();
        } finally {
            // HBaseAdmin holds a ZooKeeper/RPC connection; always release it.
            if (hbAdmin != null) {
                try {
                    hbAdmin.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        System.out.println("end create table ......");
    }

    public static void main(String[] args) {
        createTable("testTable");
    }
}
第一次执行报错如下:
start create table ......
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
java.io.IOException: Attempt to start meta tracker failed.
at org.apache.hadoop.hbase.catalog.CatalogTracker.start(CatalogTracker.java:204)
at org.apache.hadoop.hbase.client.HBaseAdmin.startCatalogTracker(HBaseAdmin.java:262)
at org.apache.hadoop.hbase.client.HBaseAdmin.getCatalogTracker(HBaseAdmin.java:235)
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:306)
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:322)
at paic.hbase.com.HbaseTest.createTable(HbaseTest.java:25)
at paic.hbase.com.HbaseTest.main(HbaseTest.java:52)
Caused by: org.apache.zookeeper.KeeperException$ConnectionLossException: KeeperErrorCode = ConnectionLoss for /hbase/meta-region-server
at org.apache.zookeeper.KeeperException.create(KeeperException.java:99)
at org.apache.zookeeper.KeeperException.create(KeeperException.java:51)
at org.apache.zookeeper.ZooKeeper.exists(ZooKeeper.java:1045)
at org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper.exists(RecoverableZooKeeper.java:222)
at org.apache.hadoop.hbase.zookeeper.ZKUtil.watchAndCheckExists(ZKUtil.java:427)
at org.apache.hadoop.hbase.zookeeper.ZooKeeperNodeTracker.start(ZooKeeperNodeTracker.java:77)
at org.apache.hadoop.hbase.catalog.CatalogTracker.start(CatalogTracker.java:200)
... 6 more
end create table ......
解决方案如下:
这种问题大概是Linux系统中防火墙没有关闭,试着去关闭一下防火墙。
由于我虚拟机上安装的CentOS7的Linux系统,因此使用如下命令:
[root@hadoop ~]# systemctl stop firewalld.service (停止防火墙)
[root@hadoop ~]# systemctl disable firewalld.service (禁止防火墙开机自启)
[root@hadoop ~]# iptables -L (列出防火墙规则,确认已无拦截规则;查看防火墙运行状态可用 systemctl status firewalld)
参见http://my.oschina.net/miger/blog/320711
这样之后再次执行,发现了另一种报错:
start create table ......
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=35, exceptions:
Mon Jun 08 10:00:36 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:00:36 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:00:36 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:00:37 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:00:39 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:00:43 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:00:54 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:01:04 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:01:14 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:01:24 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:01:44 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:02:04 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:02:24 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:02:44 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:03:04 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:03:24 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:03:44 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:04:04 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:04:25 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:04:45 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:05:05 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:05:25 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:05:45 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:06:05 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:06:25 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:06:45 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:07:05 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:07:25 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:07:46 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:08:06 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:08:26 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:08:46 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:09:06 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:09:26 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
Mon Jun 08 10:09:46 CST 2015, org.apache.hadoop.hbase.client.RpcRetryingCaller@28c5ea2, java.net.UnknownHostException:
unknown host: hadoop
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:131)
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:91)
at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:284)
at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:189)
at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:184)
at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:110)
at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:775)
at org.apache.hadoop.hbase.catalog.MetaReader.fullScan(MetaReader.java:542)
at org.apache.hadoop.hbase.catalog.MetaReader.tableExists(MetaReader.java:310)
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:308)
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:322)
at paic.hbase.com.HbaseTest.createTable(HbaseTest.java:27)
at paic.hbase.com.HbaseTest.main(HbaseTest.java:54)
Caused by: java.net.UnknownHostException: unknown host: hadoop
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.<init>(RpcClient.java:385)
at org.apache.hadoop.hbase.ipc.RpcClient.createConnection(RpcClient.java:351)
at org.apache.hadoop.hbase.ipc.RpcClient.getConnection(RpcClient.java:1530)
at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1442)
at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1661)
at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1719)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:31392)
at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:318)
at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:163)
at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:58)
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:115)
... 12 more
end create table ......
这个问题的解决方案如下:
需要修改win7下的C:\Windows\System32\drivers\etc\hosts这个文件,但是由于这个文件修改涉及到权限的问题,需要去修改一下权限。具体参见网上的 Win7系统修改hosts文件不能保存的问题。 www.jb51.net/os/windows/75930.html
这个在hosts的文件中添加一行
192.168.56.2 hadoop
其中192.168.56.2是Linux系统的ip地址,hadoop是hostname主机名。
修改并成功保存之后,在win下ping
ping 192.168.56.2 和 ping hadoop 如果都能成功,说明是OK