HBase: exception when querying a large amount of data, and how to fix it

java.lang.RuntimeException: org.apache.hadoop.hbase.client.ScannerTimeoutException: 66216ms passed since the last invocation, timeout is currently set to 60000
        at org.apache.hadoop.hbase.client.AbstractClientScanner$1.hasNext(AbstractClientScanner.java:94)
        at hbase.HBaseDAOImp.getWIFIRows(HBaseDAOImp.java:550)
        at task.preference.WifeTagThread.getAllWifeData(WifeTagThread.java:67)
        at task.preference.WifeTagThread.taskExcute(WifeTagThread.java:138)
        at task.preference.WifeTagTask.execute(WifeTagTask.java:11)
        at org.quartz.core.JobRunShell.run(JobRunShell.java:216)
        at org.quartz.simpl.SimpleThreadPool$WorkerThread.run(SimpleThreadPool.java:549)
Caused by: org.apache.hadoop.hbase.client.ScannerTimeoutException: 66216ms passed since the last invocation, timeout is currently set to 60000
        at org.apache.hadoop.hbase.client.ClientScanner.next(ClientScanner.java:371)
        at org.apache.hadoop.hbase.client.AbstractClientScanner$1.hasNext(AbstractClientScanner.java:91)
        ... 6 more
Caused by: org.apache.hadoop.hbase.UnknownScannerException: org.apache.hadoop.hbase.UnknownScannerException: Name: 4145362, already closed?
        at org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3150)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29925)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:116)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:96)
        at java.lang.Thread.run(Thread.java:745)

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:284)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:204)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:59)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:117)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:93)
        at org.apache.hadoop.hbase.client.ClientScanner.next(ClientScanner.java:355)
        ... 7 more
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.UnknownScannerException): org.apache.hadoop.hbase.UnknownScannerException: Name: 4145362, already closed?
        at org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3150)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29925)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:116)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:96)
        at java.lang.Thread.run(Thread.java:745)

        at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1457)
        at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1661)
        at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1719)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:30387)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:174)
        ... 11 more
2015-08-07 13:16:16,634 [DefaultQuartzScheduler_Worker-1] [task.preference.WifeTagThread] [INFO] - quary 租户CY001total size 1239282 .....
2015-08-07 13:16:16,634 [DefaultQuartzScheduler_Worker-1] [task.preference.WifeTagThread] [INFO] - run this is task runing time 676604
2015-08-07 13:16:16,635 [DefaultQuartzScheduler_Worker-1] [task.preference.WifeTagThread] [INFO] - run time as AllSaleRecord1
2015-08-07 13:17:19,198 [DefaultQuartzScheduler_Worker-1] [org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper] [INFO] - Process identifier=hconnection-0x78de59f8 connecting to ZooKeeper ensemble=dmhadoop006:2181,dmhadoop004:2181,dmhadoop001:2181,dmhadoop003:2181,dmhadoop005:2181

Solutions:

Method 1: modify the client Configuration in code

Configuration conf = HBaseConfiguration.create();
conf.setLong(HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, 120000);

This changes the timeout programmatically, but the value is only set in the client application's configuration. In my tests it was not propagated to the remote region server, so the change had no effect; I am not sure whether anyone else has verified this.
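Separately from the lease setting, a common client-side mitigation (not part of the original post, just a sketch using the same 0.9x-era client API) is to lower the scanner caching: each call to the region server then returns fewer rows, the client goes back to the server sooner, and the scanner lease is renewed before the 60 s period elapses. The table name below is a placeholder.

// Sketch only: smaller scanner caching shortens the time between scanner RPCs,
// which keeps the server-side lease alive while rows are being processed.
Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "t_wifi_record");   // placeholder table name
Scan scan = new Scan();
scan.setCaching(100);                               // fetch at most 100 rows per RPC
ResultScanner scanner = table.getScanner(scan);
try {
    for (Result r : scanner) {
        // keep per-row work short; long processing between next() calls
        // is what lets the 60 s lease expire on the region server
    }
} finally {
    scanner.close();                                // releases the scanner lease
    table.close();
}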

Method 2: modify the configuration file directly

  <property>
    <name>hbase.regionserver.lease.period</name>
    <value>900000</value> <!-- 900 000, 15 minutes -->
  </property>
  <property>
    <name>hbase.rpc.timeout</name>
    <value>900000</value> <!-- 15 minutes -->
  </property>

The lease is enforced by the region servers, so hbase.regionserver.lease.period has to be set in hbase-site.xml on the server side, and the region servers have to pick up the change before it takes effect.
Alternatively, load the data in batches on the client using threads: the scan is only used to collect row keys (KeyOnlyFilter), and the actual rows are then fetched with batched Gets on a thread pool, so no scanner has to stay open for long. The code is as follows:

public List<String> getWIFIRowKey(String tableName, String startRow,
            String stopRow) {
        List<String> rowkeys = new ArrayList<String>();
        HTableInterface table = null;
        ResultScanner scanner = null;
        try {
            table = hTablePool.getTable(tableName);
            Scan scan = new Scan();
            List<Filter> filters = new ArrayList<Filter>();
            Filter kof = new KeyOnlyFilter();
            Filter filter1 = new RowFilter(CompareOp.GREATER_OR_EQUAL,
                    new BinaryPrefixComparator(startRow.getBytes()));
            Filter filter2 = new RowFilter(CompareOp.LESS_OR_EQUAL,
                    new BinaryPrefixComparator(stopRow.getBytes()));
            filters.add(filter1);
            filters.add(filter2);
            filters.add(kof);
            FilterList filterList = new FilterList(
                    FilterList.Operator.MUST_PASS_ALL, filters);
            scan.setFilter(filterList);

            scanner = table.getScanner(scan);
            for (Result rs : scanner) {
                rowkeys.add(new String(rs.getRow()));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // close the scanner (releasing its lease) and the table handle
            try {
                if (scanner != null) {
                    scanner.close();
                }
                if (table != null) {
                    table.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return rowkeys;
    }

    public Map<String, List<WifiInfo>> getWIFIRows(String tableName,
            String family, List<String> filterColumn, String startRow,
            String stopRow) {
        
        List<String> rowKeys = getWIFIRowKey(tableName, startRow, stopRow);
        List<Result> list = getDatasFromHbase(tableName, family, rowKeys, filterColumn, true,
                true);
        Map<String, List<WifiInfo>> map = getWifiInfoList(list);
        return map;
    }

    public Map<String, List<WifiInfo>> getWifiInfoList(List<Result> list){
        Map<String, List<WifiInfo>> map = new HashMap<String, List<WifiInfo>>();
        for (Result r : list) {
            WifiInfo info = new WifiInfo();
            for (Cell v : r.rawCells()) {
                String column = new String(CellUtil.cloneQualifier(v));
                String value = new String(CellUtil.cloneValue(v));
                WifiInfo.setWifiInfo(info, column, value);
            }
            List<WifiInfo> wifeList = map.get(info.getTid() + "#"
                    + info.getMac() + "#" + info.getMobile());
            if (wifeList == null) {
                wifeList = new ArrayList<WifiInfo>();
            }
            wifeList.add(info);
            map.put(info.getTid() + "#" + info.getMac() + "#"
                    + info.getMobile(), wifeList);
        }
        return map;
    }
    public List<Result> getDatasFromHbase(String tableName,
            String family, final List<String> rowKeys,
            final List<String> filterColumn, boolean isContiansRowkeys,
            boolean isContainsList) {
        if (rowKeys == null || rowKeys.size() <= 0) {
            return null;
        }

        // split the row keys into chunks of at most 1000 and fetch each chunk
        // on a fixed pool of 10 worker threads
        final int maxRowKeySize = 1000;
        ExecutorService pool = Executors.newFixedThreadPool(10);
        int loopSize = rowKeys.size() % maxRowKeySize == 0 ? rowKeys.size()
                / maxRowKeySize : rowKeys.size() / maxRowKeySize + 1;

        ArrayList<Future<List<Result>>> results = new ArrayList<Future<List<Result>>>();
        for (int loop = 0; loop < loopSize; loop++) {
            int end = (loop + 1) * maxRowKeySize > rowKeys.size() ? rowKeys
                    .size() : (loop + 1) * maxRowKeySize;
            List<String> partRowKeys = rowKeys.subList(loop * maxRowKeySize,
                    end);

            HbaseDataGetter hbaseDataGetter = new HbaseDataGetter(tableName,
                    family, partRowKeys, filterColumn, isContiansRowkeys,
                    isContainsList);
            Future<List<Result>> result = pool.submit(hbaseDataGetter);
            results.add(result);
        }

        List<Result> dataQueue = new ArrayList<Result>();
        try {
            for (Future<List<Result>> rs : results) {
                List<Result> rd = rs.get();
                dataQueue.addAll(rd);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            pool.shutdown();
        }
        return dataQueue;
    }

    class HbaseDataGetter implements Callable<List<Result>> {
        private String tableName;
        private String family;
        private List<String> rowKeys;
        private List<String> filterColumn;
        private boolean isContiansRowkeys;
        private boolean isContainsList;

        public HbaseDataGetter(String tableName, String family,
                List<String> rowKeys, List<String> filterColumn,
                boolean isContiansRowkeys, boolean isContainsList) {
            this.tableName = tableName;
            this.family = family;
            this.rowKeys = rowKeys;
            this.filterColumn = filterColumn;
            this.isContiansRowkeys = isContiansRowkeys;
            this.isContainsList = isContainsList;
        }

        public HbaseDataGetter(List<String> rowKeys, List<String> filterColumn,
                boolean isContiansRowkeys, boolean isContainsList) {
            this.rowKeys = rowKeys;
            this.filterColumn = filterColumn;
            this.isContiansRowkeys = isContiansRowkeys;
            this.isContainsList = isContainsList;
        }

        @Override
        public List<Result> call() throws Exception {
            Result[] rs = getDatasFromHbase(tableName, family, rowKeys,
                    filterColumn);
            List<Result> listData = new ArrayList<Result>();
            for (Result r : rs) {
                listData.add(r);
                // WifiInfoVO data = assembleData(r, filterColumn,
                // isContiansRowkeys,
                // isContainsList);
                // listData.add(data);
            }
            return listData;
        }
    }

    private Result[] getDatasFromHbase(String tableName, String family,
            List<String> rowKeys, List<String> filterColumn) {
        Result[] rs = null;
        HTableInterface hTableInterface = createTable(tableName);
        List<Get> listGets = new ArrayList<Get>();
        for (String rk : rowKeys) {
            Get get = new Get(Bytes.toBytes(rk));
            if (filterColumn != null) {
                for (String column : filterColumn) {
                    get.addColumn(family.getBytes(), column.getBytes());
                }
            }
            listGets.add(get);
        }
        try {
            // one multi-get per chunk keeps each RPC short, so no long-lived
            // scanner lease is involved
            rs = hTableInterface.get(listGets);
        } catch (IOException e1) {
            e1.printStackTrace();
        } finally {
            try {
                listGets.clear();
                hTableInterface.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return rs;
    }
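
For reference, a call into the DAO above might look like the following. The table name, column family, qualifiers, and row-key range are hypothetical placeholders; HBaseDAOImp is the class the stack trace points at, and WifiInfo is the value object already used in the code.

// Hypothetical usage of the batch-loading DAO above; all literal values are placeholders.
HBaseDAOImp dao = new HBaseDAOImp();
List<String> columns = Arrays.asList("tid", "mac", "mobile");   // qualifiers consumed by WifiInfo.setWifiInfo
Map<String, List<WifiInfo>> rows = dao.getWIFIRows(
        "t_wifi_record",          // placeholder table name
        "cf",                     // placeholder column family
        columns,
        "CY001_20150801",         // placeholder start row prefix
        "CY001_20150807");        // placeholder stop row prefix
System.out.println("loaded " + rows.size() + " device keys");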


