// Original source: https://blog.csdn.net/a2615381/article/details/52174671 (author: a2615381)
// Adds timing and documentation. Only the table name and column family name need to be
// changed; Hadoop must be unpacked locally (see hadoop.home.dir below) before this runs.
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
public class HbaseRowCount {

    /**
     * Entry point: counts the rows of the "test" table via the server-side
     * AggregateImplementation coprocessor, without editing hbase-site.xml.
     */
    public static void main(String[] args) throws Throwable {
        Configuration conf = HBaseConfiguration.create();
        // Windows needs a local Hadoop unpack so winutils.exe can be located.
        System.setProperty("hadoop.home.dir", "c:/hadoop");
        myHbaseConf(conf);
        // Table to count; family defaults to "cf1" in the delegating overload.
        rowCount2("test", conf);
    }

    /**
     * Counts the rows of {@code tableName} over the column family "cf1".
     * Kept for backward compatibility; delegates to the parameterized overload.
     *
     * @param tableName name of the table to count
     * @param conf      HBase cluster configuration
     */
    private static void rowCount2(String tableName, Configuration conf)
            throws IllegalArgumentException, Throwable {
        rowCount2(tableName, "cf1", conf);
    }

    /**
     * Counts rows server-side via the AggregateImplementation coprocessor,
     * installing the coprocessor on the table first if it is not yet attached
     * (this requires a brief disable/modify/enable cycle of the table).
     *
     * @param tableName name of the table to count
     * @param family    column family to restrict the scan to
     * @param conf      HBase cluster configuration
     */
    private static void rowCount2(String tableName, String family, Configuration conf)
            throws IllegalArgumentException, Throwable {
        String coprocessorClassName = "org.apache.hadoop.hbase.coprocessor.AggregateImplementation";
        // Timing of the whole count operation.
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        System.out.println("开始统计");
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes(tableName));
            // Attach the aggregation coprocessor once; skip if already present.
            if (!htd.hasCoprocessor(coprocessorClassName)) {
                admin.disableTable(tableName);
                htd.addCoprocessor(coprocessorClassName);
                admin.modifyTable(Bytes.toBytes(tableName), htd);
                admin.enableTable(tableName);
            }
        } finally {
            // The original leaked this connection; release it before scanning.
            admin.close();
        }
        AggregationClient ac = new AggregationClient(conf);
        Scan scan = new Scan();
        // Restrict the scan to the requested column family.
        scan.addFamily(Bytes.toBytes(family));
        // NOTE(review): AggregationClient.close() exists only on some HBase
        // versions — call it here too if the cluster's client jar provides it.
        long rowCount = ac.rowCount(TableName.valueOf(Bytes.toBytes(tableName)),
                new LongColumnInterpreter(), scan);
        System.out.println(rowCount);
        stopWatch.stop();
        System.out.println("统计耗时:" + stopWatch.getTime());
    }

    /** Points the client at the ZooKeeper quorum of the target cluster. */
    private static void myHbaseConf(Configuration conf) {
        conf.set("hbase.zookeeper.quorum", "hadoop1:2181,hadoop2:2181,hadoop3:2181");
    }
}