package Yishikeji.Hbase.Admin.Hbase;

import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HConnection;

import java.io.IOException;
import java.util.HashMap;

/**
 * @Description HBase cluster information: cluster-wide status plus per-RegionServer load.
 * @author yangwulin
 * @Date 2018-09-10
 * @modify Review fixes: pooled connections are now returned via
 *         {@code HConnectionFactory.freeInstance}, the {@code Admin} handle is closed,
 *         and a duplicate map insertion was removed.
 */
public class HBaseClusterInfo {

    /**
     * Collects cluster-wide status: active master, backup masters, live/dead
     * RegionServer counts and names, online region count, average load, and the
     * cluster request count.
     *
     * @return map of metric name to value; empty if the cluster is unreachable
     */
    public static HashMap<Object, Object> HbaseClusterStatus() {
        HConnection conn = null;
        HashMap<Object, Object> tempMap = new HashMap<Object, Object>();
        try {
            conn = HConnectionFactory.getInstance();
            // Admin is Closeable; close it so server-side resources are released.
            try (Admin admin = conn.getAdmin()) {
                ClusterStatus status = admin.getClusterStatus();
                // ServerName of the active master
                tempMap.put("Master", status.getMaster());
                // backup masters and their count
                tempMap.put("BackupMasters", status.getBackupMasters());
                tempMap.put("BackupMastersSize", status.getBackupMastersSize());
                // number of live RegionServers
                tempMap.put("LiveServersSize", status.getServersSize());
                // total number of online regions
                tempMap.put("RegionsCount", status.getRegionsCount());
                // average regions per RegionServer (regions / servers)
                tempMap.put("AvgLoad", status.getAverageLoad());
                // list of live RegionServers
                tempMap.put("Servers", status.getServers());
                // dead RegionServer count and names
                tempMap.put("DeadServers", status.getDeadServers());
                tempMap.put("DeadServersNames", status.getDeadServerNames());
                // cluster-wide request count
                tempMap.put("Requests", status.getRequestsCount());
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: the pooled connection was never returned, draining the pool.
            if (conn != null) {
                HConnectionFactory.freeInstance(conn);
            }
        }
        return tempMap;
    }

    /**
     * Collects per-RegionServer load metrics (read/write/total request counts,
     * requests per second, heap usage, memstore size, sample timestamp), keyed by
     * {@code serverName + metricLabel}.
     *
     * @return map of metric key to value; empty if the cluster is unreachable
     */
    public static HashMap<Object, Object> ServerLoad() {
        HConnection conn = null;
        HashMap<Object, Object> load = new HashMap<Object, Object>();
        try {
            conn = HConnectionFactory.getInstance();
            try (Admin admin = conn.getAdmin()) {
                ClusterStatus status = admin.getClusterStatus();
                for (ServerName serverName : status.getServers()) {
                    ServerLoad serverLoad = status.getLoad(serverName);
                    // RegionServer read-request count
                    load.put(serverName + "读请求数", serverLoad.getReadRequestsCount());
                    // RegionServer write-request count
                    // BUG FIX: this key was inserted twice with the same value; kept once.
                    load.put(serverName + "写请求数", serverLoad.getWriteRequestsCount());
                    // requests per second
                    load.put(serverName + "每秒请求数", serverLoad.getRequestsPerSecond());
                    load.put(serverName + "堆(Heap)最大值(MB)", serverLoad.getMaxHeapMB());
                    load.put(serverName + "堆(Heap)使用值(MB)", serverLoad.getUsedHeapMB());
                    load.put(serverName + "总请求数", serverLoad.getTotalNumberOfRequests());
                    load.put(serverName + "Memstore总共(MB)", serverLoad.getMemstoreSizeInMB());
                    // sample time in epoch seconds (Calendar replaced by the direct call)
                    load.put(serverName + "获取当前时间戳", System.currentTimeMillis() / 1000);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: return the pooled connection instead of leaking it.
            if (conn != null) {
                HConnectionFactory.freeInstance(conn);
            }
        }
        return load;
    }
}
package Yishikeji.Hbase.Admin.Hbase;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import Yishikeji.Hbase.Admin.Hbase.utils.PropertiesUtils;

/**
 * Small fixed-size pool of HBase connections. Capacity is read once from
 * {@code hbase.properties} key {@code pool.max.total}.
 *
 * <p>Thread-safety: all public methods are {@code synchronized} on the class.
 */
public class HConnectionFactory {

    protected static Logger logger = LoggerFactory.getLogger(HConnectionFactory.class);

    private static String CONFIG_FILE_NAME = "hbase.properties";
    // Pool capacity; replaced by "pool.max.total" on first successful init.
    private static int FREE_POOL_SIZE = 1;
    // Idle-connection pool; null until the first getInstance() call.
    private static HConnection[] factory = null;
    // Number of idle connections currently held in the pool.
    private static int countFree;
    private static Configuration conf;

    /**
     * Hands out a connection, lazily creating the pool on first use.
     *
     * @return a connection (possibly an overflow one beyond pool capacity when the
     *         pool is exhausted), or {@code null} if initialization fails
     */
    public static synchronized HConnection getInstance() {
        HConnection result = null;
        try {
            // BUG FIX: the original rebuilt the whole pool every time countFree hit 0,
            // abandoning the previous array and creating FREE_POOL_SIZE brand-new
            // connections on every exhaustion. Initialize exactly once instead.
            if (factory == null) {
                FREE_POOL_SIZE = Integer.parseInt(
                        PropertiesUtils.getPropertyValue(CONFIG_FILE_NAME, "pool.max.total"));
                factory = new HConnection[FREE_POOL_SIZE];
                for (int i = 0; i < FREE_POOL_SIZE; i++) {
                    conf = HBaseConfiguration.create();
                    factory[countFree++] = HConnectionManager.createConnection(conf);
                }
            }
            if (countFree > 0) {
                result = factory[--countFree];
            } else {
                // Pool exhausted: hand out a temporary overflow connection.
                // freeInstance() closes it if the pool is full when it comes back.
                result = HConnectionManager.createConnection(HBaseConfiguration.create());
            }
        } catch (IOException | NumberFormatException e) {
            // BUG FIX: NumberFormatException from the config parse was previously uncaught.
            logger.error("Hbase连接池初始化失败!请检查Zookeeper等配置是否正确。", e);
        }
        return result;
    }

    /**
     * Returns a connection to the pool, or closes it when the pool is already full.
     *
     * @param conn connection to recycle; ignored when {@code null}
     */
    public static synchronized void freeInstance(HConnection conn) {
        if (conn == null) {
            return;
        }
        // BUG FIX: guard against freeInstance() being called before the pool exists.
        if (factory != null && countFree < FREE_POOL_SIZE) {
            factory[countFree++] = conn;
        } else {
            try {
                conn.close();
            } catch (IOException e) {
                logger.error("关闭HBase连接发生错误!", e);
            }
        }
    }

    /** @return number of idle connections currently in the pool */
    public static int getCountFree() {
        return countFree;
    }
}
package Yishikeji.Hbase.Admin.Hbase.utils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @Description Helper for reading and writing properties files located at the
 *              classpath root.
 * @author jibin
 * @Date 2018-08-26
 * @modify Review fixes: streams are closed via try-with-resources and missing
 *         resources no longer cause NullPointerException.
 */
public class PropertiesUtils {

    protected static Logger logger = LoggerFactory.getLogger(PropertiesUtils.class);

    /**
     * Loads a properties file from the classpath root.
     *
     * <p>BUG FIX: the original leaked the {@code InputStream} and threw an
     * uncaught {@code NullPointerException} when the file was missing
     * ({@code load(null)}); it now always returns a (possibly empty) instance.
     *
     * @param fileName file name relative to the classpath root
     * @return loaded properties; empty when the file is missing or unreadable
     */
    public static Properties getProperties(String fileName) {
        Properties properties = new Properties();
        // All configuration files are expected at the classpath root.
        try (InputStream inputStream =
                PropertiesUtils.class.getResourceAsStream("/" + fileName)) {
            if (inputStream == null) {
                logger.error("加载配置文件:" + fileName + "不存在!请检查根目录下是否存在该文件。");
                return properties;
            }
            properties.load(inputStream);
        } catch (IOException e) {
            logger.error("加载配置文件:" + fileName + "失败!请检查根目录下是否存在该文件。", e);
        }
        if (properties.isEmpty()) {
            logger.error("加载配置文件:" + fileName + "不存在!请检查根目录下是否存在该文件。");
        }
        return properties;
    }

    /**
     * Looks up a single configuration value.
     *
     * @param fileName properties file at the classpath root
     * @param propName property key; blank or the literal "null" yields ""
     * @return the value, or {@code null} when the key (or file) is absent
     */
    public static String getPropertyValue(String fileName, String propName) {
        if (propName == null || propName.equals("") || propName.equals("null")) {
            return "";
        }
        Properties properties = getProperties(fileName);
        return properties.getProperty(propName);
    }

    /**
     * Sets a property and writes the whole file back to its classpath location.
     *
     * @param fileName properties file at the classpath root (must already exist)
     * @param propName property key to set
     * @param value    property value to store
     */
    public static void addPropertyValue(String fileName, String propName, String value) {
        // BUG FIX: getResource() returning null previously caused an uncaught NPE.
        URL resource = PropertiesUtils.class.getResource("/" + fileName);
        if (resource == null) {
            logger.error("无法保存指定的配置文件:" + fileName + ",指定文件不存在。");
            return;
        }
        Properties properties = getProperties(fileName);
        properties.setProperty(propName, value);
        File file = new File(resource.getPath());
        // try-with-resources: the original leaked `out` when store() threw.
        try (OutputStream out = new FileOutputStream(file)) {
            properties.store(out, "");
        } catch (IOException ex) {
            logger.error("无法保存指定的配置文件:" + fileName + ",指定文件不存在。", ex);
        }
    }

    /** Manual smoke test: writes then reads back a sample property. */
    public static void main(String args[]) {
        PropertiesUtils.addPropertyValue("test.properties", "hello", "world");
        String value = PropertiesUtils.getPropertyValue("test.properties", "hello");
        System.out.println(value);
    }
}