HBase configuration in a Spring Boot project

1. properties file configuration

#Hbase
spring.data.hbase.quorum=tdh-node1,tdh-node2,tdh-node3
spring.data.hbase.client-port=2181
spring.data.hbase.node-parent=/hyperbase1
spring.data.hbase.master-principal=hbase/_HOST@TDH
spring.data.hbase.region-server-principal=hbase/_HOST@TDH
spring.data.hbase.hbase-authentication=kerberos
spring.data.hbase.hadoop-authentication=kerberos

#HBase img table info
hbase.table.name=ocr_info
hbase.family.name=f1
hbase.column.name=s_img

#tdh security
tdh.security.krb5.path=/teis/teis-web/security/krb5.conf
tdh.security.admin.path=/teis/teis-web/security/keytab

2. Bind the properties file settings to a configuration class

package com.cfcc.bigdata.base.config.hbase;

import org.springframework.boot.context.properties.ConfigurationProperties;

@ConfigurationProperties(prefix = "spring.data.hbase")
public class HbaseProperties {

    private String quorum;

    private String rootDir;

    private String nodeParent;

    private String clientPort;

    private String masterPrincipal;

    private String regionServerPrincipal;

    private String hbaseAuthentication;

    private String hadoopAuthentication;


    public String getQuorum() {
        return quorum;
    }

    public void setQuorum(String quorum) {
        this.quorum = quorum;
    }


    public String getNodeParent() {
        return nodeParent;
    }

    public void setNodeParent(String nodeParent) {
        this.nodeParent = nodeParent;
    }


    public String getRootDir() {
        return rootDir;
    }

    public void setRootDir(String rootDir) {
        this.rootDir = rootDir;
    }

    public String getClientPort() {
        return clientPort;
    }

    public void setClientPort(String clientPort) {
        this.clientPort = clientPort;
    }

    public String getMasterPrincipal() {
        return masterPrincipal;
    }

    public void setMasterPrincipal(String masterPrincipal) {
        this.masterPrincipal = masterPrincipal;
    }

    public String getRegionServerPrincipal() {
        return regionServerPrincipal;
    }

    public void setRegionServerPrincipal(String regionServerPrincipal) {
        this.regionServerPrincipal = regionServerPrincipal;
    }

    public String getHbaseAuthentication() {
        return hbaseAuthentication;
    }

    public void setHbaseAuthentication(String hbaseAuthentication) {
        this.hbaseAuthentication = hbaseAuthentication;
    }

    public String getHadoopAuthentication() {
        return hadoopAuthentication;
    }

    public void setHadoopAuthentication(String hadoopAuthentication) {
        this.hadoopAuthentication = hadoopAuthentication;
    }
}

3. Configure HBase

package com.cfcc.bigdata.base.config.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.core.env.Environment;

import java.io.IOException;
/**
 * @author gzy
 */
@org.springframework.context.annotation.Configuration
@EnableConfigurationProperties(HbaseProperties.class)
@ConditionalOnClass(HbaseUtil.class)
public class HbaseAutoConfiguration {

    //ZooKeeper quorum used by HBase
    private static final String HBASE_QUORUM = "hbase.zookeeper.quorum";
    private static final String HBASE_ZNODE_PARENT = "zookeeper.znode.parent";
    //ZooKeeper client connection port
    private static final String HBASE_ZOOKEEPER_CLIENTPORT = "hbase.zookeeper.property.clientPort";
    //Kerberos principal of the HBase master
    private static final String HBASE_MASTER_PRINCIPAL = "hbase.master.kerberos.principal";
    //Kerberos principal of the region servers
    private static final String HBASE_REGIONSERVER_PRINCIPAL = "hbase.regionserver.kerberos.principal";
    //HBase cluster authentication mechanism; the current version only supports Kerberos
    private static final String HBASE_AUTHENTICATION = "hbase.security.authentication";
    //Hadoop cluster authentication mechanism; the current version only supports Kerberos
    private static final String HADOOP_AUTHENTICATION = "hadoop.security.authentication";

    @Autowired
    private HbaseProperties hbaseProperties;
    @Autowired
    private Environment env;

    @Bean(name ="configuration")
    public Configuration hbaseConfiguration() throws IOException {

        String  krb5Path = env.getProperty("tdh.security.krb5.path");
        String adminPath = env.getProperty("tdh.security.admin.path");

        Configuration configuration = HBaseConfiguration.create();
        configuration.set(HBASE_QUORUM, hbaseProperties.getQuorum());
        configuration.set(HBASE_ZOOKEEPER_CLIENTPORT, hbaseProperties.getClientPort());
        configuration.set(HBASE_ZNODE_PARENT, hbaseProperties.getNodeParent());
        configuration.set(HBASE_MASTER_PRINCIPAL, hbaseProperties.getMasterPrincipal());
        configuration.set(HBASE_REGIONSERVER_PRINCIPAL, hbaseProperties.getRegionServerPrincipal());
        configuration.set(HBASE_AUTHENTICATION, hbaseProperties.getHbaseAuthentication());
        configuration.set(HADOOP_AUTHENTICATION, hbaseProperties.getHadoopAuthentication());

        //point the JVM at krb5.conf and log in from the keytab
        System.setProperty("java.security.krb5.conf", krb5Path);
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab("admin@TDHTEST", adminPath);
        return configuration;
    }
}
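
One operational note: loginUserFromKeytab is a one-time login, and Kerberos tickets eventually expire. For a long-running service it can be worth renewing the ticket periodically. The sketch below is only an illustration (the class name and the one-hour interval are assumptions, not part of the original project); it relies on Hadoop's UserGroupInformation API, which provides checkTGTAndReloginFromKeytab() for exactly this purpose.

package com.cfcc.bigdata.base.config.hbase;

import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.security.UserGroupInformation;

//Hypothetical helper, not from the original post: periodically re-login from the keytab.
public class KerberosTicketRenewer {

    public static void start() {
        ScheduledExecutorService renewer = Executors.newSingleThreadScheduledExecutor();
        //the one-hour interval is an assumption; tune it to the realm's ticket lifetime
        renewer.scheduleAtFixedRate(() -> {
            try {
                //re-login from the keytab if the TGT is close to expiring
                UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
            } catch (IOException e) {
                //log and retry on the next tick
                e.printStackTrace();
            }
        }, 1, 1, TimeUnit.HOURS);
    }
}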

4. Wrap HBase operations in a utility class
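
The HbaseUtil class below obtains the Configuration bean through SpringContextHolder (com.cfcc.bigdata.common.spring), which is not shown in this post. A minimal sketch of such a holder, assuming the conventional ApplicationContextAware approach, might look like this:

package com.cfcc.bigdata.common.spring;

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

//Minimal sketch (assumption): keeps a static reference to the ApplicationContext
//so that static code such as HbaseUtil can look up beans by name.
@Component
public class SpringContextHolder implements ApplicationContextAware {

    private static ApplicationContext applicationContext;

    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        applicationContext = context;
    }

    @SuppressWarnings("unchecked")
    public static <T> T getBean(String name) {
        return (T) applicationContext.getBean(name);
    }
}

With that in place, the utility class itself: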

package com.cfcc.bigdata.base.config.hbase;

import com.cfcc.bigdata.common.base.exception.HbaseSystemException;
import com.cfcc.bigdata.common.spring.SpringContextHolder;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.DependsOn;
import org.springframework.stereotype.Component;
import org.apache.hadoop.conf.Configuration;
import org.springframework.util.Assert;
import org.springframework.util.StopWatch;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.NavigableMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
* @author gzy
* @version : 1.0
* @date : 2019/5/27
*/

@DependsOn("springContextHolder")
@Component
public class HbaseUtil {

    private static final Logger LOGGER = LoggerFactory.getLogger(HbaseUtil.class);

    private static Configuration configuration =  SpringContextHolder.getBean("configuration");
    private static ExecutorService pool = Executors.newScheduledThreadPool(20);
    private static Connection connection = null;
    private static HbaseUtil instance = null;
    private static  Admin admin = null;


    private HbaseUtil() {
        if (connection == null) {
            try {
                connection = ConnectionFactory.createConnection(configuration, pool);
                admin = connection.getAdmin();

            } catch (IOException e) {
                LOGGER.error("HbaseTemplate init error:" + e.getMessage(), e);
            }
        }
    }

    public static synchronized HbaseUtil getInstance() {
        if (instance == null) {
            instance = new HbaseUtil();
        }

        return instance;
    }

    /**
     * Check whether a table exists
     * @param tableName
     * @return true if the table exists, false otherwise
     * @throws IOException
     */
    public  boolean isTableExist(String tableName) throws IOException {

        TableName name = TableName.valueOf(tableName);

        return admin.tableExists(name);
    }

    /**
     * Drop a table
     * @param tableName
     * @throws IOException
     */
    public  void dropTable(String tableName) throws IOException{

        TableName name = TableName.valueOf(tableName);

        if(isTableExist(tableName)){
            admin.disableTable(name);
            admin.deleteTable(name);
            LOGGER.info("表 {} 删除成功!", tableName);
        }else{
            LOGGER.error("表 {} 不存在!" ,tableName );
        }
    }


    /**
     * Create a table (tableName: table name; familys: the column families)
     * @param tableName
     * @param familys
     * @throws IOException
     */
    public  void createTable(String tableName, String[] familys) throws IOException {

        TableName name = TableName.valueOf(tableName);
        if (admin.tableExists(name)) {
            LOGGER.error("create table error! this table {} already exists!", tableName);
        } else {

            HTableDescriptor descr = new HTableDescriptor(name);
            for (String family : familys) {
                descr.addFamily(new HColumnDescriptor(family)); //add the column family
            }
            admin.createTable(descr); //create the table
            LOGGER.info( "table {} created successfully!", tableName);
        }
    }



    public  byte[] getFileBytes(String path) throws IOException {
        Assert.notNull(path, "file path must not be null");
        //read the whole (local) file into a byte array
        try (FileInputStream fis = new FileInputStream(new File(path))) {
            byte[] b = new byte[fis.available()];
            int read = 0;
            //a single read() may return fewer bytes than requested, so loop until the buffer is full
            while (read < b.length) {
                int n = fis.read(b, read, b.length - read);
                if (n < 0) { break; }
                read += n;
            }
            return b;
        }
    }

    /**
     * Insert a record (single row, single column family - multiple columns and values)
     *
     * @param tableName         table name
     * @param row               row key
     * @param columnFamilys     column family name
     * @param columns           column names (array)
     * @param values            values (array, matched one-to-one with the columns)
     */
    public void insertRecords(String tableName, String row, String columnFamilys, String[] columns, String[] values) throws IOException {
        Assert.notNull(tableName, "tableName must not be null");
        Assert.notNull(row, "rowkey must not be null");
        Assert.notNull(columnFamilys, "columnFamilys must not be null");

        TableName name = TableName.valueOf(tableName);
        Table table = null;

        StopWatch sw = new StopWatch();
        sw.start();

        try {
            table = connection.getTable(name);
            Put put = new Put(Bytes.toBytes(row));
            for (int i = 0; i < columns.length; i++) {
                put.addColumn(Bytes.toBytes(columnFamilys), Bytes.toBytes(columns[i]), Bytes.toBytes(values[i]));
            }
            table.put(put); //submit the whole row once instead of once per column
        } catch (Throwable throwable) {
            throw new HbaseSystemException(throwable);
        } finally {
            if (null != table) {
                try {
                    table.close();
                    sw.stop();
                } catch (IOException e) {
                    LOGGER.error("hbase资源释放失败");
                }
            }
        }

    }

    /**
     * Insert a record (single row, single column family - single column and value)
     *
     * @param tableName         table name
     * @param row               row key
     * @param columnFamily      column family name
     * @param column            column name
     * @param value             value
     */
    public void insertOneRecord(String tableName, String row, String columnFamily, String column, byte[] value) throws IOException {
        Assert.notNull(tableName, "tableName must not be null");
        Assert.notNull(row, "rowkey must not be null");
        Assert.notNull(columnFamily, "columnFamilys must not be null");

        TableName name = TableName.valueOf(tableName);
        Table table = null;

        StopWatch sw = new StopWatch();
        sw.start();

        try {
            table = connection.getTable(name);

            Put put = new Put(Bytes.toBytes(row));
            put.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(column), value);
            table.put(put);
        } catch (Throwable throwable) {
            throw new HbaseSystemException(throwable);
        } finally {
            if (null != table) {
                try {
                    table.close();
                    sw.stop();
                } catch (IOException e) {
                    LOGGER.error("hbase资源释放失败");
                }
            }
        }
    }

    /**
     * Batch insert
     * mutations: the mutations to apply, e.g. Put p = new Put(Bytes.toBytes("row")); list.add(p);
     * @param tableName
     * @param mutations
     * @throws IOException
     */
    public void batchInsertRecord(String tableName, List<Mutation> mutations) throws IOException {

        Assert.notNull(tableName, "tableName must not be null");

        StopWatch sw = new StopWatch();
        sw.start();
        BufferedMutator mutator = null;

        try {
            BufferedMutatorParams mutatorParams = new BufferedMutatorParams(TableName.valueOf(tableName));
            mutator = connection.getBufferedMutator(mutatorParams.writeBufferSize(3 * 1024 * 1024));
            mutator.mutate(mutations);
        } catch (Exception e){
            sw.stop();
            throw new HbaseSystemException(e);
        }finally {
            if (null != mutator) {
                try {
                    mutator.flush();
                    mutator.close();
                    sw.stop();
                } catch (IOException e) {
                    LOGGER.error("hbase mutator资源释放失败");
                }
            }

        }


    }
    
    /**
     * Delete a row
     *
     * @param tablename         table name
     * @param rowkey            row key
     */
    public void deleteRow(String tablename, String rowkey) throws IOException {
        Assert.notNull(tablename, "tableName must not be null");
        Assert.notNull(rowkey, "rowkey must not be null");

        TableName name = TableName.valueOf(tablename);
        StopWatch sw = new StopWatch();
        sw.start();
        Table table = null;
        try {
            table = connection.getTable(name);
            Delete d = new Delete(rowkey.getBytes());
            table.delete(d);
        } catch (Exception e) {
            throw new HbaseSystemException(e);
        } finally {
            if (null != table) {
                try {
                    table.close();
                    sw.stop();
                } catch (IOException e) {
                    LOGGER.error("hbase资源释放失败");
                }
            }
        }


    }

    /**
     * Fetch a single row
     *
     * @param tablename         table name
     * @param rowKey            row key
     */
    public String selectRow(String tablename, String rowKey) throws IOException {

        Assert.notNull(tablename, "tableName must not be null");
        Assert.notNull(rowKey, "rowkey must not be null");
        String record = "";
        TableName name=TableName.valueOf(tablename);
        StopWatch sw = new StopWatch();
        sw.start();
        Table table = null;
        try {
            table = connection.getTable(name);
            Get g = new Get(rowKey.getBytes());
            Result rs = table.get(g);
            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = rs.getMap();
            for (Cell cell : rs.rawCells()) {
                StringBuffer stringBuffer = new StringBuffer().append(Bytes.toString(cell.getRow())).append("\t")
                        .append(Bytes.toString(cell.getFamily())).append("\t")
                        .append(Bytes.toString(cell.getQualifier())).append("\t")
                        .append(Bytes.toString(cell.getValue())).append("\n");
                String str = stringBuffer.toString();
                record += str;
            }
            return record;
        } catch (IOException e) {
            throw new HbaseSystemException(e);
        } finally {
            if (null != table) {
                try {
                    table.close();
                    sw.stop();
                } catch (Exception e) {
                    LOGGER.error("hbase资源释放失败");
                }
            }
        }

    }

    /**
     * Fetch a single value (single row, single column family, single column)
     *
     * @param tablename         table name
     * @param rowKey            row key
     * @param columnFamily      column family name
     * @param column            column name
     * @return
     */
    public  String selectValue(String tablename, String rowKey, String columnFamily, String column) throws IOException {
        TableName name=TableName.valueOf(tablename);
        Table table = null;
        StopWatch sw = new StopWatch();
        sw.start();
        try {
            table = connection.getTable(name);
            Get g = new Get(rowKey.getBytes());
            g.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(column));
            Result rs = table.get(g);
            return Bytes.toString(rs.value());
        } catch (Exception e) {
            throw new HbaseSystemException(e);
        } finally {
            if (null != table) {
                try {
                    table.close();
                    sw.stop();
                } catch (Exception e) {
                    LOGGER.error("hbase资源释放失败");
                }
            }
        }
    }

    /**
     * Fetch a single value as bytes (single row, single column family, single column);
     * mainly used for reading binary content such as stored files
     *
     * @param tablename         table name
     * @param rowKey            row key
     * @param columnFamily      column family name
     * @param column            column name
     * @return
     */
    public  byte[] selectValueAsBytes(String tablename, String rowKey, String columnFamily, String column) throws IOException {
        TableName name=TableName.valueOf(tablename);
        Table table = null;
        StopWatch sw = new StopWatch();
        sw.start();
        try {
            table = connection.getTable(name);
            Get g = new Get(rowKey.getBytes());
            g.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(column));
            Result rs = table.get(g);
            return rs.value();
        } catch (Exception e) {
            throw new HbaseSystemException(e);
        } finally {
            if (null != table) {
                try {
                    table.close();
                    sw.stop();
                } catch (Exception e) {
                    LOGGER.error("hbase资源释放失败");
                }
            }
        }
    }

    /**
     * Scan all rows of a table
     *
     * @param tablename
     * @return
     */
    public String scanAllRecord(String tablename) throws IOException {
        String record = "";
        TableName name=TableName.valueOf(tablename);
        Table table = connection.getTable(name);
        Scan scan = new Scan();
        ResultScanner scanner = table.getScanner(scan);
        try {
            for(Result result : scanner){
                for (Cell cell : result.rawCells()) {
                    StringBuffer stringBuffer = new StringBuffer().append(Bytes.toString(cell.getRow())).append("\t")
                            .append(Bytes.toString(cell.getFamily())).append("\t")
                            .append(Bytes.toString(cell.getQualifier())).append("\t")
                            .append(Bytes.toString(cell.getValue())).append("\n");
                    String str = stringBuffer.toString();
                    record += str;
                }
            }
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }

        return record;
    }

    /**
     * Scan report records whose row key contains the given keyword
     *
     * @param tablename
     * @param rowKeyword
     * @return
     */
    public <T> List<T> scanReportDataByRowKeyword(String tablename, String rowKeyword) throws IOException {
        ArrayList<T> list = new ArrayList<>();

        Table table = connection.getTable(TableName.valueOf(tablename));
        Scan scan = new Scan();

        //add a row filter that matches row keys containing the keyword
        RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator(rowKeyword));
        scan.setFilter(rowFilter);

        ResultScanner scanner = table.getScanner(scan);
        try {
            for (Result result : scanner) {
                //TODO map each Result to a domain object according to the business logic
                list.add(null);
            }
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }

        return list;
    }

    /**
     * Scan report records whose row key contains the given keyword, within a timestamp range
     *
     * @param tablename
     * @param rowKeyword
     * @param minStamp
     * @param maxStamp
     * @return
     */
    public <T> List<T> scanReportDataByRowKeywordTimestamp(String tablename, String rowKeyword, Long minStamp, Long maxStamp) throws IOException {
        ArrayList<T> list = new ArrayList<>();

        Table table = connection.getTable(TableName.valueOf(tablename));
        Scan scan = new Scan();
        //restrict the scan to the given timestamp range
        scan.setTimeRange(minStamp, maxStamp);

        RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator(rowKeyword));
        scan.setFilter(rowFilter);

        ResultScanner scanner = table.getScanner(scan);
        try {
            for (Result result : scanner) {
                //TODO map each Result to a domain object according to the business logic
                list.add(null);
            }
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }
        return list;
    }
}
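
Finally, a hedged usage sketch. The service class, method names and the imgPath parameter below are illustrative only (they are not part of the original project); the example wires the hbase.table.name / hbase.family.name / hbase.column.name properties from section 1 into calls on HbaseUtil to store and read back an image.

package com.cfcc.bigdata.base.config.hbase;

import java.io.IOException;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

//Hypothetical caller, for illustration only.
@Service
public class ImageStoreService {

    @Value("${hbase.table.name}")
    private String tableName;   //e.g. ocr_info

    @Value("${hbase.family.name}")
    private String familyName;  //e.g. f1

    @Value("${hbase.column.name}")
    private String columnName;  //e.g. s_img

    //store a local image file under the given row key
    public void saveImage(String rowKey, String imgPath) throws IOException {
        HbaseUtil hbase = HbaseUtil.getInstance();
        if (!hbase.isTableExist(tableName)) {
            hbase.createTable(tableName, new String[]{familyName});
        }
        byte[] img = hbase.getFileBytes(imgPath);
        hbase.insertOneRecord(tableName, rowKey, familyName, columnName, img);
    }

    //read the image bytes back by row key
    public byte[] loadImage(String rowKey) throws IOException {
        return HbaseUtil.getInstance().selectValueAsBytes(tableName, rowKey, familyName, columnName);
    }
}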
