HBase Client API I (Day 4)

I. Client API
  • Create a table
  • Modify a table
  • Insert data: put -> new ArrayList<Put>()
  • Single-row query: get
  • Batch query: scan
  • Delete a column: delete -> delColumn.addColumn()
  • Delete a table: disableTable, then deleteTable
package hbaseapi;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * @Author lgr
 * @Date 2020-03-25 12:19
 */
public class HBaseAPIDemo {
    static Configuration conf = null;
    static Connection conn = null;

    /**
     * Initialize the shared Configuration and Connection instances.
     * @throws IOException
     */
    @Before
    public void init() throws IOException {
        conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        conn = ConnectionFactory.createConnection(conf);
    }

    /**
     * Create a table
     * @throws IOException
     */
    @Test
    public void createTable() throws IOException {
        Admin admin = conn.getAdmin();

        HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf("t_user_info"));
        HColumnDescriptor base_info = new HColumnDescriptor("base_info");
        HColumnDescriptor extra_info = new HColumnDescriptor("extra_info");

        base_info.setVersions(1,3); // keep at least 1 and at most 3 versions per cell

        tableDescriptor.addFamily(base_info);
        tableDescriptor.addFamily(extra_info);

        admin.createTable(tableDescriptor);

        admin.close();
        conn.close();
    }
    /**
     * Modify a table
     * @throws IOException
     */
    @Test
    public void modifyTable() throws IOException {
        Admin admin = conn.getAdmin();

        HTableDescriptor t_user_info = admin.getTableDescriptor(TableName.valueOf("t_user_info"));
        HColumnDescriptor extra_info = t_user_info.getFamily("extra_info".getBytes());
        extra_info.setVersions(2,5); // keep at least 2 and at most 5 versions per cell

        t_user_info.addFamily(new HColumnDescriptor("other_info"));

        admin.modifyTable(TableName.valueOf("t_user_info"),t_user_info);

        admin.close();
        conn.close();
    }
    /**
     * Insert data
     */
    @Test
    public void put() throws IOException {
        Table t_user_info = conn.getTable(TableName.valueOf("t_user_info"));
        ArrayList<Put> puts = new ArrayList<Put>();
        Put put01 = new Put("user001".getBytes());
        put01.addColumn("base_info".getBytes(),"username".getBytes(),"zhangsan".getBytes());
        Put put02 = new Put("user001".getBytes());
        put02.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("password"), Bytes.toBytes("123456"));

        Put put03 = new Put("user002".getBytes());
        put03.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("username"), Bytes.toBytes("lisi"));
        put03.addColumn(Bytes.toBytes("extra_info"), Bytes.toBytes("married"), Bytes.toBytes("false"));

        Put put04 = new Put("zhang_sh_01".getBytes());
        put04.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("username"), Bytes.toBytes("zhang01"));
        put04.addColumn(Bytes.toBytes("extra_info"), Bytes.toBytes("married"), Bytes.toBytes("false"));

        Put put05 = new Put("zhang_sh_02".getBytes());
        put05.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("username"), Bytes.toBytes("zhang02"));
        put05.addColumn(Bytes.toBytes("extra_info"), Bytes.toBytes("married"), Bytes.toBytes("false"));

        Put put06 = new Put("liu_sh_01".getBytes());
        put06.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("username"), Bytes.toBytes("liu01"));
        put06.addColumn(Bytes.toBytes("extra_info"), Bytes.toBytes("married"), Bytes.toBytes("false"));

        Put put07 = new Put("zhang_bj_01".getBytes());
        put07.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("username"), Bytes.toBytes("zhang03"));
        put07.addColumn(Bytes.toBytes("extra_info"), Bytes.toBytes("married"), Bytes.toBytes("false"));

        Put put08 = new Put("zhang_bj_01".getBytes());
        put08.addColumn(Bytes.toBytes("base_info"), Bytes.toBytes("username"), Bytes.toBytes("zhang04"));
        put08.addColumn(Bytes.toBytes("extra_info"), Bytes.toBytes("married"), Bytes.toBytes("false"));
        put08.setDurability(Durability.SYNC_WAL);
        //add all the Put objects to one list
        puts.add(put01);
        puts.add(put02);
        puts.add(put03);
        puts.add(put04);
        puts.add(put05);
        puts.add(put06);
        puts.add(put07);
        puts.add(put08);

        t_user_info.put(puts);

        t_user_info.close();
        conn.close();
    }
    /**
     * Query a single row
     */
    @Test
    public void get() throws IOException {
        Table t_user_info = conn.getTable(TableName.valueOf("t_user_info"));
        Get get = new Get("user001".getBytes());
        Result result = t_user_info.get(get);
        List<Cell> cells = result.listCells();
        for (Cell cell : cells) {
//            Note: cell.getFamilyArray()/getQualifierArray()/getRowArray()/getValueArray()
//            return the whole backing byte array, so printing them directly gives garbled output;
//            use CellUtil.cloneXxx() to copy out just the relevant bytes, as below.
            byte[] rowkey = CellUtil.cloneRow(cell);
            byte[] family = CellUtil.cloneFamily(cell);
            byte[] column = CellUtil.cloneQualifier(cell);
            byte[] value = CellUtil.cloneValue(cell);
            System.out.println(new String(rowkey) + " " + new String(family) + " " + new String(column) + " " + new String(value));
        }

        t_user_info.close();
        conn.close();
    }
    /**
     * Batch query (scan)
     */
    @Test
    public void scan() throws IOException {
        Table t_user_info = conn.getTable(TableName.valueOf("t_user_info"));
        Scan scan = new Scan();

        ResultScanner scanner = t_user_info.getScanner(scan);
        Iterator<Result> it = scanner.iterator();
        while (it.hasNext()) {
            Result next = it.next();
            List<Cell> cells = next.listCells();
            for (Cell cell : cells) {
                byte[] rowkey = CellUtil.cloneRow(cell);
                byte[] family = CellUtil.cloneFamily(cell);
                byte[] column = CellUtil.cloneQualifier(cell);
                byte[] value = CellUtil.cloneValue(cell);
                System.out.println(new String(rowkey) + " " + new String(family) + " " + new String(column) + " " + new String(value));
            }
            System.out.println("---------------------------------");
        }
        t_user_info.close();
        conn.close();
    }
    /**
     * Delete a column
     */
    @Test
    public void deleteColumn() throws IOException {
        Table t_user_info = conn.getTable(TableName.valueOf("t_user_info"));
        Delete delColumn = new Delete("user001".getBytes());
        delColumn.addColumn("base_info".getBytes(),"password".getBytes());
        t_user_info.delete(delColumn);

        t_user_info.close();
        conn.close();
    }
    /**
     * Delete a table
     */
    @Test
    public void deleteTable() throws IOException {
        Admin admin = conn.getAdmin();
        admin.disableTable(TableName.valueOf("t_user_info"));
        admin.deleteTable(TableName.valueOf("t_user_info"));

        admin.close();
        conn.close();
    }
}
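Note: HTableDescriptor, HColumnDescriptor and Admin.getTableDescriptor() used above belong to the HBase 1.x client API and are deprecated in HBase 2.x. As a rough sketch only, assuming an HBase 2.x client (the rest of this post sticks with the 1.x API), the builder-style equivalent of createTable looks like this:

        // Sketch: builder-style table creation, HBase 2.x client assumed.
        // Extra imports: org.apache.hadoop.hbase.client.ColumnFamilyDescriptor,
        // ColumnFamilyDescriptorBuilder, TableDescriptor, TableDescriptorBuilder.
        Admin admin = conn.getAdmin();
        ColumnFamilyDescriptor baseInfo = ColumnFamilyDescriptorBuilder
                .newBuilder(Bytes.toBytes("base_info"))
                .setMaxVersions(3)   // keep up to 3 versions per cell
                .build();
        ColumnFamilyDescriptor extraInfo = ColumnFamilyDescriptorBuilder.of("extra_info");
        TableDescriptor table = TableDescriptorBuilder
                .newBuilder(TableName.valueOf("t_user_info"))
                .setColumnFamily(baseInfo)
                .setColumnFamily(extraInfo)
                .build();
        admin.createTable(table);
        admin.close();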

Filter queries. Filters are set on a Scan and evaluated server-side, so only matching cells are returned to the client.

Filter utility class (runs a scan with whatever Filter it is given and prints the result cells)

package filter;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.Filter;

import java.io.IOException;
import java.util.Iterator;

/**
 * @Author lgr
 * @Date 2020-03-25 14:22
 */
public class TestFilter {
    private Connection conn = null;
    private TableName tableName = null;
    private Filter filter =null;

    public TestFilter(Connection conn, TableName tableName, Filter filter) {
        this.conn = conn;
        this.tableName = tableName;
        this.filter = filter;
    }
    public void getValues() throws IOException {
        Table t_user_info = conn.getTable(tableName);
        Scan scan = new Scan();
        scan.setFilter(filter);

        ResultScanner scanner = t_user_info.getScanner(scan);
        Iterator<Result> iterator = scanner.iterator();
        while (iterator.hasNext()) {
            Result next = iterator.next();
            for (Cell cell : next.listCells()) {
                byte[] rowkey = CellUtil.cloneRow(cell);
                byte[] family = CellUtil.cloneFamily(cell);
                byte[] column = CellUtil.cloneQualifier(cell);
                byte[] value = CellUtil.cloneValue(cell);
                System.out.println(new String(rowkey) + " | " + new String(family) + " | "
                        + new String(column) + " | " + new String(value));
            }
            System.out.println("---------------------------------");
        }
        scanner.close();
        t_user_info.close();
    }
}

Comparison mechanisms (comparators): BinaryComparator, BinaryPrefixComparator, NullComparator, BitComparator, RegexStringComparator, SubstringComparator. Each one plugs into a compare-style filter, as sketched below.
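For example, a comparator combined with a ValueFilter keeps only the cells whose value matches it. A minimal sketch (the class name ValueFT is mine; it assumes the same ZooKeeper quorum, the t_user_info table and the TestFilter helper above):

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;

import java.io.IOException;

public class ValueFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);

        // Keep only cells whose value contains the substring "zhang".
        ValueFilter valueFilter = new ValueFilter(CompareFilter.CompareOp.EQUAL,
                new SubstringComparator("zhang"));
        TestFilter testFilter = new TestFilter(conn, TableName.valueOf("t_user_info"), valueFilter);
        testFilter.getValues();
        conn.close();
    }
}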
Dedicated filters
PrefixFilter: row-key prefix filter, keeps rows whose key starts with the given prefix

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.PrefixFilter;

import java.io.IOException;

/**
 * @Author lgr
 * @Date 2020-03-25 14:01
 */
public class PrefixFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);

        PrefixFilter prefixFilter = new PrefixFilter("liu".getBytes());
        TestFilter testFilter = new TestFilter(conn,TableName.valueOf("t_user_info"),prefixFilter);
        testFilter.getValues();
        conn.close();
    }
}

RowFilter: row-key filter; with CompareOp.LESS and a BinaryComparator it returns the rows whose key sorts before the given key

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RowFilter;

import java.io.IOException;

/**
 * @Author lgr
 * @Date 2020-03-25 14:19
 */
public class RowFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.LESS, new BinaryComparator("user002".getBytes()));
        TestFilter testFilter = new TestFilter(conn, TableName.valueOf("t_user_info"),rowFilter);
        testFilter.getValues();
        conn.close();
    }
}

FamilyFilter: column-family filter

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FamilyFilter;

import java.io.IOException;

/**
 * @Author lgr
 * @Date 2020-03-25 14:55
 */
public class ColumnFamilyFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);

        FamilyFilter familyFilter = new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator("base_info".getBytes()));
        TestFilter testFilter = new TestFilter(conn, TableName.valueOf("t_user_info".getBytes()),familyFilter);
        testFilter.getValues();
        conn.close();
    }
}

QualifierFilter: column (qualifier) name filter

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.QualifierFilter;

import java.io.IOException;

/**
 * @Author lgr
 * @Date 2020-03-25 15:08
 */
public class ColumnFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);

        QualifierFilter qualifierFilter = new QualifierFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator("user".getBytes()));
        TestFilter testFilter = new TestFilter(conn, TableName.valueOf("t_user_info".getBytes()), qualifierFilter);
        testFilter.getValues();
        conn.close();
    }
}

SingleColumnValueFilter: column-value filter; selects rows based on the value of one column

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;

import java.io.IOException;

/**
 * @Author lgr
 * @Date 2020-03-25 15:16
 */
public class SingleValueFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);

        // Regex comparator: matches values that start with "zhang".
        RegexStringComparator comparator1 = new RegexStringComparator("^zhang");
        // Substring comparator: would match values containing "si"; defined as an
        // alternative but not applied below (swap it in for comparator1 to try it).
        SubstringComparator comparator2 = new SubstringComparator("si");

        SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter("base_info".getBytes(), "username".getBytes(), CompareFilter.CompareOp.EQUAL, comparator1);
        TestFilter testFilter = new TestFilter(conn, TableName.valueOf("t_user_info"), singleColumnValueFilter);
        testFilter.getValues();
        conn.close();
    }
}
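One caveat with SingleColumnValueFilter: by default a row that does not contain the tested column at all is still returned. If such rows should be dropped, turn on filterIfMissing on the filter above before running the scan:

        // Skip rows that have no base_info:username cell instead of passing them through.
        singleColumnValueFilter.setFilterIfMissing(true);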

Using several filters at once (FilterList)

package filter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.filter.*;

import java.io.IOException;

/**
 * @Author lgr
 * @Date 2020-03-25 15:22
 */
public class MultiFT {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node1:2181,node2:2181,node3:2181");
        Connection conn = ConnectionFactory.createConnection(conf);

        FamilyFilter familyFilter = new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator("base".getBytes()));
        ColumnPrefixFilter columnPrefixFilter = new ColumnPrefixFilter("pass".getBytes());

        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        filterList.addFilter(familyFilter);
        filterList.addFilter(columnPrefixFilter);

        TestFilter testFilter = new TestFilter(conn, TableName.valueOf("t_user_info".getBytes()), filterList);
        testFilter.getValues();
        conn.close();
    }
}
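FilterList.Operator.MUST_PASS_ONE combines the filters with a logical OR: a cell passes if any filter in the list accepts it. To require every filter to match (logical AND), build the list with MUST_PASS_ALL instead, for example (inside the same main method):

        FilterList andList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        andList.addFilter(familyFilter);
        andList.addFilter(columnPrefixFilter);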
