HBase第四天

学习HBase第四天
1.创建表dashuju

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

import java.io.IOException;

// Creates table "dashuju" with two column families, f1 and f2.
public class CreateUser {
    public static void main(String[] args) throws IOException {
        // Build the HBase client configuration.
        Configuration configuration = HBaseConfiguration.create();
        // Point the client at the ZooKeeper quorum that coordinates the cluster.
        configuration.set("hbase.zookeeper.quorum", "hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open a connection to the HBase cluster.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Admin handles DDL operations (create/disable/delete table).
        Admin admin = connection.getAdmin();
        try {
            TableName tableName = TableName.valueOf("dashuju");
            // Guard against TableExistsException when this example is re-run.
            if (!admin.tableExists(tableName)) {
                // Describe the table: two column families, f1 and f2.
                HTableDescriptor hTableDescriptor = new HTableDescriptor(tableName);
                hTableDescriptor.addFamily(new HColumnDescriptor("f1"));
                hTableDescriptor.addFamily(new HColumnDescriptor("f2"));
                admin.createTable(hTableDescriptor);
            }
        } finally {
            // Release client resources even if table creation fails.
            admin.close();
            connection.close();
        }
    }

}

2、向表中添加数据

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// Inserts one row ("rk001") into table "dashuju":
// f1:name, f1:age go to family f1; f2:address, f2:phone go to family f2.
public class PutData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");

        // Connect and obtain a handle to the target table.
        Connection conn = ConnectionFactory.createConnection(conf);
        Table dashuju = conn.getTable(TableName.valueOf("dashuju"));

        // A Put accumulates all cells for a single rowkey.
        Put row = new Put("rk001".getBytes());
        // Note: age is written via Bytes.toBytes(int), so readers must decode it as an int.
        row.addColumn("f1".getBytes(), "name".getBytes(), "zhangsan".getBytes());
        row.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(20));
        row.addColumn("f2".getBytes(), "address".getBytes(), "diyu".getBytes());
        row.addColumn("f2".getBytes(), "phone".getBytes(), "8888888888".getBytes());

        // Single-row write, then release resources.
        dashuju.put(row);
        dashuju.close();
        conn.close();
    }
}

3.获取单条数据

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// Fetches the single row "rk001" and prints every cell value,
// decoding the "age" column as an int and all other columns as Strings.
public class GetData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        // Get = point lookup by rowkey.
        Get get = new Get("rk001".getBytes());
        Result result = table.get(get);

        // Iterate over the raw cells of the row.
        // Use the array/offset/length accessors: Cell.getValue()/getQualifier()
        // are deprecated and fail on modern cell implementations.
        Cell[] cells = result.rawCells();

        for (Cell cell : cells) {
            String qualifier = Bytes.toString(cell.getQualifierArray(),
                    cell.getQualifierOffset(), cell.getQualifierLength());
            if ("age".equals(qualifier)) {
                // "age" was written with Bytes.toBytes(int), so decode it as an int.
                System.out.println(Bytes.toInt(cell.getValueArray(), cell.getValueOffset()));
            }
            else {
                // Everything else was written as a String.
                System.out.println(Bytes.toString(cell.getValueArray(),
                        cell.getValueOffset(), cell.getValueLength()));
            }
        }
        // Release resources.
        table.close();
        connection.close();
    }
}

4.通过rowkey查询指定列族下面的指定列的值

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// Point lookup by rowkey, restricted to one column: f1:name of row "rk001".
public class GetFamilyData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        // Limit the Get to family f1, qualifier "name".
        Get get = new Get("rk001".getBytes());
        get.addColumn("f1".getBytes(), "name".getBytes());
        // Execute the lookup.
        Result result = table.get(get);

        // At most one cell is returned (the requested column).
        Cell[] cells = result.rawCells();

        for (Cell cell : cells) {
            // Use array/offset/length accessors: Cell.getValue() is deprecated
            // and fails on modern cell implementations.
            System.out.println(Bytes.toString(cell.getValueArray(),
                    cell.getValueOffset(), cell.getValueLength()));
        }
        // Release resources.
        table.close();
        connection.close();
    }
}

5.批量添加数据

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Batch-inserts six rows into table "dashuju" with a single table.put(List) call.
public class BatchData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        // One Put per row. Rowkeys 0002..0007 are unique: the original code
        // reused "0005" for both 诸葛亮 and 司马懿, so one row silently
        // overwrote the other; it also reused id 5 for 吕布 (now 6).
        List<Put> list = new ArrayList<>();
        list.add(buildPut("0002", 1, "曹操", 30, "沛国谯县", "16888888888", "helloworld"));
        list.add(buildPut("0003", 2, "刘备", 32, "幽州涿郡涿县", "17888888888", "talk is cheap , show me the code"));
        list.add(buildPut("0004", 3, "孙权", 35, "下邳", "12888888888", "what are you 弄啥嘞!"));
        list.add(buildPut("0005", 4, "诸葛亮", 28, "四川隆中", "14888888888", "出师表你背了嘛"));
        list.add(buildPut("0006", 5, "司马懿", 27, "哪里人有待考究", "15888888888", "跟诸葛亮死掐"));
        list.add(buildPut("0007", 6, "xiaobubu—吕布", 28, "内蒙人", "15788888888", "貂蝉去哪了"));

        // One round of buffered writes for all six rows.
        table.put(list);

        // Release resources.
        table.close();
        connection.close();

    }

    /**
     * Builds one row: id/name/age go to family f1; sex/address/phone/say to f2.
     * sex is hard-coded to "1", matching every row in the original data set.
     */
    private static Put buildPut(String rowkey, int id, String name, int age,
                                String address, String phone, String say) {
        Put put = new Put(rowkey.getBytes());
        put.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(id));
        put.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes(name));
        put.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(age));
        put.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes(address));
        put.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes(phone));
        put.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes(say));
        return put;
    }
}

6.通过scan进行全表扫描

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// Full-table scan: prints family, qualifier, and value for every cell of every row.
public class ScanDataa {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        // An unbounded Scan visits every row.
        Scan scan = new Scan();
        // try-with-resources: the original leaked the ResultScanner
        // (it holds a server-side scanner lease until closed).
        try (ResultScanner scanner = table.getScanner(scan)) {
            // Each Result is one row.
            for (Result result : scanner) {
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    // Column family.
                    System.out.println(Bytes.toString(cell.getFamilyArray(),
                            cell.getFamilyOffset(), cell.getFamilyLength()));
                    // Column qualifier.
                    System.out.println(Bytes.toString(cell.getQualifierArray(),
                            cell.getQualifierOffset(), cell.getQualifierLength()));
                    // Cell value.
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }
        // Release resources.
        table.close();
        connection.close();
    }
}

hbase 过滤器的比较运算符

  • LESS <
  • LESS_OR_EQUAL <=
  • EQUAL =
  • NOT_EQUAL <>(不等于)
  • GREATER_OR_EQUAL >=
  • GREATER >
  • NO_OP 排除所有

Hbase过滤器的比较器(指定比较机制)

  • BinaryComparator 按字节索引顺序比较指定字节数组,采用Bytes.compareTo(byte[])
  • BinaryPrefixComparator 跟前面相同,只是比较左端的数据是否相同
  • NullComparator 判断给定的是否为空
  • RegexStringComparator 提供一个正则的比较器,仅支持EQUAL和非EQUAL
  • SubstringComparator 判断提供的子串是否出现在value中

7.rowkey的范围查询

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// Rowkey range scan: returns rows in ["0002", "0005") — the stop row is exclusive.
public class ScanStartRowData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        // Bounded scan: start row inclusive, stop row exclusive.
        Scan scan = new Scan();
        scan.setStartRow("0002".getBytes());
        scan.setStopRow("0005".getBytes());

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then every cell value of the row.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }

        // Release resources.
        table.close();
        connection.close();
    }
}

8.通过RowFilter过滤出rowKey小于等于0003的所有数据(LESS_OR_EQUAL)

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// RowFilter example: returns all rows whose rowkey is <= "0003"
// (LESS_OR_EQUAL, byte-wise comparison via BinaryComparator).
public class RowFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Keep rows with rowkey <= "0003" (note: LESS_OR_EQUAL, not strictly less).
        RowFilter filter = new RowFilter(CompareFilter.CompareOp.LESS_OR_EQUAL,
                new BinaryComparator("0003".getBytes()));

        // Attach the filter to the scan.
        scan.setFilter(filter);

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then every cell value of the row.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }
        // Release resources.
        table.close();
        connection.close();
    }
}

9.列族过滤器FamilyFilter

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// FamilyFilter example: returns cells whose column family compares
// strictly LESS than "f2" byte-wise — i.e. only family f1's cells.
public class FamilyFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        // Open the cluster connection.
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Keep cells from families ordered before "f2" (here: "f1").
        FamilyFilter filter = new FamilyFilter(CompareFilter.CompareOp.LESS,
                new BinaryComparator("f2".getBytes()));

        // Attach the filter to the scan.
        scan.setFilter(filter);

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then family and value of each surviving cell.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    // Column family.
                    System.out.println(Bytes.toString(cell.getFamilyArray(),
                            cell.getFamilyOffset(), cell.getFamilyLength()));
                    // Cell value.
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }
        // Release resources.
        table.close();
        connection.close();
    }
}

10.列过滤器QualifierFilter

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
// QualifierFilter example: returns only cells whose column qualifier
// equals "name" (any family), for every row in the table.
public class QualifierFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();
        // Keep only cells whose qualifier is exactly "name".
        QualifierFilter filter = new QualifierFilter(CompareFilter.CompareOp.EQUAL,
                new BinaryComparator("name".getBytes()));

        // Attach the filter to the scan.
        scan.setFilter(filter);

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then the value of each surviving cell.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }

        // Release resources.
        table.close();
        connection.close();
    }
}

11.列值过滤器ValueFilter

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// ValueFilter example: returns cells whose value contains the substring "8".
public class ValueFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();
        // SubstringComparator + EQUAL keeps cells whose value contains "8".
        ValueFilter valueFilter = new ValueFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator("8"));
        // Attach the filter to the scan.
        scan.setFilter(valueFilter);
        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then the value of each surviving cell.
                System.out.println(Bytes.toString(result.getRow()));

                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }

        // Release resources.
        table.close();
        connection.close();
    }
}

12.单列值过滤器 SingleColumnValueFilter

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// SingleColumnValueFilter example: returns whole rows where f1:name equals "刘备".
// (The original header comment mislabeled this as the "exclude" filter;
// the exclude variant is SingleColumnValueExcludeFilter, shown separately.)
public class SingleColumnValueFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Match rows where column f1:name == "刘备"; matching rows are
        // returned in full (including the tested column).
        SingleColumnValueFilter filter = new SingleColumnValueFilter("f1".getBytes(),
                "name".getBytes(), CompareFilter.CompareOp.EQUAL, "刘备".getBytes());
        // Attach the filter to the scan.
        scan.setFilter(filter);
        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then every cell value of the row.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }
        // Release resources.
        table.close();
        connection.close();

    }
}

13.列值排除过滤器SingleColumnValueExcludeFilter

  • 与SingleColumnValueFilter相反,会排除掉指定的列,其他的列全部返回
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// SingleColumnValueExcludeFilter example: like SingleColumnValueFilter
// (rows where f1:name == "刘备"), but the tested column itself is
// omitted from the returned cells.
public class SingleColumnValueExcludeFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Match rows where f1:name == "刘备"; exclude f1:name from the output.
        SingleColumnValueExcludeFilter filter = new SingleColumnValueExcludeFilter("f1".getBytes(),
                "name".getBytes(), CompareFilter.CompareOp.EQUAL, "刘备".getBytes());

        // Attach the filter to the scan.
        scan.setFilter(filter);

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then the remaining cell values of the row.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }

        // Release resources.
        table.close();
        connection.close();
    }
}

14.rowkey前缀过滤器PrefixFilter

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// PrefixFilter example: returns rows whose rowkey starts with "rk" (e.g. rk001).
// NOTE(review): the original comment said prefix "00", but the code filters
// on "rk" — the code's behavior is kept here; confirm which was intended.
public class PrefixFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Keep rows whose rowkey begins with the byte prefix "rk".
        PrefixFilter filter = new PrefixFilter("rk".getBytes());

        // Attach the filter to the scan.
        scan.setFilter(filter);

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then every cell value of the row.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }

        // Release resources.
        table.close();
        connection.close();
    }
}

15.分页过滤器PageFilter

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// PageFilter example: fetches page `pagenum` (pagesize rows per page).
// For pages after the first, a preliminary scan of (pagenum-1)*pagesize+1
// rows finds the start rowkey of the requested page.
// NOTE(review): PageFilter limits rows per region server, so on a
// multi-region table the page boundaries are approximate — confirm
// the table is single-region before relying on exact paging.
public class PageFilterData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Which page to fetch, and how many rows per page.
        int pagenum = 3;
        int pagesize = 2;

        try {
            if (pagenum == 1) {
                // First page: start from the beginning of the table.
                scan.setStartRow("".getBytes());
                scan.setFilter(new PageFilter(pagesize));
                // try-with-resources: the original leaked the ResultScanner.
                try (ResultScanner scanner = table.getScanner(scan)) {
                    printPage(scanner);
                }
            } else {
                // Later page: first scan forward (pagenum-1)*pagesize+1 rows;
                // the LAST row seen is the start row of the requested page.
                String startrow = "";
                scan.setFilter(new PageFilter((pagenum - 1) * pagesize + 1));
                try (ResultScanner scanner = table.getScanner(scan)) {
                    for (Result result : scanner) {
                        startrow = Bytes.toString(result.getRow());
                    }
                }

                // Second scan: pagesize rows starting at the computed start row.
                Scan pageScan = new Scan();
                pageScan.setFilter(new PageFilter(pagesize));
                pageScan.setStartRow(startrow.getBytes());
                try (ResultScanner scanner = table.getScanner(pageScan)) {
                    // Matches the original output: only rowkeys for this branch.
                    for (Result result : scanner) {
                        System.out.println(Bytes.toString(result.getRow()));
                    }
                }
            }
        } finally {
            // The original never released these; close even on failure.
            table.close();
            connection.close();
        }
    }

    // Prints the rowkey and every cell value for each row in the page.
    private static void printPage(ResultScanner scanner) {
        for (Result result : scanner) {
            System.out.println(Bytes.toString(result.getRow()));
            for (Cell cell : result.rawCells()) {
                System.out.println(Bytes.toString(cell.getValueArray(),
                        cell.getValueOffset(), cell.getValueLength()));
            }
        }
    }
}

16.多过滤器综合查询FilterList

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

// FilterList example: combines SingleColumnValueFilter (f1:name == "刘备")
// with PrefixFilter (rowkey starts with "00"). FilterList's default
// operator is MUST_PASS_ALL, so a row must satisfy BOTH filters.
public class FilterListData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Handle to the target table.
        Table table = connection.getTable(TableName.valueOf("dashuju"));

        Scan scan = new Scan();

        // Condition 1: column f1:name must equal "刘备".
        SingleColumnValueFilter filter = new SingleColumnValueFilter("f1".getBytes(),
                "name".getBytes(), CompareFilter.CompareOp.EQUAL, "刘备".getBytes());
        // Condition 2: rowkey must start with "00".
        PrefixFilter prefixFilter = new PrefixFilter("00".getBytes());

        // Combine both (AND semantics by default).
        FilterList filterList = new FilterList();
        filterList.addFilter(filter);
        filterList.addFilter(prefixFilter);

        // Attach the combined filter to the scan.
        scan.setFilter(filterList);

        // try-with-resources: the original leaked the ResultScanner.
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                // Print the rowkey, then every cell value of the row.
                System.out.println(Bytes.toString(result.getRow()));
                Cell[] cells = result.rawCells();
                for (Cell cell : cells) {
                    System.out.println(Bytes.toString(cell.getValueArray(),
                            cell.getValueOffset(), cell.getValueLength()));
                }
            }
        }

        // Release resources.
        table.close();
        connection.close();

    }

}

17.根据rowkey删除数据

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;

import java.io.IOException;

// Deletes the entire row "rk002" from table "dashuju".
public class DeleteData {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");

        // Connect and obtain a handle to the target table.
        Connection conn = ConnectionFactory.createConnection(conf);
        Table dashuju = conn.getTable(TableName.valueOf("dashuju"));

        // A Delete keyed only by rowkey removes every cell of that row.
        dashuju.delete(new Delete("rk002".getBytes()));

        // Release resources.
        dashuju.close();
        conn.close();
    }
}

18.删除表操作

package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

import java.io.IOException;

// Disables and drops table "staff".
public class DeleteTable {
    public static void main(String[] args) throws IOException {
        // Client configuration pointing at the ZooKeeper quorum.
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","hadoop1:2181,hadoop2:2181,hadoop3:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);

        // Admin handles DDL operations.
        Admin admin = connection.getAdmin();
        try {
            TableName tableName = TableName.valueOf("staff");
            // Guard: deleting a missing table, or disabling an already-disabled
            // one, throws — so check state before each step.
            if (admin.tableExists(tableName)) {
                if (!admin.isTableDisabled(tableName)) {
                    // A table must be offline before it can be deleted.
                    admin.disableTable(tableName);
                }
                admin.deleteTable(tableName);
            }
        } finally {
            // Release client resources even if the delete fails.
            admin.close();
            connection.close();
        }
    }
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值