[Big Data] Series No.19: HBase Shell and Java API CRUD, plus Reading and Writing HBase from MapReduce

This post covers:
1. Common HBase shell commands
2. CRUD operations on HBase through the Java API
3. A MapReduce job that reads rows from HBase, counts occurrences of a value, and writes the counts back to HBase

Log in to the HBase shell

[root@master ~]#  /home/softs/hbase-0.98.12.1-hadoop2/bin/hbase shell

1. Table operations

Create a table:    create 'test', 'cf'     # 'test' is the table name, 'cf' the column family
Scan a table:      scan 'test'

2. CRUD operations

Insert data:       put 'test', 'row1', 'cf:username', 'value1'    # 'row1' is the row key, 'username' the column qualifier
List tables:       list                    # lists table names; use get or scan to read data
Get by row key:    get 'test', 'row1'
Drop a table:      disable 'test' then drop 'test'                # a table must be disabled before it can be dropped

Sample output of scanning the 'user' table used by the Java API and MapReduce examples below:


hbase(main):001:0> scan 'user'
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/softs/hbase-0.98.12.1-hadoop2/lib/slf4j-log4j12-1.6.4.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/softs/hadoop-2.5.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2018-06-29 04:03:54,274 WARN  [main] util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
ROW                                 COLUMN+CELL                                                                                            
 2                                  column=col1:count, timestamp=1530214622282, value=1                                                    
 userId1                            column=col1:age, timestamp=1530201359347, value=2                                                      
 userId1                            column=col1:name, timestamp=1530203162657, value=xiaohong                                              
 userId1                            column=col2:age, timestamp=1530203197562, value=33                                                     
 userId1                            column=col2:name, timestamp=1530201359347, value=\xE5\xB0\x8F\xE7\xBA\xA2                              
2 row(s) in 0.3550 seconds

hbase(main):002:0> 

Cluster information can also be viewed in the HBase master web UI (for this 0.98 release it listens on port 60010 by default, e.g. http://master:60010)



Operating HBase through the Java API

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class HbaseCrub {

    HBaseAdmin hbase;
    HTable table;

    String user = "user";

    String col1="col1";

    String col2="col2";

    @Before
    public void before() throws Exception {
        /** Load the HBase client configuration */
        Configuration configuration = HBaseConfiguration.create();

        /** Point the client at the cluster's ZooKeeper quorum */
        configuration.set("hbase.zookeeper.quorum", "master,node1,node2");

        /** Admin handle, used for table (DDL) operations */
        hbase = new HBaseAdmin(configuration);

        /** Table handle, used for data (CRUD) operations */
        table = new HTable(configuration, user.getBytes());
    }

    @After
    public void end() throws  Exception{

        if(hbase != null) {
            hbase.close();
        }
        if(table != null) {
            table.close();
        }
    }


    @Test
    public void createTable() throws  Exception{
        if(hbase.tableExists(user.getBytes())){

            /* Disable the existing table first, then delete it */
            hbase.disableTable(user.getBytes());
            hbase.deleteTable(user.getBytes());
        }

        HTableDescriptor descriptor =new HTableDescriptor(TableName.valueOf(user));

        /** Column families must be defined before the table is created */
        HColumnDescriptor columnDescriptor=new HColumnDescriptor(col1.getBytes());
        /** Keep this family's blocks in the in-memory section of the block cache */
        columnDescriptor.setInMemory(true);
        descriptor.addFamily(columnDescriptor);


        HColumnDescriptor columnDescriptor2=new HColumnDescriptor(col2.getBytes());

        /** col2 is not flagged as in-memory */
        columnDescriptor2.setInMemory(false);
        descriptor.addFamily(columnDescriptor2);

        hbase.createTable(descriptor);
    }
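
    /* For reference, a rough shell equivalent of createTable would be
     * (assuming the same table and family names):
     *   create 'user', {NAME => 'col1', IN_MEMORY => 'true'}, 'col2'
     */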


    @Test
    public void insertUser() throws  Exception{
        /** Row key identifying this user */
        String  rowKey ="userId1";

        Put put =new Put(rowKey.getBytes());
        put.add(col1.getBytes(),"name".getBytes(),"小石头".getBytes());
        put.add(col1.getBytes(),"age".getBytes(),"2".getBytes());

        put.add(col2.getBytes(),"name".getBytes(),"小红".getBytes());

        table.put(put);
    }

    @Test
    public void deleteUser() throws  Exception{

        /** Row key of the record to delete */
        Delete delete =new Delete("userId1".getBytes());

        delete.deleteColumn(col1.getBytes(),"name".getBytes());

        table.delete(delete);
    }
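
    /* Note: deleteColumn removes only the most recent version of col1:name.
     * Use delete.deleteColumns(col1.getBytes(), "name".getBytes()) to remove every version,
     * or a Delete with no columns added to remove the entire row. */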


    @Test
    public void getByUserId() throws  Exception{

        /** Row key to look up */
        Get get =new Get("userId1".getBytes());

        /** Restrict the result to the col1:age column */
        get.addColumn(col1.getBytes(),"age".getBytes());

        Result result= table.get(get);

        // Latest cell of the requested column
        Cell cell=result.getColumnLatestCell(col1.getBytes(),"age".getBytes());

        System.out.println(new String(CellUtil.cloneValue(cell)));

    }


    @Test
    public void listUsers() throws  Exception{
        /**
         * Scan returns multiple rows.
         * Avoid full-table scans where possible:
         * 1. restrict the range with a start and stop row key
         * 2. use filters with care; they are evaluated server-side against every row in the range
         */

        Scan scan = new Scan();
        scan.setStartRow("userId0".getBytes());
        scan.setStopRow("userId3".getBytes());

        // Only keep rows whose col1:age equals "2".
        // Note: by default, rows that lack the col1:age column still pass this filter;
        // call filter1.setFilterIfMissing(true) to drop them as well.
        SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
                col1.getBytes(), "age".getBytes(), CompareFilter.CompareOp.EQUAL, "2".getBytes());
        scan.setFilter(filter1);

        ResultScanner results= table.getScanner(scan);
        results.forEach(result -> {
            System.out.print(new String(result.getValue(col1.getBytes(),"name".getBytes()))+"\t");
            System.out.println(new String(result.getValue(col1.getBytes(),"age".getBytes())));
        });



    }

}
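
The test methods above cover single-row access and a filtered range scan. As a complement, the short sketch below dumps every cell of the table, which is roughly what the shell's scan 'user' printed earlier. It is only a sketch of the 0.98 client API, assuming the same quorum string and table name as the test class:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanUser {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "master,node1,node2");

        HTable table = new HTable(conf, "user");
        try {
            /** An empty Scan is a full-table scan; fine for a tiny demo table */
            ResultScanner scanner = table.getScanner(new Scan());
            for (Result result : scanner) {
                for (Cell cell : result.rawCells()) {
                    System.out.println(Bytes.toString(CellUtil.cloneRow(cell))
                            + "  column=" + Bytes.toString(CellUtil.cloneFamily(cell))
                            + ":" + Bytes.toString(CellUtil.cloneQualifier(cell))
                            + ", value=" + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
            scanner.close();
        } finally {
            table.close();
        }
    }
}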


Integrating HBase with MapReduce: read rows from HBase, count the occurrences of a value (here the col1:age value), and write the counts back to HBase

Job class

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class WCJob {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();

        /** Run from the local machine against the cluster */
        conf.set("fs.defaultFS", "hdfs://master:8020");
        conf.set("hbase.zookeeper.quorum", "master,node1,node2");

        Job job = Job.getInstance(conf);

        job.setJarByClass(WCJob.class);

        /** Scan used to read the input rows from HBase */
        Scan scan = new Scan();

        TableMapReduceUtil.initTableMapperJob("user", scan, WCMapper.class, Text.class, IntWritable.class, job, false);

        /**
         * The last argument (addDependencyJars) is set to false because the job
         * is launched locally; keep this in mind when submitting to a cluster.
         */
        TableMapReduceUtil.initTableReducerJob("user", WCReducer.class, job, null, null, null, null, false);

        job.waitForCompletion(true);
    }
}
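
A quick note on what these two helpers do: initTableMapperJob configures the job to read from HBase through TableInputFormat, so each call to map receives one row as a Result, while initTableReducerJob configures the output side with TableOutputFormat, so the Put objects emitted by the reducer are written straight back into the 'user' table.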

Mapper class: emit an (age, 1) pair for every row read from HBase

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class WCMapper extends TableMapper<Text, IntWritable> {

    /**
     * map is called once for every row read from the table
     */
    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {

        byte[] ageBytes = value.getValue("col1".getBytes(), "age".getBytes());
        if (ageBytes == null) {
            /** Skip rows that do not have the col1:age column */
            return;
        }

        context.write(new Text(new String(ageBytes)), new IntWritable(1));
    }
}
Reducer class: sum the counts and write the result back to HBase
import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

/**
 * Text, IntWritable must match the output types of the Mapper
 */
public class WCReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        int num = 0;

        for (IntWritable in : values) {
            num += in.get();
        }

        /** Use the counted value (the age) as the row key and write the total back to HBase */
        Put put = new Put(key.toString().getBytes());
        put.add("col1".getBytes(), "count".getBytes(), String.valueOf(num).getBytes());

        context.write(null, put);
    }
}

Checking the result: the count row appears as expected (it is the row with key 2 and column col1:count, value 1, visible in the scan output near the top of this post).
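
To double-check the count programmatically, a small Get against the count row works as well. A minimal sketch, again assuming the same cluster settings; the row key "2" is simply the age value that was counted:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckCount {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "master,node1,node2");

        HTable table = new HTable(conf, "user");
        try {
            // The reducer used the age value ("2") as the row key of the count row
            Get get = new Get("2".getBytes());
            get.addColumn("col1".getBytes(), "count".getBytes());
            Result result = table.get(get);
            System.out.println("count = "
                    + Bytes.toString(result.getValue("col1".getBytes(), "count".getBytes())));
        } finally {
            table.close();
        }
    }
}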


