HBase API Overview
- Setup and preparation
- HBase DDL operations
- HBase DML operations
- HBase query operations: read the value of a single column in a given column family; the query must specify a concrete rowKey (see the sketch after this list)
- HBase part 6: RowKey design
https://www.cnblogs.com/duanxz/p/4660784.html
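A minimal sketch of such a point query, assuming a table named phone with column cf:name (the table, family, qualifier, and rowKey are all hypothetical; the ZooKeeper quorum matches the cluster used later in these notes):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class GetOneColumn {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node02,node03,node04");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("phone"))) {    // hypothetical table
            Get get = new Get(Bytes.toBytes("row-0001"));                  // a Get always needs a concrete rowKey
            get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("name"));     // narrow the fetch to one column
            Result result = table.get(get);
            byte[] value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("name"));
            System.out.println(value == null ? "(no value)" : Bytes.toString(value));
        }
    }
}

getValue returns null when the cell is absent, so check before decoding.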
HBase with Protocol Buffers
- Install Google Protocol Buffers
List the installed package groups:
Check the package groups the software depends on:
Install the required build tools:
- Edit phone.proto
- Use the protoc tool to generate the corresponding Java classes
- Wrap multiple columns into one object and insert the whole object into an HBase table, as sketched below
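A sketch of that insert, assuming protoc generated a class Phone.PhoneDetail from phone.proto with string fields num and name (the message layout, table, and rowKey are hypothetical): the whole object is serialized with toByteArray() and stored in a single cell instead of one column per field.

package com.bjsxt.wc; // hypothetical package; Phone is the protoc-generated outer class

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class PutProtobufObject {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node02,node03,node04");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("phone"))) {
            // Build the message with the generated builder, then serialize it to bytes.
            Phone.PhoneDetail detail = Phone.PhoneDetail.newBuilder()
                    .setNum("13800000000")
                    .setName("zhangsan")
                    .build();
            Put put = new Put(Bytes.toBytes("18600000000_20200101")); // hypothetical rowKey
            put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("detail"), detail.toByteArray());
            table.put(put);
        }
    }
}

Reading it back is the mirror image: fetch the cell bytes with a Get and rebuild the object with Phone.PhoneDetail.parseFrom(bytes).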
HBase Optimization
For server-side tuning, see the HBase optimization documentation (the Apache HBase Reference Guide has a dedicated performance chapter).
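On the client side, two knobs come up repeatedly: batching writes through a BufferedMutator and tuning scan caching. A hedged sketch (the table name and all values are illustrative, not recommendations):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ClientTuning {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node02,node03,node04");
        try (Connection conn = ConnectionFactory.createConnection(conf)) {
            // Buffer puts client-side instead of paying one RPC per Put.
            BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf("phone"))
                    .writeBufferSize(4 * 1024 * 1024); // 4 MB buffer; illustrative
            try (BufferedMutator mutator = conn.getBufferedMutator(params)) {
                Put put = new Put(Bytes.toBytes("row-0001"));
                put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("name"), Bytes.toBytes("x"));
                mutator.mutate(put); // buffered; flushed in bulk on close()
            }

            // For large one-off scans: fetch more rows per RPC and skip the block cache.
            Scan scan = new Scan();
            scan.setCaching(500);
            scan.setCacheBlocks(false);
            // scan would then be passed to table.getScanner(scan)
        }
    }
}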
Integrating HBase with MapReduce
The word-count job below reads text from HDFS and writes per-word counts into the HBase table wc. It has three parts: a driver (WCRunner), an ordinary HDFS mapper (WCMapper), and a TableReducer (WCReducer).
package com.bjsxt.wc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class WCRunner {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // NameNode of the HDFS cluster that holds the input text.
        conf.set("fs.defaultFS", "hdfs://node02:8020");
        // ZooKeeper quorum through which the HBase client finds the cluster.
        conf.set("hbase.zookeeper.quorum", "node02,node03,node04");

        Job job = Job.getInstance(conf);
        job.setJarByClass(WCRunner.class);

        // Mapper and its output types.
        job.setMapperClass(WCMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Wire the reducer to the HBase table "wc";
        // the last parameter (addDependencyJars) is set to false.
        TableMapReduceUtil.initTableReducerJob("wc", WCReducer.class, job, null, null, null, null, false);

        FileInputFormat.addInputPath(job, new Path("/user/hive/warehouse/wc/"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
The mapper reads plain text lines from HDFS and emits (word, 1) pairs:

package com.bjsxt.wc;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Split each line on single spaces and emit every token with a count of 1.
        for (String token : value.toString().split(" ")) {
            word.set(token);
            context.write(word, ONE);
        }
    }
}
The reducer sums the counts and writes each word into HBase as one row:

package com.bjsxt.wc;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class WCReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

    @Override
    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable count : counts) {
            sum += count.get();
        }
        // The word itself is the rowKey; the count goes into column cf:ct.
        // Put.addColumn replaces the Put.add(byte[], byte[], byte[]) call,
        // which was removed in HBase 2.x.
        byte[] rowKey = Bytes.toBytes(word.toString());
        Put put = new Put(rowKey);
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("ct"), Bytes.toBytes(String.valueOf(sum)));
        context.write(new ImmutableBytesWritable(rowKey), put);
    }
}
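The reducer writes to table wc, column family cf, so that table must exist before the job is submitted. A minimal sketch using the HBase 2.x Admin builder API (the 2.x class names are an assumption; older clients use HTableDescriptor/HColumnDescriptor for the same purpose):

package com.bjsxt.wc;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CreateWcTable {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node02,node03,node04");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            TableName name = TableName.valueOf("wc");
            if (!admin.tableExists(name)) {
                // One column family "cf", matching what WCReducer writes into.
                admin.createTable(TableDescriptorBuilder.newBuilder(name)
                        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
                        .build());
            }
        }
    }
}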