Kept here as a backup; every example in this category has been tested by me personally.
package jyw.test;
import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
/*
 * A simple MapReduce word count that writes its results into an HBase table.
 */
public class WordCountHBase {
    /* Mapper implementation.
     * Input key type: the byte offset of the current line in the input file (LongWritable).
     * Input value type: the line text (Text).
     * Output key type: a word (Text).
     * Output value type: a count (IntWritable).
     */
    public static class Map extends
            Mapper<LongWritable, Text, Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split the line into tokens and emit <word, 1> for each token.
            StringTokenizer itr = new StringTokenizer(value.toString());
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                context.write(word, one);
            }
        }
    }
    /* Reducer implementation (a TableReducer that writes into HBase).
     * Input key type: the map output key (Text).
     * Input value type: the map output value (IntWritable).
     * A TableReducer's output key is ignored, so NullWritable is used here;
     * the output value is a Put that gets written to the target table.
     */
    public static class Reduce extends
            TableReducer<Text, IntWritable, NullWritable> {
        public static Configuration configuration = null;
        static {
            configuration = HBaseConfiguration.create();
            // configuration.set("hbase.master", "192.168.0.201:60000");
            // configuration.set("hbase.zookeeper.quorum", "idc01-hd-nd-03,idc01-hd-nd-04,idc01-hd-nd-05");
            // configuration.set("hbase.zookeeper.property.clientPort", "2181");
        }

        // Fetch the given row from the given table and return the value of the
        // last cell in the result (empty string if the row does not exist).
        public String selectRowKey(String tablename, String rowKey) throws IOException {
            HTable table = new HTable(configuration, tablename);
            try {
                Get g = new Get(rowKey.getBytes());
                Result rs = table.get(g);
                String retstr = "";
                for (KeyValue kv : rs.raw()) {
                    retstr = new String(kv.getValue());
                }
                return retstr;
            } finally {
                // Close the table so a connection is not leaked on every reduce() call.
                table.close();
            }
        }
        @Override
        public void reduce(Text key, Iterable<IntWritable> values,
                Context context) throws IOException, InterruptedException {
            int sum = 0;
            Iterator<IntWritable> iterator = values.iterator();
            while (iterator.hasNext()) {
                sum += iterator.next().get();
            }
            // Test query: read the value stored under row key "product" and use it as a prefix.
            String result = selectRowKey("wordcount", "product");
            // One Put per word; the row key is "<product value>_<word>".
            Put put = new Put(Bytes.toBytes(result + "_" + key.toString()));
            // Column family "content", qualifier "count", value = the word count.
            put.add(Bytes.toBytes("content"), Bytes.toBytes("count"),
                    Bytes.toBytes(String.valueOf(sum)));
            context.write(NullWritable.get(), put);
        }
    }
    // Create the HBase output table if it does not already exist.
    public static void createHBaseTable(String tableName)
            throws IOException {
        // Table descriptor
        HTableDescriptor htd = new HTableDescriptor(tableName);
        // Column family descriptor
        HColumnDescriptor col = new HColumnDescriptor("content");
        htd.addFamily(col);
        // HBase configuration
        Configuration conf = HBaseConfiguration.create();
        // conf.set("hbase.zookeeper.quorum", "127.0.0.1");
        // conf.set("hbase.zookeeper.property.clientPort", "2181");
        HBaseAdmin hAdmin = new HBaseAdmin(conf);
        if (hAdmin.tableExists(tableName)) {
            System.out.println("Table already exists.");
            // hAdmin.disableTable(tableName);
            // hAdmin.deleteTable(tableName);
        } else {
            System.out.println("Creating table: " + tableName);
            hAdmin.createTable(htd);
        }
    }
    public static void main(String[] args) throws Exception {
        String tableName = "wordcount";
        // Step 1: create the HBase table.
        WordCountHBase.createHBaseTable(tableName);
        // Step 2: run the MapReduce job.
        // MapReduce configuration.
        Configuration conf = new Configuration();
        // These settings are critical; uncomment and adjust them to match your cluster.
        // conf.set("mapred.job.tracker", "master:9001");
        // conf.set("hbase.zookeeper.quorum", "master");
        // conf.set("hbase.zookeeper.property.clientPort", "2181");
        conf.set(TableOutputFormat.OUTPUT_TABLE, tableName);
        Job job = new Job(conf, "New Word Count");
        job.setJarByClass(WordCountHBase.class);
        // Mapper and reducer classes
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        // Map output types
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Input and output formats
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TableOutputFormat.class);
        // Input directory
        FileInputFormat.addInputPath(job, new Path("hdfs://192.168.0.42:9000/user/jiayongwei/input/"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
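
After packaging the class into a jar and running it with hadoop jar, the counts can be checked with scan 'wordcount' in the hbase shell, or with a small standalone reader. The sketch below is a minimal, untested illustration using the same old-style client API as the example above (HTable, Scan, ResultScanner); the class name VerifyWordCount is my own choice, the table, column family, and qualifier names are the ones the job writes, and the commented-out ZooKeeper settings would need to match your cluster.

package jyw.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

// Minimal sketch (untested): scan the "wordcount" table and print each row's content:count value.
public class VerifyWordCount {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // conf.set("hbase.zookeeper.quorum", "master");
        // conf.set("hbase.zookeeper.property.clientPort", "2181");
        HTable table = new HTable(conf, "wordcount");
        try {
            Scan scan = new Scan();
            // Only fetch the single column the job writes.
            scan.addColumn(Bytes.toBytes("content"), Bytes.toBytes("count"));
            ResultScanner scanner = table.getScanner(scan);
            try {
                for (Result r : scanner) {
                    String rowKey = Bytes.toString(r.getRow());
                    String count = Bytes.toString(
                            r.getValue(Bytes.toBytes("content"), Bytes.toBytes("count")));
                    System.out.println(rowKey + " = " + count);
                }
            } finally {
                scanner.close();
            }
        } finally {
            table.close();
        }
    }
}

Note that because the reducer prefixes every row key with the value read from row "product", that row should be put into the table before the job runs; otherwise the prefix is simply an empty string and the row keys come out as "_word".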