1,首先是map类:
package com.hbase.maptohbase;
import java.io.IOException;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
public class Hmap extends Mapper<LongWritable,Text,Text,Text>{
public void map(LongWritable key,Text value,Context context){
try {
context.write(new Text(key.toString()), new Text(value));
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
------------------------------------------
2,然后是reduce类:
package com.hbase.maptohbase;
import java.io.IOException;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.Text;
public class Hreduce extends TableReducer<Text,Text,ImmutableBytesWritable>{
public void reduce(Text key,Iterable<Text> value,Context context){
String k = key.toString();
String[] qual = {"name","starttime","endtime","price"};
Put putrow = new Put(k.getBytes());
for (Text t:value) {
String[] v2 = t.toString().split(",");
for(int i =0; i<v2.length;i++){
putrow.add("f".getBytes(), qual[i].getBytes(), v2[i].getBytes());
}
}
try {
context.write(new ImmutableBytesWritable(key.getBytes()), putrow);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
--------------------------------------------
最后是driver类
package com.hbase.maptohbase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
/**
 * Job driver: reads text files from "in/", runs Hmap/Hreduce, and writes the
 * results into the HBase table "schedule" (column family "f" must exist).
 */
public class HmapDriver {
    public static void main(String[] args) throws Exception {
        // HBase configuration
        Configuration conf = HBaseConfiguration.create();
        // Property name must be exactly "hbase.zookeeper.quorum" — the original
        // had a trailing dot, so the setting was silently ignored.
        conf.set("hbase.zookeeper.quorum", "localhost");
        Job job = new Job(conf, "hmap");
        job.setJarByClass(HmapDriver.class);
        Path in = new Path("in/");
        job.setInputFormatClass(TextInputFormat.class);
        FileInputFormat.addInputPath(job, in);
        job.setMapperClass(Hmap.class);
        job.setReducerClass(Hreduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // Bind the reducer to the target HBase table.
        TableMapReduceUtil.initTableReducerJob("schedule", Hreduce.class, job);
        // Report failure through the exit code instead of always exiting 0.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
----------------------
最后是数据:
1.data:
D11,12:20:22,13:22:29,100
G22,12:22:11,23:00:00,230
2.data:
T11,12:20:22,13:22:29,100
t22,12:22:11,23:00:00,230
----------------------------------
这个时候还需要在hbase shell下创建一个表(列族f是必须的):
hbase(main):033:0> create 'schedule','f'
建好之后可以用 scan 'schedule' 查看表内容。
然后就可以运行这个程序了:
原本是想搞个航班时刻表的,结果插完数据后发现数据处理拆分得不是很好,算了。入门练手,数据能插入就算成功。