package hadoop;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.MultiTableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
/**
 * MapReduce job that scans the HBase table "mapreduce" and replicates every
 * cell into the two destination tables "result2" and "result3" (column
 * family "f") via {@link MultiTableOutputFormat}.
 *
 * <p>Cell data is shuttled between mapper and reducer as
 * {@code (rowKey, "qualifier,value")} text pairs; the reducer splits on the
 * FIRST comma only, so commas inside the cell value are preserved.
 */
public class HbaseToMHbase {

    /** Column family written on the destination tables. */
    private static final byte[] FAMILY = "f".getBytes(StandardCharsets.UTF_8);

    /**
     * Mapper: for each cell of the scanned row, emits
     * {@code (rowKey, "qualifier,value")}.
     */
    static class BMapper extends TableMapper<Text, Text> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            for (Cell cell : value.rawCells()) {
                String row = new String(CellUtil.cloneRow(cell), StandardCharsets.UTF_8);
                String qualifier = new String(CellUtil.cloneQualifier(cell), StandardCharsets.UTF_8);
                String cellValue = new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8);
                // "qualifier,value" — the reducer splits with limit 2, so a ','
                // inside the value survives the round trip.
                context.write(new Text(row), new Text(qualifier + "," + cellValue));
            }
        }
    }

    /**
     * Reducer: rebuilds a {@link Put} per incoming cell and writes it to BOTH
     * destination tables. With {@link MultiTableOutputFormat}, the output key
     * is the destination table name.
     */
    static class BReduce extends TableReducer<Text, Text, ImmutableBytesWritable> {

        private static final ImmutableBytesWritable RESULT2 =
                new ImmutableBytesWritable("result2".getBytes(StandardCharsets.UTF_8));
        private static final ImmutableBytesWritable RESULT3 =
                new ImmutableBytesWritable("result3".getBytes(StandardCharsets.UTF_8));

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            byte[] rowKey = key.toString().getBytes(StandardCharsets.UTF_8);
            for (Text text : values) {
                // Limit 2: only the first comma separates qualifier from value,
                // so values containing commas are not truncated.
                String[] parts = text.toString().split(",", 2);
                String qualifier = parts[0];
                String cellValue = parts.length > 1 ? parts[1] : "";
                Put put = new Put(rowKey);
                put.add(FAMILY,
                        qualifier.getBytes(StandardCharsets.UTF_8),
                        cellValue.getBytes(StandardCharsets.UTF_8));
                context.write(RESULT2, put);
                context.write(RESULT3, put);
            }
        }
    }

    /**
     * Configures and submits the job: one Scan over table "mapreduce"
     * (rows "row0".."row9"), one reducer, multi-table output.
     * Exits non-zero when the job fails.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.10.250");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        Job job = Job.getInstance(conf, "test");
        job.setJarByClass(HbaseToMHbase.class);

        Scan scan = new Scan();
        scan.setCaching(200);        // fetch 200 rows per RPC round trip
        scan.setCacheBlocks(false);  // MR scans should not pollute the block cache
        scan.setStartRow("row0".getBytes(StandardCharsets.UTF_8));
        scan.setStopRow("row9".getBytes(StandardCharsets.UTF_8));
        // With the multi-scan initTableMapperJob, each Scan names its source table.
        scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME,
                "mapreduce".getBytes(StandardCharsets.UTF_8));
        List<Scan> scans = new ArrayList<Scan>();
        scans.add(scan);

        TableMapReduceUtil.initTableMapperJob(scans, BMapper.class, Text.class, Text.class, job);
        job.setNumReduceTasks(1);
        job.setReducerClass(BReduce.class);
        job.setOutputFormatClass(MultiTableOutputFormat.class);
        // MultiTableOutputFormat consumes (table name, Mutation) pairs.
        job.setOutputKeyClass(ImmutableBytesWritable.class);
        job.setOutputValueClass(Mutation.class);

        // Propagate the job result instead of always exiting 0.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}