HDFS -> HBASE
package com.ws.hbaseMr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import java.io.IOException;

/**
 * Word count that reads comma-separated words from HDFS text files and
 * writes the counts into the HBase table "hd2hb" (column family "f",
 * qualifier "n"), with the word as the row key.
 */
public class Hdfs2Hbase {

    public static class M extends Mapper<LongWritable, Text, Text, IntWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Each input line is a comma-separated list of words; emit (word, 1).
            String[] split = value.toString().split(",");
            for (String s : split) {
                context.write(new Text(s), new IntWritable(1));
            }
        }
    }

    public static class R extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            // Use Bytes.toBytes(key.toString()) rather than key.getBytes():
            // Text.getBytes() returns the backing array, which can be longer
            // than the valid data and would corrupt the row key.
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("n"), Bytes.toBytes(String.valueOf(sum)));
            // TableOutputFormat ignores the output key, so null is fine here.
            context.write(null, put);
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("hbase.zookeeper.quorum", "dream1:2181,dream2:2181");
        Job job = Job.getInstance(conf);
        job.setJarByClass(Hdfs2Hbase.class);
        job.setMapperClass(M.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        FileInputFormat.setInputPaths(job, new Path("hdfs://dream1:9000/hd2hb/input"));
        // Sets the reducer class, TableOutputFormat and the target table, so a
        // separate job.setReducerClass(...) call is unnecessary.
        TableMapReduceUtil.initTableReducerJob("hd2hb", R.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
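The jobs in these notes assume the target table already exists with column family "f"; the job fails at submit or write time if it is missing. A minimal sketch of creating it up front with the HBase client (the builder-style descriptors assume the 2.x client API; on 1.x the HTableDescriptor/HColumnDescriptor equivalents apply, and CreateWordCountTable is a hypothetical helper, not part of the original code):

package com.ws.hbaseMr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CreateWordCountTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "dream1:2181,dream2:2181");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            TableName table = TableName.valueOf("hd2hb");
            if (!admin.tableExists(table)) {
                // One column family "f", matching the Puts in the reducer above.
                admin.createTable(TableDescriptorBuilder.newBuilder(table)
                        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
                        .build());
            }
        }
    }
}

Running this once before submitting the job (and again with "hb2hb" for the last job in this section) makes the pipeline repeatable.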
HBASE -> HDFS
package com.ws.hbaseMr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Exports the word counts from the HBase table "hd2hb" (column family "f",
 * qualifier "n") back into text files on HDFS.
 */
public class HBase2Hdfs {

    public static class M extends TableMapper<Text, IntWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // The row key is the word; cell f:n holds its count as a string.
            byte[] count = value.getValue(Bytes.toBytes("f"), Bytes.toBytes("n"));
            String word = Bytes.toString(key.get(), key.getOffset(), key.getLength());
            context.write(new Text(word), new IntWritable(Integer.parseInt(Bytes.toString(count))));
        }
    }

    // Identity reducer: forwards each (word, count) pair to the output file.
    public static class R extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            for (IntWritable value : values) {
                context.write(key, value);
            }
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("hbase.zookeeper.quorum", "dream1:2181,dream2:2181");
        // Write to HDFS as root, regardless of the local user running the job.
        System.setProperty("HADOOP_USER_NAME", "root");
        Job job = Job.getInstance(conf);
        job.setJarByClass(HBase2Hdfs.class);
        job.setReducerClass(R.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Full-table scan; sets the mapper, its output types and TableInputFormat.
        Scan scan = new Scan();
        TableMapReduceUtil.initTableMapperJob("hd2hb", scan, M.class, Text.class, IntWritable.class, job);
        FileOutputFormat.setOutputPath(job, new Path("hdfs://dream1:9000/hd2hb/out"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
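Before or after running the export it can help to spot-check a single row directly, using the same getValue call as the mapper. A minimal sketch with a plain Get ("hello" is a hypothetical row key, not something the code above guarantees to exist):

package com.ws.hbaseMr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckRow {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "dream1:2181,dream2:2181");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("hd2hb"))) {
            // "hello" is a placeholder; use any word present in the input data.
            Result result = table.get(new Get(Bytes.toBytes("hello")));
            byte[] count = result.getValue(Bytes.toBytes("f"), Bytes.toBytes("n"));
            System.out.println("count = " + (count == null ? "absent" : Bytes.toString(count)));
        }
    }
}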
HBASE -> MR -> HBASE
package com.ws.hbaseMr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

import java.io.IOException;

/**
 * Copies the word counts from table "hd2hb" into table "hb2hb", inverting
 * the layout: the count becomes the row key and the word becomes the value.
 */
public class HBase2HBaseMr {

    public static class M extends TableMapper<Text, IntWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            byte[] count = value.getValue(Bytes.toBytes("f"), Bytes.toBytes("n"));
            String word = Bytes.toString(key.get(), key.getOffset(), key.getLength());
            context.write(new Text(word), new IntWritable(Integer.parseInt(Bytes.toString(count))));
        }
    }

    public static class R extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // Invert (word, count) to (count, word). Note: words that share a
            // count land on the same row key and overwrite each other in f:n.
            int count = values.iterator().next().get();
            Put put = new Put(Bytes.toBytes(String.valueOf(count)));
            put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("n"), Bytes.toBytes(key.toString()));
            context.write(null, put);
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("hbase.zookeeper.quorum", "dream1:2181,dream2:2181");
        Job job = Job.getInstance(conf);
        job.setJarByClass(HBase2HBaseMr.class);
        // Source table: full scan of "hd2hb".
        Scan scan = new Scan();
        TableMapReduceUtil.initTableMapperJob("hd2hb", scan, M.class, Text.class, IntWritable.class, job);
        // Target table: "hb2hb" (must already exist with column family "f").
        TableMapReduceUtil.initTableReducerJob("hb2hb", R.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
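One design note on the two table-reading jobs above: new Scan() fetches entire rows, while the mappers only ever read the f:n cell. A narrowed scan would behave identically while shipping less data per RPC. A sketch as a hypothetical ScanFactory helper, not part of the original code (the caching value is an arbitrary assumption):

package com.ws.hbaseMr;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

/** Builds a narrowed scan for these jobs: fetch only f:n instead of whole rows. */
public class ScanFactory {
    public static Scan countScan() {
        Scan scan = new Scan();
        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("n")); // only the count cell
        scan.setCaching(500);       // rows per RPC; 500 is an arbitrary choice
        scan.setCacheBlocks(false); // usual advice for MR scans: skip the block cache
        return scan;
    }
}

Passing ScanFactory.countScan() as the scan argument to initTableMapperJob in either job above is a drop-in change.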