The data source is tab-separated, in the following format:
20130512    1-1-1    13802    1    2013-05-12 07:26:22
20130512    1-1-1    13802    1    2013-05-12 11:18:24
Create the HBase table:
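The original post shows this step as a screenshot. As a rough sketch using the old HBase client API, the table can also be created programmatically; the table name TestMap2Hdfs and the column family ImportFromFile are assumed here so that they match what the job below writes to:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "172.30.1.245");
        conf.set("hbase.zookeeper.property.clientPort", "2222");
        HBaseAdmin admin = new HBaseAdmin(conf);
        // Table name and column family must match what the MapReduce job writes to.
        HTableDescriptor desc = new HTableDescriptor("TestMap2Hdfs");
        desc.addFamily(new HColumnDescriptor("ImportFromFile"));
        if (!admin.tableExists("TestMap2Hdfs")) {
            admin.createTable(desc);
        }
        admin.close();
    }
}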
The HDFS file directory:
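The directory listing is also an image in the original. A minimal sketch of copying the source file into the job's input directory with the HDFS FileSystem API (the local path /home/hadoop/data.txt is only a placeholder; the HDFS path matches the input path used in main() below):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class UploadToHdfs {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://172.30.1.245:9000"), conf);
        // Copy the local sample file into the directory the MapReduce job reads from.
        fs.copyFromLocalFile(new Path("/home/hadoop/data.txt"),
                new Path("/user/hadoop/hdfs2hbase/data.txt"));
        fs.close();
    }
}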
The code is as follows:
package dbinput;

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import test.EJob;

public class Map2Hdfs {

    public static final String NAME = "ImportFromFile";

    public enum Counters {
        LINES
    }

    /**
     * Map-only job: each input line becomes one Put keyed by the line's byte offset.
     */
    public static class ImportMapper extends
            Mapper<LongWritable, Text, ImmutableBytesWritable, Writable> {

        private byte[] family = null;
        private byte[] qualifier = null;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // The column family (and optional qualifier) is read from the "conf.column"
            // property set in main(); the target table must have a family with this name.
            String column = context.getConfiguration().get("conf.column");
            byte[][] colkey = KeyValue.parseColumn(Bytes.toBytes(column));
            family = colkey[0];
            if (colkey.length > 1) {
                qualifier = colkey[1];
            }
        }

        @Override
        public void map(LongWritable offset, Text line, Context context)
                throws IOException, InterruptedException {
            // Input lines are tab-separated; the last field is the timestamp.
            String[] lineArr = line.toString().split("\t");
            // Use the byte offset of the line as the row key.
            Put put = new Put(Bytes.toBytes(offset + ""));
            // Write the timestamp into <family>:time (the parsed qualifier is not used here).
            put.add(family, Bytes.toBytes("time"), Bytes.toBytes(lineArr[lineArr.length - 1]));
            context.write(new ImmutableBytesWritable(Bytes.toBytes(offset + "")), put);
            context.getCounter(Counters.LINES).increment(1);
        }
    }

    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        // EJob packages the compiled classes under "bin" into a temporary jar so the
        // job can be submitted to the remote cluster from the IDE.
        File jarfile = EJob.createTempJar("bin");
        EJob.addClasspath("/usr/hadoop/conf");
        ClassLoader classLoader = EJob.getClassLoader();
        Thread.currentThread().setContextClassLoader(classLoader);

        Configuration conf = new Configuration();
        conf.set("mapred.job.tracker", "172.30.1.245:9001");
        conf.set("hbase.zookeeper.property.clientPort", "2222");
        conf.set("hbase.zookeeper.quorum", "172.30.1.245");
        conf.set("hbase.master", "172.30.1.245:60000");
        conf.set("hbase.nameserver.address", "172.30.1.245");
        // Column family written by the mapper (see ImportMapper.setup).
        conf.set("conf.column", NAME);

        Job job = new Job(conf, "TestMap2Hdfs");
        // job.setJarByClass(Map2Hdfs.class);
        ((JobConf) job.getConfiguration()).setJar(jarfile.toString());
        job.setMapperClass(ImportMapper.class);
        // Write the mapper output straight into the HBase table "TestMap2Hdfs".
        job.setOutputFormatClass(TableOutputFormat.class);
        job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, "TestMap2Hdfs");
        job.setOutputKeyClass(ImmutableBytesWritable.class);
        job.setOutputValueClass(Writable.class);
        // Map-only job: no reducers needed.
        job.setNumReduceTasks(0);
        FileInputFormat.addInputPath(job, new Path("hdfs://172.30.1.245:9000/user/hadoop/hdfs2hbase"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
The result of running the job:
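The result is shown as a screenshot in the original. One way to check the import (a sketch, assuming the table and column family names used above) is to scan the table and print each row's time value:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "172.30.1.245");
        conf.set("hbase.zookeeper.property.clientPort", "2222");
        HTable table = new HTable(conf, "TestMap2Hdfs");
        ResultScanner scanner = table.getScanner(new Scan());
        for (Result r : scanner) {
            // Row key is the line's file offset; the value is that line's last (time) field.
            System.out.println(Bytes.toString(r.getRow()) + " -> "
                    + Bytes.toString(r.getValue(Bytes.toBytes("ImportFromFile"), Bytes.toBytes("time"))));
        }
        scanner.close();
        table.close();
    }
}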