第一步:定制HBaseMain
package com.xxx.oozie;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Properties;
import org.apache.commons.lang.ClassUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.oozie.action.hadoop.MapReduceMain;
/**
 * Custom Oozie action main class that submits an HBase MapReduce job.
 * <p>
 * It layers the HBase configuration, the cluster (hdfs/yarn) configuration
 * and the Oozie action configuration, then uses {@link TableMapReduceUtil}
 * to wire up the table input/output formats when the configured mapper is an
 * {@code HBaseMapper} and/or the configured reducer is a {@link TableReducer}.
 */
public class HBaseMain extends MapReduceMain {

    public static void main(String[] args) throws Exception {
        run(HBaseMain.class, args);
    }

    /**
     * Builds and submits the HBase MapReduce job described by the Oozie
     * action configuration and waits for it to complete.
     *
     * @param actionConf action configuration prepared by Oozie
     * @return the job id of the successfully completed job
     * @throws IOException if the job finishes unsuccessfully
     * @throws Exception   on configuration, reflection or submission failures
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected String submitHBaseJob(Configuration actionConf) throws Exception {
        // Load core-site and hbase-site settings.
        Configuration config = HBaseConfiguration.create();
        // Layer the hdfs/yarn settings on top.
        JobConf jobConf = new JobConf(config);
        // Merge in the Oozie action configuration.
        addActionConf(jobConf, actionConf);
        // Set for uber jar
        String uberJar = actionConf.get(OOZIE_MAPREDUCE_UBER_JAR);
        if (uberJar != null && uberJar.trim().length() > 0) {
            jobConf.setJar(uberJar);
        }
        // propagate delegation related props from launcher job to MR job
        if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
            jobConf.set("mapreduce.job.credentials.binary",
                    System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
        }
        Job job = Job.getInstance(jobConf);
        Class mapperClass = job.getMapperClass();
        if (ClassUtils.isAssignable(mapperClass, HBaseMapper.class)) {
            // TODO support multiple input tables
            String table = jobConf.get(TableInputFormat.INPUT_TABLE);
            // Ask the mapper for its Scan via reflection.
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which rethrows constructor exceptions
            // unwrapped and bypasses compile-time exception checking.
            Object object = mapperClass.getDeclaredMethod("getScan").invoke(
                    mapperClass.getDeclaredConstructor().newInstance());
            Scan scan = (Scan) object;
            TableMapReduceUtil.initTableMapperJob(table, scan, mapperClass,
                    job.getMapOutputKeyClass(), job.getMapOutputValueClass(),
                    job);
        }
        Class reduceClass = job.getReducerClass();
        if (ClassUtils.isAssignable(reduceClass, TableReducer.class)) {
            // TODO support multiple output tables
            String outputTable = jobConf.get(TableOutputFormat.OUTPUT_TABLE);
            TableMapReduceUtil.initTableReducerJob(outputTable, reduceClass,
                    job);
        }
        if (job.waitForCompletion(true)) {
            return job.getJobID().toString();
        }
        throw new IOException("error with job!");
    }

    /**
     * Entry point invoked by the Oozie launcher: loads the action
     * configuration from {@code oozie.action.conf.xml}, submits the job and
     * writes the resulting job id back to the
     * {@code oozie.action.newId.properties} file for Oozie to pick up.
     *
     * @param args launcher arguments (unused; signature fixed by Oozie)
     * @throws Exception on any configuration, submission or I/O failure
     */
    protected void run(String[] args) throws Exception {
        System.out.println();
        System.out.println("Oozie Map-Reduce action configuration");
        System.out.println("=======================");
        // loading action conf prepared by Oozie
        Configuration actionConf = new Configuration(false);
        actionConf.addResource(new Path("file:///", System
                .getProperty("oozie.action.conf.xml")));
        logMasking("Map-Reduce job configuration:", new HashSet<String>(),
                actionConf);
        System.out.println("Submitting Oozie action Map-Reduce job");
        System.out.println();
        // submitting job
        String jobId = submitHBaseJob(actionConf);
        // propagating job id back to Oozie
        Properties props = new Properties();
        props.setProperty("id", jobId);
        File idFile = new File(
                System.getProperty("oozie.action.newId.properties"));
        // try-with-resources: the original leaked the stream when
        // props.store() threw, since close() was never reached.
        try (OutputStream os = new FileOutputStream(idFile)) {
            props.store(os, "");
        }
        System.out.println("=======================");
        System.out.println();
    }
}
第二步:在hbase的mapreduce里使用HBaseMain
<action name="hbasetohbase">
<map-reduce>
<configuration>
<property>
<name>oozie.launcher.action.main.class</name>
<value>com.xxx.oozie.HBaseMain</value>
</property>
..........