让 Oozie 优雅地支持 HBase

第一步:定制HBaseMain 

package com.xxx.oozie;



import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Properties;


import org.apache.commons.lang.ClassUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.oozie.action.hadoop.MapReduceMain;


/**
 * Custom Oozie launcher main class that submits an HBase-aware MapReduce job.
 *
 * <p>It layers configuration in three steps (core/hbase-site, hdfs/yarn via
 * {@code JobConf}, then the Oozie action conf), wires HBase table input/output
 * through {@code TableMapReduceUtil} when the configured mapper/reducer are
 * HBase ones, and reports the job id back to Oozie the same way
 * {@code MapReduceMain} does.
 */
public class HBaseMain extends MapReduceMain {

    public static void main(String[] args) throws Exception {
        run(HBaseMain.class, args);
    }

    /**
     * Builds and submits the MapReduce job described by the Oozie action
     * configuration, blocking until it completes.
     *
     * @param actionConf the action configuration prepared by Oozie
     * @return the id of the successfully completed job
     * @throws IOException if the job finishes unsuccessfully
     * @throws Exception on reflection or submission failures
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected String submitHBaseJob(Configuration actionConf) throws Exception {
        // Load core-site and hbase-site settings.
        Configuration config = HBaseConfiguration.create();
        // Layer hdfs/yarn settings on top.
        JobConf jobConf = new JobConf(config);
        // Finally apply the Oozie action configuration.
        addActionConf(jobConf, actionConf);

        // Honor an uber jar if one was configured for the action.
        String uberJar = actionConf.get(OOZIE_MAPREDUCE_UBER_JAR);
        if (uberJar != null && uberJar.trim().length() > 0) {
            jobConf.setJar(uberJar);
        }

        // Propagate delegation-token related props from the launcher job to the MR job.
        String tokenFile = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
        if (tokenFile != null) {
            jobConf.set("mapreduce.job.credentials.binary", tokenFile);
        }

        Job job = Job.getInstance(jobConf);

        Class mapperClass = job.getMapperClass();
        if (ClassUtils.isAssignable(mapperClass, HBaseMapper.class)) {
            // TODO: support multi-table input.
            String table = jobConf.get(TableInputFormat.INPUT_TABLE);
            // The mapper is expected to expose its Scan via a no-arg getScan();
            // invoke it reflectively on a fresh instance.
            Object object = mapperClass.getDeclaredMethod("getScan")
                    .invoke(mapperClass.newInstance());
            Scan scan = (Scan) object;
            TableMapReduceUtil.initTableMapperJob(table, scan, mapperClass,
                    job.getMapOutputKeyClass(), job.getMapOutputValueClass(),
                    job);
        }

        Class reduceClass = job.getReducerClass();
        if (ClassUtils.isAssignable(reduceClass, TableReducer.class)) {
            // TODO: support multi-table output.
            String outputTable = jobConf.get(TableOutputFormat.OUTPUT_TABLE);
            TableMapReduceUtil.initTableReducerJob(outputTable, reduceClass,
                    job);
        }

        if (job.waitForCompletion(true)) {
            return job.getJobID().toString();
        }
        throw new IOException("error with job!");
    }

    /**
     * Entry point invoked by the Oozie launcher: loads the action
     * configuration, submits the job, and writes the resulting job id back
     * to the properties file Oozie polls for tracking.
     */
    protected void run(String[] args) throws Exception {
        System.out.println();
        System.out.println("Oozie Map-Reduce action configuration");
        System.out.println("=======================");

        // Load the action conf file prepared by Oozie for this launcher.
        Configuration actionConf = new Configuration(false);
        actionConf.addResource(new Path("file:///", System
                .getProperty("oozie.action.conf.xml")));

        logMasking("Map-Reduce job configuration:", new HashSet<String>(),
                actionConf);

        System.out.println("Submitting Oozie action Map-Reduce job");
        System.out.println();
        // Submit and wait for the job to finish.
        String jobId = submitHBaseJob(actionConf);

        // Propagate the job id back to Oozie via the newId properties file.
        Properties props = new Properties();
        props.setProperty("id", jobId);
        File idFile = new File(
                System.getProperty("oozie.action.newId.properties"));
        // try-with-resources: the original leaked the stream if store() threw.
        try (OutputStream os = new FileOutputStream(idFile)) {
            props.store(os, "");
        }

        System.out.println("=======================");
        System.out.println();
    }

}


第二步:在 HBase 的 MapReduce action 配置里使用 HBaseMain

<action name="hbasetohbase">
<map-reduce>
<configuration>
<property>
<name>oozie.launcher.action.main.class</name>
<value>com.xxx.oozie.HBaseMain</value>
</property>

..........

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值