在执行MapReduce时添加参数
/**
 * Parses the {@code -d <date>} command-line option and stores the resulting
 * running date into the job configuration under
 * {@code GlobalConstants.RUNNING_DATE_PARAMES}.
 * Falls back to yesterday's date when the option is absent or the given
 * value fails {@code TimeUtil.isValidateRunningDate}.
 *
 * @param conf2 Hadoop job configuration to receive the running date
 * @param args  raw command-line arguments, scanned for a "-d" flag
 */
private void processArgs(Configuration conf2, String[] args) {
String date = "";
int idx = 0;
while (idx < args.length) {
// "-d" must be followed by a value; a trailing "-d" is ignored
if ("-d".equals(args[idx]) && idx + 1 < args.length) {
date = args[idx + 1];
idx++; // skip the consumed value
}
idx++;
}
// invalid or missing date -> default to yesterday
if (StringUtils.isBlank(date) || !TimeUtil.isValidateRunningDate(date)) {
date = TimeUtil.getYesterday();
}
conf2.set(GlobalConstants.RUNNING_DATE_PARAMES, date);
}
HBase作为MapReduce输入端时调用的方法:
void org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.initTableMapperJob(List<Scan> scans, Class<? extends TableMapper> mapper, Class<? extends WritableComparable> outputKeyClass, Class<? extends Writable> outputValueClass, Job job, boolean addDependencyJars) throws IOException
样例:
TableMapReduceUtil.initTableMapperJob(initScans(job), NewInstallUserMapper.class, StatsUserDimension.class, TimeOutputValue.class, job, false);
其中的initScans()方法
private List<Scan> initScans(Job job) {
Configuration conf = job.getConfiguration();
String date = conf.get(GlobalConstants.RUNNING_DATE_PARAMES);
long time = TimeUtil.parseString2Long(date);
long endtime = time + GlobalConstants.DAY_OF_MILLISECONDS;
String startRow = String.valueOf(time);
String stopRow = String.valueOf(endtime);
Scan scan = new Scan();
//获取某天数据
scan.setStartRow(startRow.getBytes());
scan.setStopRow(stopRow.getBytes());
//获取事件值为e_l的数据
FilterList lists = new FilterList(FilterList.Operator.MUST_PASS_ALL);
SingleColumnValueFilter filter1 = new SingleColumnValueFilter(EventLogConstants.EVENT_LOGS_FAMILY_NAME.getBytes(), EventLogConstants.LOG_COLUMN_NAME_EVENT_NAME.getBytes(), CompareOp.EQUAL, "e_l".getBytes());
lists.addFilter(filter1);
//获取部分列
//定义获取的列名
String[] columns = new String[] {EventLogConstants.LOG_COLUMN_NAME_UUID,
EventLogConstants.LOG_COLUMN_NAME_BROWSER_NAME, EventLogConstants.LOG_COLUMN_NAME_BROWSER_VERSION,
EventLogConstants.LOG_COLUMN_NAME_SERVER_TIME, EventLogConstants.LOG_COLUMN_NAME_PLATFORM
};
lists.addFilter(getColumn(columns));
scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME