package org.apache.hadoop.examples;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.extraction.demux.HourlyChukwaRecordRolling;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobPriority;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class ChukwaReader extends Configured implements Tool {
public static class EvtMapper extends MapReduceBase implements
Mapper<ChukwaRecordKey, ChukwaRecord, Text, Text> {
@Override
public void map(ChukwaRecordKey key, ChukwaRecord value,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
String[] fields = value.getFields();
System.out.println("====start key:"+key);
for(String field : fields){
System.out.println("field:"+field+" value:"+value.getValue(field));
}
System.out.println("====end key:"+ key);
output.collect(new Text(key.toString()), new Text(value.toString()));
}
}
public static class EvtReducer extends MapReduceBase implements
Reducer<Text, Text, Text, Text> {
@Override
public void reduce(Text key, Iterator<Text> values,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
while (values.hasNext()) {
output.collect(key, values.next());
}
}
}
@Override
public int run(String[] args) throws Exception {
JobConf conf = new JobConf(new ChukwaConfiguration(),
HourlyChukwaRecordRolling.class);
conf.setJobName("HourlyChukwa-Rolling");
conf.setInputFormat(SequenceFileInputFormat.class);
conf.setMapperClass(EvtMapper.class);
conf.setReducerClass(EvtReducer.class);
conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(Text.class);
FileInputFormat.setInputPaths(conf, args[0]);
FileOutputFormat.setOutputPath(conf, new Path(args[1]));
conf.setJobPriority(JobPriority.LOW);
conf.setNumReduceTasks(1);
JobClient.runJob(conf);
return 0;
}
public static void main(String args[]){
try {
ToolRunner.run(new ChukwaReader(), args);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
/*
 * Below is the information printed by the program; the "body" field can be
 * processed directly:
 *
 * ====start key:'typeone,'1395068400000/ubuntu/1395068400000
 * field:body value:afdsafdafdas
 * field:capp value:/opt/typeone.log
 * field:csource value:ubuntu
 * field:ctags value: cluster="chukwa"
 * ====end key:'typeone,'1395068400000/ubuntu/1395068400000
 */