MainTest.java
package MyoutputFormat;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class MainTest extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        // ToolRunner.run is static; no ToolRunner instance is needed.
        int exitCode = ToolRunner.run(new MainTest(), args);
        System.exit(exitCode);
    }
    @Override
    public int run(String[] args) throws Exception {
        // ToolRunner already injected a Configuration via Configured, so reuse it.
        Configuration conf = getConf();
        // Local test; there is no need to point the configuration at HDFS.
        Job job = Job.getInstance(conf);
        job.setJarByClass(MainTest.class);
        job.setMapperClass(MyOutMapper.class);
        // Map-only job: no reducers.
        job.setNumReduceTasks(0);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        // TextInputFormat is the default input component.
        job.setInputFormatClass(TextInputFormat.class);
        // Swap in our custom output component in place of the default TextOutputFormat.
        job.setOutputFormatClass(Out2Format.class);
        FileSystem fs = FileSystem.get(conf);
        // Input path. It must differ from the output path below, which is
        // deleted before the job runs (the original used the same path for both,
        // which would have wiped the input).
        FileInputFormat.setInputPaths(job, new Path("D:/result/myinout/in"));
        // Output path; remove it first so the job does not fail on an existing directory.
        Path out = new Path("D:/result/myinout/out");
        if (fs.exists(out)) {
            fs.delete(out, true);
        }
        FileOutputFormat.setOutputPath(job, out);
        boolean status = job.waitForCompletion(true);
        return status ? 0 : 1;
    }
    public static class MyOutMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Pass each input line through as the output key; the custom
            // OutputFormat decides which file it lands in. Calling super.map()
            // here would forward the LongWritable offset as the key and fail
            // at write time with a type mismatch.
            context.write(value, NullWritable.get());
        }
    }
}
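A quick sanity check of the routing logic (the input lines below are hypothetical, not from the original post): given an input file containing

1::hello
2::world
1::foo

MyRecordWriter splits each line on "::" and routes on the first field, so out1 would end up containing hello and foo, while out2 would contain world.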
Out2Format.java
package MyoutputFormat;
import java.io.IOException;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class Out2Format extends FileOutputFormat<Text, NullWritable> {

    @Override
    public RecordWriter<Text, NullWritable> getRecordWriter(TaskAttemptContext job)
            throws IOException, InterruptedException {
        // Get the Configuration from the task context.
        Configuration conf = job.getConfiguration();
        // Get a FileSystem handle from the configuration.
        FileSystem fs = FileSystem.get(conf);
        // Create the two output streams. Note these paths are hardcoded and
        // ignore the directory passed to FileOutputFormat.setOutputPath in the driver.
        Path p1 = new Path("d:/result/myout/out1");
        Path p2 = new Path("d:/result/myout/out2");
        FSDataOutputStream out1 = fs.create(p1);
        FSDataOutputStream out2 = fs.create(p2);
        return new MyRecordWriter(out1, out2);
    }
    static class MyRecordWriter extends RecordWriter<Text, NullWritable> {
        private final FSDataOutputStream out1;
        private final FSDataOutputStream out2;

        public MyRecordWriter(FSDataOutputStream out1, FSDataOutputStream out2) {
            this.out1 = out1;
            this.out2 = out2;
        }

        @Override
        public void write(Text key, NullWritable value) throws IOException, InterruptedException {
            // Each line looks like tag::payload; route on the tag.
            String[] strs = key.toString().split("::");
            if (strs[0].equals("1")) {
                out1.writeBytes(strs[1] + "\n");
            } else {
                out2.writeBytes(strs[1] + "\n");
            }
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException, InterruptedException {
            // Note: closeQuietly comes from the commons-io package
            // (org.apache.commons.io.IOUtils), not from Hadoop's own IOUtils.
            IOUtils.closeQuietly(out1);
            IOUtils.closeQuietly(out2);
        }
    }
}
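If you want the two side files to respect the directory configured in the driver, one option is to derive them from FileOutputFormat.getOutputPath instead of hardcoding absolute paths. Below is a minimal sketch; Out3Format is a hypothetical name, and writing straight into the final output directory bypasses the usual OutputCommitter machinery, which is acceptable for a single-task local demo but not for a real cluster run.

package MyoutputFormat;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Out3Format extends FileOutputFormat<Text, NullWritable> {
    @Override
    public RecordWriter<Text, NullWritable> getRecordWriter(TaskAttemptContext job)
            throws IOException, InterruptedException {
        Configuration conf = job.getConfiguration();
        // Resolve both side files relative to the directory the driver passed
        // to FileOutputFormat.setOutputPath, instead of hardcoding D: paths.
        Path outDir = FileOutputFormat.getOutputPath(job);
        FileSystem fs = outDir.getFileSystem(conf);
        FSDataOutputStream out1 = fs.create(new Path(outDir, "out1"));
        FSDataOutputStream out2 = fs.create(new Path(outDir, "out2"));
        // Reuse the record writer defined in Out2Format above (same package).
        return new Out2Format.MyRecordWriter(out1, out2);
    }
}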