package test;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import util.Utils;
public class SequenceWriteReadFile extends Configured implements Tool{
private static final int Count = 100;
public static void main(String[] args) throws Exception {
System.exit(ToolRunner.run(null, new SequenceWriteReadFile(), args));
}
@Override
public int run(String[] args) throws Exception {
if (args.length != 1) {
System.err.println("Usage: "+getClass().getName()+" <outdir>");
ToolRunner.printGenericCommandUsage(System.err);
return -1;
}
final JobConf jobConf = new JobConf(getConf(), getClass());
jobConf.set("fs.default.name",Utils.URL_HDFS);
jobConf.setSpeculativeExecution(false);
jobConf.setOutputFormat(FileOutputFormat.class);
jobConf.setInputFormat(FileInputFormat.class);
FileSystem fs = FileSystem.newInstance(jobConf);
String dir = args[0];
System.out.println("Dir:" + dir);
Path path = new Path(dir,"test.txt");
System.out.println("==============write=============");
SequenceFile.Writer write = SequenceFile.createWriter(fs, jobConf, path,LongWritable.class, LongWritable.class, CompressionType.NONE);
for(int i = 0 ;i < Count;i ++){
LongWritable offset1 = new LongWritable(i + 1);
LongWritable offset2 = new LongWritable(i + 1);
write.append(offset1, offset2);
System.out.println("write: offset1:" + offset1 + ",offset2:" + offset2);
}
write.close();
System.out.println("==============read=============");
SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, jobConf);
LongWritable offset1 = new LongWritable();
LongWritable offset2 = new LongWritable();
try {
while(reader.next(offset1, offset2)){
System.out.println("read: offset1:" + offset1 + ",offset2:" + offset2);
}
} finally {
reader.close();
}
return 0;
}
}
使用SequenceFile实现文件的读和写,使用文件查看会乱码,使用hadoop fs -text 查看。