package opencvImageSeq;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BufferedFSInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import java.io.*;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
/**
* Created by lwc on 5/23/16.
*/
/**
 * Packs every image file in a local directory into a single HDFS SequenceFile.
 * Each record's key is the image file name; values are successive chunks of the
 * file's bytes (a file larger than the buffer produces multiple records under
 * the same key — readers must concatenate records per key).
 */
public class ImageSeqWriter {
    public static void main(String[] args) throws Exception {
        File inputDir = new File("/home/test/att_faces/s1");
        if (!inputDir.isDirectory()) {
            throw new IllegalArgumentException("input dir is wrong: " + inputDir);
        }
        File[] inputFiles = inputDir.listFiles();
        if (inputFiles == null) {
            // listFiles() returns null on I/O error even for an existing directory
            throw new IOException("cannot list files in " + inputDir);
        }
        List<String> imageNames = new ArrayList<>();
        String uri = "hdfs://localhost:9000/home/hdfs/pgm2.seq";
        Configuration conf = new Configuration();
        // Keeps the FileSystem cache primed for the writer below.
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        Path path = new Path(uri);
        SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(Text.class),
                SequenceFile.Writer.valueClass(Text.class));
        try {
            for (File file : inputFiles) {
                String imageName = file.getName(); // portable; avoids hand-parsing '/'
                imageNames.add(imageName);
                Text key = new Text(imageName);
                Text value = new Text();
                // try-with-resources: each input stream is closed even on failure
                // (the original leaked every stream except the last one).
                try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
                    byte[] buffer = new byte[1024];
                    int n;
                    while ((n = in.read(buffer)) != -1) {
                        // Only the n bytes actually read — setting the whole buffer
                        // appended stale garbage after a short final read.
                        value.set(buffer, 0, n);
                        writer.append(key, value); // append one record per chunk
                        value.clear();
                    }
                }
            }
        } finally {
            IOUtils.closeStream(writer);
        }
        for (String name : imageNames) {
            System.out.println(name);
        }
    }
}
package opencvImageSeq;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by lwc on 5/23/16.
*/
/**
 * Reads the SequenceFile written by {@code ImageSeqWriter} and reassembles each
 * image on the local filesystem: records sharing a key are concatenated into one
 * output file named after the key, under {@code baseDir}.
 */
public class ImageSeqReader {
    public static void main(String[] args) throws Exception {
        String uri = "hdfs://localhost:9000/home/hdfs/pgm2.seq";
        String baseDir = "/home/test/result/";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        Path path = new Path(uri);
        SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        // Keyed by String, not Text: reader.next() mutates the single Text key
        // instance in place, and a mutated object must never live inside a HashMap.
        Map<String, OutputStream> keyStream = new HashMap<>();
        Text key = new Text();
        Text value = new Text();
        try {
            while (reader.next(key, value)) {
                String name = key.toString(); // immutable snapshot of the reused key
                OutputStream out = keyStream.get(name);
                if (out == null) {
                    out = new FileOutputStream(baseDir + name);
                    keyStream.put(name, out);
                }
                // getBytes() exposes the full backing array; only the first
                // getLength() bytes are valid record data.
                out.write(value.getBytes(), 0, value.getLength());
                value.clear();
            }
        } finally {
            for (Map.Entry<String, OutputStream> entry : keyStream.entrySet()) {
                OutputStream out = entry.getValue();
                out.flush();
                IOUtils.closeStream(out);
            }
            IOUtils.closeStream(reader);
        }
    }
}
package opencvImageSeq;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.imgcodecs.Imgcodecs;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by lwc on 5/23/16.
*/
/**
 * Reads the image SequenceFile from HDFS, reassembles each key's records into a
 * byte buffer in memory, decodes every buffer into an OpenCV {@link Mat}, and
 * writes the decoded images back out as numbered .pgm files (smoke test of the
 * round trip).
 */
public class ImageSeqMatReader {
    public static void main(String[] args) throws Exception {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // native OpenCV must be on java.library.path
        String uri = "hdfs://localhost:9000/home/hdfs/pgm2.seq";
        List<Mat> mats = new ArrayList<>();
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        Path path = new Path(uri);
        SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        // Keyed by String, not Text: reader.next() mutates the single Text key
        // instance in place, and a mutated object must never live inside a HashMap.
        Map<String, ByteArrayOutputStream> keyStream = new HashMap<>();
        Text key = new Text();
        Text value = new Text();
        try {
            while (reader.next(key, value)) {
                String name = key.toString(); // immutable snapshot of the reused key
                ByteArrayOutputStream buf =
                        keyStream.computeIfAbsent(name, k -> new ByteArrayOutputStream(1024));
                System.out.println(name + " key");
                // Only getLength() bytes of the backing array are valid record data.
                buf.write(value.getBytes(), 0, value.getLength());
                value.clear();
            }
        } finally {
            IOUtils.closeStream(reader);
        }
        for (Map.Entry<String, ByteArrayOutputStream> entry : keyStream.entrySet()) {
            // ByteArrayOutputStream needs no flush/close — flush() and close() are no-ops.
            Mat mat = Imgcodecs.imdecode(new MatOfByte(entry.getValue().toByteArray()),
                    Imgcodecs.CV_LOAD_IMAGE_UNCHANGED);
            // clone() detaches the pixel data from the decoder's buffer.
            mats.add(mat.clone());
        }
        int i = 10;
        System.out.println(mats.size() + " size");
        for (Mat decoded : mats) {
            Imgcodecs.imwrite("/home/test/" + i++ + ".pgm", decoded);
        }
    }
}