将 PGM 图片以 SequenceFile 格式存储到 HDFS，并读取为 OpenCV Mat 数组格式

<pre name="code" class="java">package opencvImageSeq;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BufferedFSInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

import java.io.*;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by lwc on 5/23/16.
 */
public class ImageSeqWriter {

    /**
     * Reads every image file in a fixed local directory and appends each one to
     * an HDFS SequenceFile as a series of (file name, byte chunk) Text records.
     * Images larger than one buffer become multiple consecutive records that
     * share the same key; readers reassemble them by key.
     *
     * @param args unused
     * @throws Exception if the input directory is missing or any I/O fails
     */
    public static void main(String[] args) throws Exception {
        File inputDir = new File("/home/test/att_faces/s1");
        if (!inputDir.isDirectory()) {
            throw new Exception("input dir is wrong");
        }
        File[] inputFiles = inputDir.listFiles();
        if (inputFiles == null) {
            // listFiles() returns null on I/O error even for a directory.
            throw new Exception("input dir is wrong");
        }
        List<String> imageNames = new ArrayList<>();

        String uri = "hdfs://localhost:9000/home/hdfs/pgm2.seq";
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(Text.class),
                SequenceFile.Writer.valueClass(Text.class));
        try {
            byte[] buffer = new byte[1024];
            Text value = new Text();
            for (File file : inputFiles) {
                String imageName = file.getName();
                imageNames.add(imageName);
                Text key = new Text(imageName);
                // try-with-resources closes every per-file stream; the original
                // code leaked all but the last one.
                try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
                    int bytesRead;
                    while ((bytesRead = in.read(buffer)) != -1) {
                        // Record only the bytes actually read. The original wrote
                        // the whole 1024-byte buffer every time, padding the final
                        // chunk of each image with stale garbage bytes.
                        value.set(buffer, 0, bytesRead);
                        writer.append(key, value); // append one record per chunk
                    }
                }
            }
        } finally {
            IOUtils.closeStream(writer);
        }
        for (String name : imageNames) {
            System.out.println(name);
        }
    }
}

package opencvImageSeq;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by lwc on 5/23/16.
 */
public class ImageSeqReader {

    /**
     * Reads the SequenceFile produced by the writer and reassembles each image's
     * chunk records (grouped by key) into a local file named after the key.
     *
     * @param args unused
     * @throws Exception if HDFS or local file I/O fails
     */
    public static void main(String[] args) throws Exception {

        String uri = "hdfs://localhost:9000/home/hdfs/pgm2.seq";
        String baseDir = "/home/test/result/";
        Configuration conf = new Configuration();
        Path path = new Path(uri);

        SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        Map<Text, OutputStream> keyStream = new HashMap<>();
        Text key = new Text();
        Text value = new Text();

        try {
            while (reader.next(key, value)) {
                if (!keyStream.containsKey(key)) {
                    // reader.next() mutates 'key' in place, so a defensive copy
                    // must go into the map. The original stored the reused object
                    // itself, which corrupts HashMap hashing once the content
                    // changes on the next record.
                    keyStream.put(new Text(key), new FileOutputStream(baseDir + key));
                }
                // Text.getBytes() returns the backing array, which may be longer
                // than the logical content — write only getLength() bytes, or the
                // output files gain trailing garbage.
                keyStream.get(key).write(value.getBytes(), 0, value.getLength());
            }
        } finally {
            IOUtils.closeStream(reader);
        }
        for (Map.Entry<Text, OutputStream> out : keyStream.entrySet()) {
            out.getValue().flush();
            IOUtils.closeStream(out.getValue());
        }
    }
}

package opencvImageSeq;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.imgcodecs.Imgcodecs;

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by lwc on 5/23/16.
 */
public class ImageSeqMatReader {

    /**
     * Reads the SequenceFile produced by the writer, reassembles each image's
     * chunk records in memory, decodes them with OpenCV into {@link Mat} objects,
     * and writes the decoded images back out as numbered .pgm files.
     *
     * @param args unused
     * @throws Exception if HDFS I/O fails or the native OpenCV library is missing
     */
    public static void main(String[] args) throws Exception {

        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        String uri = "hdfs://localhost:9000/home/hdfs/pgm2.seq";
        List<Mat> mats = new ArrayList<>();
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        // One in-memory buffer per image key; the concrete type avoids the
        // original's raw-typed entry loop and casts.
        Map<Text, ByteArrayOutputStream> keyStream = new HashMap<>();
        Text key = new Text();
        Text value = new Text();

        try {
            while (reader.next(key, value)) {
                if (!keyStream.containsKey(key)) {
                    // Defensive copy: reader.next() mutates 'key' in place, so
                    // storing the reused object (as the original did) corrupts
                    // the map's hashing once the key content changes.
                    keyStream.put(new Text(key), new ByteArrayOutputStream(1024));
                }
                System.out.println(key.toString() + " key");
                keyStream.get(key).write(value.getBytes(), 0, value.getLength());
            }
        } finally {
            IOUtils.closeStream(reader);
        }
        for (Map.Entry<Text, ByteArrayOutputStream> out : keyStream.entrySet()) {
            // Decode the reassembled bytes; clone because imdecode may reuse
            // native storage tied to the source MatOfByte.
            Mat mat = Imgcodecs.imdecode(new MatOfByte(out.getValue().toByteArray()),
                    Imgcodecs.CV_LOAD_IMAGE_UNCHANGED);
            mats.add(mat.clone());
        }
        int i = 10;
        System.out.println(mats.size() + " size");
        for (Mat mat1 : mats) {
            Imgcodecs.imwrite("/home/test/" + i++ + ".pgm", mat1);
        }
    }
}


 
 
 

                
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值