Spark Streaming + Kafka + OpenCV + FaceRecognizer + HDFS Sequence File + MySQL

This post walks through a face-recognition system built with Spark Streaming, Kafka, and OpenCV. Face images are first read from an HDFS SequenceFile and used to train a FaceRecognizer model. The system then consumes a video stream from Kafka, processes each frame to detect faces and identify them, and stores the results in a MySQL database.
<pre name="code" class="java">/**
 * Created by lwc on 6/17/16.
 */

import java.io.*;
import java.sql.*;
import java.util.*;

import kafka.serializer.DefaultDecoder;
import kafka.serializer.StringDecoder;

import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.opencv.core.*;
import org.opencv.face.Face;
import org.opencv.face.FaceRecognizer;
import org.opencv.imgproc.Imgproc;
import scala.Tuple2;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import org.apache.hadoop.io.Text;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.api.java.*;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.objdetect.CascadeClassifier;
import org.apache.spark.streaming.Durations;

// Serializable container for the data broadcast to executors. The
// FaceRecognizer itself is not serializable, hence the commented-out field.
class GlobleData implements Serializable {
    private static final long serialVersionUID = 1L;
    public Map<Integer, String> idToNameMapping;
    //  public FaceRecognizer faceRecognizer;
    //  public Map<String, Mat> lableMat = new HashMap<String, Mat>();
    public Map<String, String> lableMat = new HashMap<String, String>(); // person name -> JSON-encoded Mat
}

public class AppMatSeq {
    static Map<Integer, String> idToNameMapping;
    static FaceRecognizer faceRecognizer;
    static MatOfInt labelsBuf;
    static List<Mat> mats;
    static Map<String, String> lableMat = new HashMap<String, String>();
    static String fzString;
    static GlobleData globleData = new GlobleData();

    @SuppressWarnings("rawtypes")
    public static void train() throws Exception {
        String uri = "hdfs://10.75.161.88/newfaces.seq";
        mats = new ArrayList<Mat>();
        idToNameMapping = new HashMap<Integer, String>();
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        System.out.println("0");
        SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
        System.out.println("1");
        Map<Text, ByteArrayOutputStream> keyStream = new HashMap<Text, ByteArrayOutputStream>();
        Text key = new Text();
        Text value = new Text();
        int count = 0;
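        // Keys look like "<person>_<n>": accumulate all value chunks per key
        // and give each distinct person a numeric label id.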
        while (reader.next(key, value)) {
            if (!idToNameMapping.containsValue(key.toString().split("_")[0])) {
                idToNameMapping.put(count++, key.toString().split("_")[0]);
            }
            if (!key.toString().trim().isEmpty() && !keyStream.containsKey(key)) {
                keyStream.put(new Text(key), new ByteArrayOutputStream(1024));
            }
            keyStream.get(key).write(value.getBytes(), 0, value.getLength());
        }
        // Invert the id -> name map so keys can be resolved to numeric labels.
        Map<String, Integer> nameToId = new HashMap<String, Integer>();
        for (Map.Entry<Integer, String> entry : idToNameMapping.entrySet()) {
            nameToId.put(entry.getValue(), entry.getKey());
        }
        Mat mat;
        ByteArrayOutputStream bs = null;
        int counter = 0;
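        // Decode each person's accumulated bytes into training images and labels.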
        labelsBuf = new MatOfInt(new int[keyStream.size()]);
        for (Map.Entry<Text, ByteArrayOutputStream> out : keyStream.entrySet()) {
            bs = out.getValue();
            bs.flush();
            // Train on grayscale images (Fisherfaces expects single-channel
            // samples); keep a color copy only for the JSON-serialized map.
            mat = Imgcodecs.imdecode(new MatOfByte(bs.toByteArray()), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
            Mat matSave = Imgcodecs.imdecode(new MatOfByte(bs.toByteArray()), Imgcodecs.CV_LOAD_IMAGE_COLOR);
            mats.add(mat.clone());
            int labelId = nameToId.get(out.getKey().toString().split("_")[0]);
            //  lableMat.put(out.getKey().toString().split("_")[0], matSave.clone());
            lableMat.put(out.getKey().toString().split("_")[0], matToJson(matSave.clone()));
            labelsBuf.put(counter++, 0, labelId);
        }
        IOUtils.closeStream(bs);
        IOUtils.closeStream(reader);

        faceRecognizer = Face.createFisherFaceRecognizer();
//         FaceRecognizer faceRecognizer = Face.createEigenFaceRecognizer();
//         FaceRecognizer faceRecognizer = Face.createLBPHFaceRecognizer();

        faceRecognizer.train(mats, labelsBuf);
        System.out.println("face recognizer trained on " + mats.size() + " images");
        //  globleData.faceRecognizer = faceRecognizer;
        globleData.idToNameMapping = idToNameMapping;
        globleData.lableMat = lableMat;
    }

    @SuppressWarnings("serial")
    public static void main(String[] args) throws Exception {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // load the native OpenCV library on the driver
        System.out.println("train before");
        train();

        System.out.println("train after");
        String brokers = args[0];
        String topics = args[1];

        // Create context with a 2 seconds batch interval
        SparkConf sparkConf = new SparkConf().setAppName("JavaDirectKafkaVideoData");
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);

        // Broadcast the label maps once per job. FaceRecognizer itself is not
        // serializable, so it is deliberately kept out of the broadcast.
        final Broadcast<GlobleData> bcVar = jsc.broadcast(globleData);
        JavaStreamingContext jssc = new JavaStreamingContext(jsc, Durations.seconds(2));

        // for graceful shutdown of the application ...
//        Runtime.getRuntime().addShutdownHook(new Thread() {
//            @Override
//            public void run() {
//                System.out.println("Shutting down streaming app...");
//                if (producer != null)
//                    producer.close();
//                jssc.stop(true, true);
//                System.out.println("Shutdown of streaming app complete.");
//            }
//        });

        HashSet<String> topicsSet = new HashSet<String>(Arrays.asList(topics.split(",")));
        HashMap<String, String> kafkaParams = new HashMap<String, String>();
        kafkaParams.put("metadata.broker.list", brokers);
        kafkaParams.put("group.id", "groupid");
        kafkaParams.put("consumer.id", "consumerid");

        // Create direct kafka stream with brokers and topics
        JavaPairInputDStream<String, byte[]> messages = KafkaUtils.createDirectStream(
                jssc,
                String.class,
                byte[].class,
                StringDecoder.class,
                DefaultDecoder.class,
                kafkaParams,
                topicsSet
        );

        // Map each raw frame to a semicolon-separated list of recognized names.
        JavaDStream<String> content = messages.map(new Function<Tuple2<String, byte[]>, String>() {
            @Override
            public String call(Tuple2<String, byte[]> tuple2) throws IOException {
                // Native OpenCV must be loaded on the executors as well.
                System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
                // Skip empty or obviously truncated messages.
                if ((tuple2 == null) || (tuple2._2().length < 1000))
                    return null;

                // Each message is assumed to carry one raw 640x480 BGR frame
                // (CV_8UC3, i.e. OpenCV type 16).
                Mat image = new Mat(new Size(640, 480), CvType.CV_8UC3);
                image.put(0, 0, tuple2._2());
                List<Mat> detectResults = detectFace(image);
                if (detectResults.size() == 0)
                    return null;
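
                // ---- The original post is cut off here; the remainder is a
                // ---- hedged sketch of one way to finish the pipeline, not
                // ---- the author's verbatim code. ----

                // FaceRecognizer cannot be broadcast, so rebuild it lazily on
                // each executor from the same SequenceFile (an assumption).
                if (faceRecognizer == null) {
                    try {
                        train();
                    } catch (Exception e) {
                        throw new IOException("executor-side training failed", e);
                    }
                }

                Map<Integer, String> names = bcVar.value().idToNameMapping;
                StringBuilder result = new StringBuilder();
                for (Mat face : detectResults) {
                    int[] label = new int[1];
                    double[] confidence = new double[1];
                    faceRecognizer.predict(face, label, confidence);
                    result.append(names.get(label[0])).append(';');
                }
                return result.toString();
            }
        });

        // Persist each micro-batch to MySQL. The JDBC URL, the credentials and
        // the table face_result(name VARCHAR(255)) are placeholders.
        content.foreachRDD(new VoidFunction<JavaRDD<String>>() {
            public void call(JavaRDD<String> rdd) {
                rdd.foreachPartition(new VoidFunction<Iterator<String>>() {
                    public void call(Iterator<String> records) throws SQLException {
                        Connection conn = DriverManager.getConnection(
                                "jdbc:mysql://localhost:3306/face", "user", "password");
                        PreparedStatement stmt = conn.prepareStatement(
                                "INSERT INTO face_result(name) VALUES (?)");
                        while (records.hasNext()) {
                            String batchNames = records.next();
                            if (batchNames == null || batchNames.isEmpty())
                                continue;
                            stmt.setString(1, batchNames);
                            stmt.executeUpdate();
                        }
                        stmt.close();
                        conn.close();
                    }
                });
            }
        });

        jssc.start();
        jssc.awaitTermination();
    }

    /**
     * Haar-cascade face detection. The original helper is not shown in the
     * post; this sketch assumes the cascade file path and a 92x112 crop size
     * matching the training images.
     */
    private static List<Mat> detectFace(Mat image) {
        CascadeClassifier detector =
                new CascadeClassifier("/tmp/haarcascade_frontalface_alt.xml");
        Mat gray = new Mat();
        Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);
        MatOfRect faces = new MatOfRect();
        detector.detectMultiScale(gray, faces);
        List<Mat> results = new ArrayList<Mat>();
        for (Rect rect : faces.toArray()) {
            Mat face = new Mat(gray, rect);
            // The recognizer expects samples of the same size it was trained on.
            Imgproc.resize(face, face, new Size(92, 112));
            results.add(face);
        }
        return results;
    }

    /**
     * Serializes a Mat to JSON with a Base64-encoded pixel buffer, so the
     * color reference images survive Java serialization in the broadcast;
     * also a sketch, since the original helper is not shown.
     */
    private static String matToJson(Mat mat) {
        JsonObject json = new JsonObject();
        json.addProperty("rows", mat.rows());
        json.addProperty("cols", mat.cols());
        json.addProperty("type", mat.type());
        byte[] data = new byte[(int) (mat.total() * mat.elemSize())];
        mat.get(0, 0, data);
        json.addProperty("data", Base64.encodeBase64String(data));
        return new Gson().toJson(json);
    }
}
```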

        
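To run the job, package it and submit with spark-submit, passing the Kafka broker list and the topic name as the two program arguments. The native OpenCV library, loaded via System.loadLibrary on both the driver and the executors, must be installed on every node, and for the MySQL sink sketched above the MySQL JDBC driver must be on the executor classpath.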