Android Face Detection and Tracking

This post wires MediaPipe's Face Detection solution into an Android app: MainActivity streams camera frames through the detector and draws the results on a GL surface, FaceDetectionResultImageView and FaceDetectionResultGlRenderer render the keypoints and bounding boxes, and build.gradle pulls in the MediaPipe artifacts.

MainActivity.java

package com.ispring.facedetection;

import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.FrameLayout;

import com.google.mediapipe.solutioncore.CameraInput;
import com.google.mediapipe.solutioncore.SolutionGlSurfaceView;
import com.google.mediapipe.solutions.facedetection.FaceDetection;
import com.google.mediapipe.solutions.facedetection.FaceDetectionOptions;
import com.google.mediapipe.solutions.facedetection.FaceDetectionResult;
import com.google.mediapipe.solutions.facedetection.FaceKeypoint;
import com.google.mediapipe.formats.proto.LocationDataProto.LocationData.RelativeKeypoint;
import com.ispring.gameplane.R;

public class MainActivity extends AppCompatActivity {
    private static final String TAG = "MainActivity";

    private FaceDetection faceDetection;
    private CameraInput cameraInput;
    private SolutionGlSurfaceView<FaceDetectionResult> glSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Build the camera -> FaceDetection -> GL renderer pipeline.
        setupStreamingModePipeline();
    }

    private void setupStreamingModePipeline() {
        // Streaming (video) mode; modelSelection 0 picks the short-range model,
        // which works best for faces within about two meters of the camera.
        faceDetection =
                new FaceDetection(
                        this,
                        FaceDetectionOptions.builder()
                                .setStaticImageMode(false)
                                .setModelSelection(0)
                                .build());
        faceDetection.setErrorListener(
                (message, e) -> Log.e(TAG, "MediaPipe face detection error: " + message));

        // Feed every camera frame into the detector.
        cameraInput = new CameraInput(this);
        cameraInput.setNewFrameListener(textureFrame -> faceDetection.send(textureFrame));
        
        glSurfaceView = new SolutionGlSurfaceView<>(
                this, faceDetection.getGlContext(), faceDetection.getGlMajorVersion());
        glSurfaceView.setSolutionResultRenderer(new FaceDetectionResultGlRenderer());
        glSurfaceView.setRenderInputImage(true);
        faceDetection.setResultListener(
                faceDetectionResult -> {
                    logNoseTipKeypoint(faceDetectionResult, /*faceIndex=*/ 0, /*showPixelValues=*/ false);
                    glSurfaceView.setRenderData(faceDetectionResult);
                    glSurfaceView.requestRender();
                });

        glSurfaceView.post(this::startCamera);
    
        FrameLayout frameLayout = findViewById(R.id.frl);
        frameLayout.removeAllViewsInLayout();
        frameLayout.addView(glSurfaceView);
        glSurfaceView.setVisibility(View.VISIBLE);
        frameLayout.requestLayout();
    }
    private void startCamera() {
        cameraInput.start(
                this,
                faceDetection.getGlContext(),
                CameraInput.CameraFacing.FRONT,
                glSurfaceView.getWidth(),
                glSurfaceView.getHeight());
    }
    private void stopCurrentPipeline() {
        if (cameraInput != null) {
            cameraInput.setNewFrameListener(null);
            cameraInput.close();
        }
        if (glSurfaceView != null) {
            glSurfaceView.setVisibility(View.GONE);
        }
        if (faceDetection != null) {
            faceDetection.close();
        }
    }
    private void logNoseTipKeypoint(
            FaceDetectionResult result, int faceIndex, boolean showPixelValues) {
        if (result.multiFaceDetections().isEmpty()) {
            return;
        }
        RelativeKeypoint noseTip = result
                .multiFaceDetections()
                .get(faceIndex)
                .getLocationData()
                .getRelativeKeypoints(FaceKeypoint.NOSE_TIP);
        if (showPixelValues) {
            int width = result.inputBitmap().getWidth();
            int height = result.inputBitmap().getHeight();
            Log.i(TAG, String.format(
                    "MediaPipe face detection nose tip (pixel coordinates): x=%f, y=%f",
                    noseTip.getX() * width, noseTip.getY() * height));
        } else {
            // Keypoints are normalized to [0, 1] relative to the input image:
            // (0, 0) is the top-left corner and (1, 1) the bottom-right, so x
            // grows as the face moves right and y grows as it moves down.
            Log.i(TAG, String.format(
                    "MediaPipe face detection nose tip (normalized coordinates): x=%f, y=%f",
                    noseTip.getX(), noseTip.getY()));
        }
    }
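
    // Illustrative sketch, not from the original post: one way the normalized
    // nose-tip position could steer an on-screen object such as the plane in
    // com.ispring.gameplane.game.GameView. The method name and the
    // target/container views are hypothetical.
    private void followNoseTip(FaceDetectionResult result, View target, View container) {
        if (result.multiFaceDetections().isEmpty()) {
            return;
        }
        RelativeKeypoint noseTip = result
                .multiFaceDetections()
                .get(0)
                .getLocationData()
                .getRelativeKeypoints(FaceKeypoint.NOSE_TIP);
        // Scale the normalized [0, 1] coordinates to the container's pixel size;
        // mirror x because the front-camera image is mirrored.
        float px = (1f - noseTip.getX()) * container.getWidth();
        float py = noseTip.getY() * container.getHeight();
        // View positions must be updated on the UI thread.
        runOnUiThread(() -> {
            target.setX(px - target.getWidth() / 2f);
            target.setY(py - target.getHeight() / 2f);
        });
    }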

    @Override
    protected void onResume() {
        super.onResume();
        // Recreate the camera input and restart streaming when the activity
        // returns to the foreground.
        cameraInput = new CameraInput(this);
        cameraInput.setNewFrameListener(textureFrame -> faceDetection.send(textureFrame));
        glSurfaceView.post(this::startCamera);
        glSurfaceView.setVisibility(View.VISIBLE);
    }
    @Override
    protected void onPause() {
        super.onPause();
        glSurfaceView.setVisibility(View.GONE);
        cameraInput.close();
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release the camera and the MediaPipe graph.
        stopCurrentPipeline();
    }
}
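
CameraInput is built on CameraX, which needs the CAMERA permission; the listing above shows neither the manifest entry nor a runtime request. A minimal sketch, assuming <uses-permission android:name="android.permission.CAMERA" /> is declared in AndroidManifest.xml; the request code and the ensureCameraPermission() helper are illustrative names, and these members would live inside MainActivity:

    // Hypothetical permission handling (not shown in the original post).
    private static final int CAMERA_PERMISSION_REQUEST = 1;

    private void ensureCameraPermission() {
        if (androidx.core.content.ContextCompat.checkSelfPermission(
                        this, android.Manifest.permission.CAMERA)
                != android.content.pm.PackageManager.PERMISSION_GRANTED) {
            androidx.core.app.ActivityCompat.requestPermissions(
                    this,
                    new String[] {android.Manifest.permission.CAMERA},
                    CAMERA_PERMISSION_REQUEST);
        } else {
            setupStreamingModePipeline();
        }
    }

    @Override
    public void onRequestPermissionsResult(
            int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == CAMERA_PERMISSION_REQUEST
                && grantResults.length > 0
                && grantResults[0] == android.content.pm.PackageManager.PERMISSION_GRANTED) {
            setupStreamingModePipeline();
        }
    }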

FaceDetectionResultImageView.java


package com.ispring.facedetection;

import static java.lang.Math.min;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import androidx.appcompat.widget.AppCompatImageView;
import com.google.mediapipe.formats.proto.DetectionProto.Detection;
import com.google.mediapipe.solutions.facedetection.FaceDetectionResult;
import com.google.mediapipe.solutions.facedetection.FaceKeypoint;

/** An ImageView implementation for displaying {@link FaceDetectionResult}. */
public class FaceDetectionResultImageView extends AppCompatImageView {
  private static final String TAG = "FaceDetectionResultImageView";

  private static final int KEYPOINT_COLOR = Color.RED;
  private static final int KEYPOINT_RADIUS = 8; // Pixels
  private static final int BBOX_COLOR = Color.GREEN;
  private static final int BBOX_THICKNESS = 5; // Pixels
  private Bitmap latest;

  public FaceDetectionResultImageView(Context context) {
    super(context);
    setScaleType(AppCompatImageView.ScaleType.FIT_CENTER);
  }

  /**
   * Sets a {@link FaceDetectionResult} to render.
   *
   * @param result a {@link FaceDetectionResult} object that contains the solution outputs and the
   *     input {@link Bitmap}.
   */
  public void setFaceDetectionResult(FaceDetectionResult result) {
    if (result == null) {
      return;
    }
    Bitmap bmInput = result.inputBitmap();
    int width = bmInput.getWidth();
    int height = bmInput.getHeight();
    latest = Bitmap.createBitmap(width, height, bmInput.getConfig());
    Canvas canvas = new Canvas(latest);

    canvas.drawBitmap(bmInput, new Matrix(), null);
    int numDetectedFaces = result.multiFaceDetections().size();
    for (int i = 0; i < numDetectedFaces; ++i) {
      drawDetectionOnCanvas(result.multiFaceDetections().get(i), canvas, width, height);
    }
  }

  /** Updates the image view with the latest {@link FaceDetectionResult}. */
  public void update() {
    postInvalidate();
    if (latest != null) {
      setImageBitmap(latest);
    }
  }

  private void drawDetectionOnCanvas(Detection detection, Canvas canvas, int width, int height) {
    if (!detection.hasLocationData()) {
      return;
    }
    // Draw keypoints.
    Paint keypointPaint = new Paint();
    keypointPaint.setColor(KEYPOINT_COLOR);
    for (int i = 0; i < FaceKeypoint.NUM_KEY_POINTS; ++i) {
      int xPixel =
          min(
              (int) (detection.getLocationData().getRelativeKeypoints(i).getX() * width),
              width - 1);
      int yPixel =
          min(
              (int) (detection.getLocationData().getRelativeKeypoints(i).getY() * height),
              height - 1);
      canvas.drawCircle(xPixel, yPixel, KEYPOINT_RADIUS, keypointPaint);
    }
    if (!detection.getLocationData().hasRelativeBoundingBox()) {
      return;
    }
    // Draw bounding box.
    Paint bboxPaint = new Paint();
    bboxPaint.setColor(BBOX_COLOR);
    bboxPaint.setStyle(Paint.Style.STROKE);
    bboxPaint.setStrokeWidth(BBOX_THICKNESS);
    float left = detection.getLocationData().getRelativeBoundingBox().getXmin() * width;
    float top = detection.getLocationData().getRelativeBoundingBox().getYmin() * height;
    float right = left + detection.getLocationData().getRelativeBoundingBox().getWidth() * width;
    float bottom = top + detection.getLocationData().getRelativeBoundingBox().getHeight() * height;
    canvas.drawRect(left, top, right, bottom, bboxPaint);
  }
}
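
MainActivity above only uses the GL renderer; this ImageView is the equivalent display for static-image mode. A minimal usage sketch inside an Activity, assuming bitmap is a Bitmap you have already loaded (the variable names are illustrative):

    FaceDetection faceDetection = new FaceDetection(
            this,
            FaceDetectionOptions.builder()
                    .setStaticImageMode(true) // one-shot images instead of a stream
                    .setModelSelection(0)
                    .build());
    FaceDetectionResultImageView imageView = new FaceDetectionResultImageView(this);
    faceDetection.setResultListener(
            result -> {
                imageView.setFaceDetectionResult(result);
                runOnUiThread(imageView::update);
            });
    faceDetection.send(bitmap); // triggers the result listener above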

FaceDetectionResultGlRenderer.java

// Copyright 2021 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.ispring.facedetection;

import android.opengl.GLES20;
import com.google.mediapipe.formats.proto.DetectionProto.Detection;
import com.google.mediapipe.solutioncore.ResultGlRenderer;
import com.google.mediapipe.solutions.facedetection.FaceDetectionResult;
import com.google.mediapipe.solutions.facedetection.FaceKeypoint;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/** A custom implementation of {@link ResultGlRenderer} to render {@link FaceDetectionResult}. */
public class FaceDetectionResultGlRenderer implements ResultGlRenderer<FaceDetectionResult> {
  private static final String TAG = "FaceDetectionResultGlRenderer";

  private static final float[] KEYPOINT_COLOR = new float[] {1f, 0f, 0f, 1f};
  private static final float KEYPOINT_SIZE = 16f;
  private static final float[] BBOX_COLOR = new float[] {0f, 1f, 0f, 1f};
  private static final int BBOX_THICKNESS = 8;
  private static final String VERTEX_SHADER =
      "uniform mat4 uProjectionMatrix;\n"
          + "uniform float uPointSize;\n"
          + "attribute vec4 vPosition;\n"
          + "void main() {\n"
          + "  gl_Position = uProjectionMatrix * vPosition;\n"
          + "  gl_PointSize = uPointSize;"
          + "}";
  private static final String FRAGMENT_SHADER =
      "precision mediump float;\n"
          + "uniform vec4 uColor;\n"
          + "void main() {\n"
          + "  gl_FragColor = uColor;\n"
          + "}";
  private int program;
  private int positionHandle;
  private int pointSizeHandle;
  private int projectionMatrixHandle;
  private int colorHandle;

  private int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    return shader;
  }

  @Override
  public void setupRendering() {
    program = GLES20.glCreateProgram();
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);
    positionHandle = GLES20.glGetAttribLocation(program, "vPosition");
    pointSizeHandle = GLES20.glGetUniformLocation(program, "uPointSize");
    projectionMatrixHandle = GLES20.glGetUniformLocation(program, "uProjectionMatrix");
    colorHandle = GLES20.glGetUniformLocation(program, "uColor");
  }

  @Override
  public void renderResult(FaceDetectionResult result, float[] projectionMatrix) {
    if (result == null) {
      return;
    }
    GLES20.glUseProgram(program);
    GLES20.glUniformMatrix4fv(projectionMatrixHandle, 1, false, projectionMatrix, 0);
    GLES20.glUniform1f(pointSizeHandle, KEYPOINT_SIZE);
    int numDetectedFaces = result.multiFaceDetections().size();
    for (int i = 0; i < numDetectedFaces; ++i) {
      drawDetection(result.multiFaceDetections().get(i));
    }
  }

  /**
   * Deletes the shader program.
   *
   * <p>This is only necessary if one wants to release the program while keeping the context around.
   */
  public void release() {
    GLES20.glDeleteProgram(program);
  }

  private void drawDetection(Detection detection) {
    if (!detection.hasLocationData()) {
      return;
    }
    // Draw keypoints.
    float[] points = new float[FaceKeypoint.NUM_KEY_POINTS * 2];
    for (int i = 0; i < FaceKeypoint.NUM_KEY_POINTS; ++i) {
      points[2 * i] = detection.getLocationData().getRelativeKeypoints(i).getX();
      points[2 * i + 1] = detection.getLocationData().getRelativeKeypoints(i).getY();
    }
    GLES20.glUniform4fv(colorHandle, 1, KEYPOINT_COLOR, 0);
    FloatBuffer vertexBuffer =
        ByteBuffer.allocateDirect(points.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(points);
    vertexBuffer.position(0);
    GLES20.glEnableVertexAttribArray(positionHandle);
    GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
    GLES20.glDrawArrays(GLES20.GL_POINTS, 0, FaceKeypoint.NUM_KEY_POINTS);
    if (!detection.getLocationData().hasRelativeBoundingBox()) {
      return;
    }
    // Draw bounding box.
    float left = detection.getLocationData().getRelativeBoundingBox().getXmin();
    float top = detection.getLocationData().getRelativeBoundingBox().getYmin();
    float right = left + detection.getLocationData().getRelativeBoundingBox().getWidth();
    float bottom = top + detection.getLocationData().getRelativeBoundingBox().getHeight();
    drawLine(top, left, top, right);
    drawLine(bottom, left, bottom, right);
    drawLine(top, left, bottom, left);
    drawLine(top, right, bottom, right);
  }

  private void drawLine(float y1, float x1, float y2, float x2) {
    GLES20.glUniform4fv(colorHandle, 1, BBOX_COLOR, 0);
    GLES20.glLineWidth(BBOX_THICKNESS);
    float[] vertex = {x1, y1, x2, y2};
    FloatBuffer vertexBuffer =
        ByteBuffer.allocateDirect(vertex.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(vertex);
    vertexBuffer.position(0);
    GLES20.glEnableVertexAttribArray(positionHandle);
    GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
    GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
  }
}
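
A note on coordinates: renderResult receives a projection matrix from SolutionGlSurfaceView that maps the detector's normalized [0, 1] output onto the view, so drawDetection and drawLine can pass the relative keypoint and bounding-box values through unchanged. Call release() only if you need to free the shader program while keeping the GL context alive, as the Javadoc above notes.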

build.gradle

apply plugin: 'com.android.application'

android {
    signingConfigs {
        debug {
            storeFile file('D:\\android_debug.jks')
            storePassword '123456'
            keyAlias 'key0'
            keyPassword '123456'
        }
    }
    compileSdkVersion 32
    buildToolsVersion "32.0.0"

    defaultConfig {
        applicationId "com.ispring.gameplane"
        minSdkVersion 21
        targetSdkVersion 32
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])

    implementation 'androidx.appcompat:appcompat:1.3.0'
    implementation 'com.google.android.material:material:1.3.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
    implementation 'androidx.exifinterface:exifinterface:1.3.3'
    testImplementation 'junit:junit:4.13.2'
    
    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'

    implementation 'com.google.mediapipe:solution-core:latest.release'
    implementation 'com.google.mediapipe:facedetection:latest.release'
}
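
The two MediaPipe artifacts resolve from Google's Maven repository, and latest.release is a dynamic version, so pinning a concrete version makes builds more reproducible. A sketch of the project-level build.gradle this setup assumes (not shown in the original post):

    allprojects {
        repositories {
            google()        // hosts the com.google.mediapipe artifacts
            mavenCentral()
        }
    }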
