1 本文主要介绍使用Android相机来实现相机预览并添加滤镜和录制的项目
2 该项目实现了实时获取人脸的关键点定位,并将定位到的关键点通过OpenGL和特效算法来实现贴图、人眼放大功能
3 支持磨皮功能
4 人脸识别使用OpenCV
5 人脸关键点定位使用FaceAlignment
初始化
1 SurfaceView初始化
<com.yeliang.widget.CommonSurfaceView
android:id="@+id/surfaceview"
android:layout_width="match_parent"
android:layout_height="match_parent" />
// Custom GLSurfaceView hosting the camera preview + filter pipeline.
public CommonSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
// Request an OpenGL ES 2.0 context for this GLSurfaceView.
setEGLContextClientVersion(2);
mCommonRender = new CommonRender(this);
setRenderer(mCommonRender);
// Only redraw on requestRender() -- rendering is driven by camera frames.
setRenderMode(RENDERMODE_WHEN_DIRTY);
}
2 Render初始化
// Keeps a reference to the host view so the renderer can request redraws
// and obtain a Context for the filters.
CommonRender(GLSurfaceView surfaceView) {
mSurfaceView = surfaceView;
}
Render回调
CommonRender实现了Renderer接口,所以Render会收到相应的Surface状态回调
CommonRender implements GLSurfaceView.Renderer
onSurfaceCreated()
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
mTextures = new int[1];
//1 Generate the texture id the camera preview will be rendered into.
GLES20.glGenTextures(mTextures.length, mTextures, 0);
//2 Create the SurfaceTexture backed by that texture; the camera writes
// each preview frame into it.
mSurfaceTexture = new SurfaceTexture(mTextures[0]);
mSurfaceTexture.setOnFrameAvailableListener(this);
mCameraFilter = new CameraFilter(mSurfaceView.getContext());
mScreenFilter = new ScreenFilter(mSurfaceView.getContext());
mBigEyeFilter = new BigEyeFilter(mSurfaceView.getContext());
mStickFilter = new StickFilter(mSurfaceView.getContext());
mBeautyFilter = new BeautyFilter(mSurfaceView.getContext());
// EGL context of the GL render thread, passed to the recorder so its
// encoder thread can share the same textures.
EGLContext eglContext = EGL14.eglGetCurrentContext();
// NOTE(review): output path is hard-coded to /sdcard, which needs legacy
// external-storage permission -- consider app-specific storage.
mMediaRecorder = new MediaRecorder(mSurfaceView.getContext(), "/sdcard/record.mp4", eglContext);
}
在onSurfaceCreated()方法中
1 创建纹理Id
该纹理id用于后续OpenGl绘制的纹理Id
mTextures = new int[1];
GLES20.glGenTextures(mTextures.length, mTextures, 0);
在后续OpenGL绘制时,要先绑定该纹理id
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
2 创建SurfaceTexture
mSurfaceTexture = new SurfaceTexture(mTextures[0]);
关于mSurfaceTexture的作用,有以下三点
第一点,首先是作为Camera的纹理对象,当Camera获取到数据后,会将渲染数据赋值给SurfaceTexture
mCamera.setPreviewTexture(mSurfaceTexture);
第二点,设置一帧纹理可用时的监听
mSurfaceTexture.setOnFrameAvailableListener(this);
当收到纹理可用的回调时,通知SurfaceView渲染
// Called whenever a new camera frame is available in the SurfaceTexture;
// schedule one render pass (render mode is RENDERMODE_WHEN_DIRTY).
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
mSurfaceView.requestRender();
}
mSurfaceView.requestRender();的调用会触发绘制即onDrawFrame()方法
第三点,通过SurfaceTexture获取变换矩阵,用于后续OpenGL的矩阵变换
GLES20.glUniformMatrix4fv(vMatrix, 1, false, matrix, 0);
3 创建MediaRecorder对象
mMediaRecorder = new MediaRecorder(mSurfaceView.getContext(), "/sdcard/record.mp4", eglContext);
onSurfaceChanged()
当Surface宽高信息变化时,会触发此方法。
在此方法中创建了FaceTrack对象,并使用FaceTrack对象开启实时识别,并将SurfaceView的宽高传递给滤镜。
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Create the face tracker (OpenCV cascade + SeetaFace alignment models).
// NOTE(review): paths are hard-coded to /sdcard, and this callback can
// fire more than once (e.g. size changes) -- the tracker would then be
// re-created without releasing the old one; confirm intended.
mFaceTrack = new FaceTrack("/sdcard/lbpcascade_frontalface.xml",
"/sdcard/seeta_fa_v1.1.bin");
// Start detection on the tracker's worker thread.
mFaceTrack.startTrack();
// Propagate the surface size to every filter so they can size their FBOs.
mCameraFilter.onReady(width, height);
mScreenFilter.onReady(width, height);
mBigEyeFilter.onReady(width, height);
mStickFilter.onReady(width, height);
mBeautyFilter.onReady(width, height);
}
1 创建FaceTrace
mFaceTrack = new FaceTrack("/sdcard/lbpcascade_frontalface.xml",
"/sdcard/seeta_fa_v1.1.bin");
2 启动追踪器
mFaceTrack.startTrack();
onDrawFrame()
onDrawFrame()方法中将多种效果绘制到同一个纹理
@Override
public void onDrawFrame(GL10 gl10) {
//1 Set the clear color (transparent black).
GLES20.glClearColor(0, 0, 0, 0);
//2 Clear the color buffer to that color.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//3 Pull the latest camera frame into the texture; must happen on the GL
// thread before the frame can be sampled.
mSurfaceTexture.updateTexImage();
//4 Fetch the frame's texture transform matrix and hand it to the camera filter.
mSurfaceTexture.getTransformMatrix(mtx);
mCameraFilter.setMatrix(mtx);
//1 Camera capture layer: draws the camera texture, returns the resulting texture id.
int textureId = mCameraFilter.onDrawFrame(mTextures[0]);
//2 Big-eye layer, driven by the most recent face landmarks.
Face face = mFaceTrack.getFace();
mBigEyeFilter.setFace(face);
textureId = mBigEyeFilter.onDrawFrame(textureId);
//3 Sticker layer reuses the same face data.
mStickFilter.setFace(face);
textureId = mStickFilter.onDrawFrame(textureId);
// Optional skin-smoothing layer.
if (isOpenBeauty) {
textureId = mBeautyFilter.onDrawFrame(textureId);
}
// Draw the composed texture to screen, then hand it to the recorder.
mScreenFilter.onDrawFrame(textureId);
mMediaRecorder.encodeFrame(textureId, mSurfaceTexture.getTimestamp());
}
1 多层纹理贴图
int textureId = mCameraFilter.onDrawFrame(mTextures[0]);
//2 大眼纹理
Face face = mFaceTrack.getFace();
mBigEyeFilter.setFace(face);
textureId = mBigEyeFilter.onDrawFrame(textureId);
//3 贴纸纹理
mStickFilter.setFace(face);
textureId = mStickFilter.onDrawFrame(textureId);
if (isOpenBeauty) {
textureId = mBeautyFilter.onDrawFrame(textureId);
}
mScreenFilter.onDrawFrame(textureId);
2 使用MediaRecorder编码纹理数据
mMediaRecorder.encodeFrame(textureId, mSurfaceTexture.getTimestamp());
触发打开相机
1 打开相机
// Opens the camera (idempotent) and starts streaming preview frames into
// the SurfaceTexture and the buffered preview callback.
public void startPreview() {
if (mCameraIsOpen) {
return;
}
mCameraIsOpen = true;
//1 Open the camera.
mCamera = Camera.open(mCameraId);
//2 Configure preview parameters; NV21 is the guaranteed preview format.
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
setPreviewSize(parameters);
mCamera.setParameters(parameters);
//3 Buffered preview callback; NV21 needs width*height*3/2 bytes per frame.
// NOTE(review): with setPreviewCallbackWithBuffer the buffer must be
// re-queued via addCallbackBuffer() from onPreviewFrame(), otherwise
// callbacks stop after this single buffer is consumed -- confirm.
buffer = new byte[mWidth * mHeight * 3 / 2];
mCamera.addCallbackBuffer(buffer);
mCamera.setPreviewCallbackWithBuffer(this);
try {
//4 Route preview frames into the SurfaceTexture (the GL texture).
mCamera.setPreviewTexture(mSurfaceTexture);
//5 Start the preview stream.
mCamera.startPreview();
//6 Auto focus, back camera only.
if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
loopAutoFocus();
}
} catch (IOException e) {
e.printStackTrace();
}
}
2 接收回调数据
相机开启后,CommonRender#onPreviewFrame(byte[] data, Camera camera)方法将会一直接收到像素数据并将数据传递到native来实时获取关键点定位信息。
mCamera.setPreviewCallbackWithBuffer(this);
// Camera preview callback: forwards each NV21 frame to the face tracker.
// (Method name 'detecor' is a typo of 'detector' in the project API.)
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
mFaceTrack.detecor(data);
}
当给SurfaceTexture设置了数据可用回调,并给Camera设置了SurfaceTexture后,相机开启后,会不断回调到onFrameAvailable()方法,在此方法中请求一次绘制。
mSurfaceTexture.setOnFrameAvailableListener(this);
// New-frame callback: request exactly one render pass per camera frame.
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
mSurfaceView.requestRender();
}
mCamera.setPreviewTexture(mSurfaceTexture);
追踪器FaceTrack
在CommonRender#onSurfaceChanged()方法中会触发创建FaceTrack对象
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
//创建跟踪器
mFaceTrack = new FaceTrack("/sdcard/lbpcascade_frontalface.xml",
"/sdcard/seeta_fa_v1.1.bin");
}
public FaceTrack(String model, String seeta) {}
其中的model和seeta分别对应assets文件夹中的如下两个文件,这里先写死路径为本地路径了。
在FaceTrack的构造方法中,
首先执行native层初始化
然后在子线程中将从Camera中获取到的图像数据传递到native中,
最后将结果存储到Face对象中
Face中包含人脸的关键点的坐标
// Creates the native tracker and a worker thread for asynchronous detection.
// model: OpenCV cascade file path; seeta: SeetaFace alignment model path.
public FaceTrack(String model, String seeta) {
// 'self' holds the pointer to the native FaceTrack instance.
self = native_create(model, seeta);
// Detection runs on a dedicated HandlerThread so the camera callback
// thread is never blocked by native processing.
mHandlerThread = new HandlerThread("track");
mHandlerThread.start();
mHandler = new Handler(mHandlerThread.getLooper()) {
@Override
public void handleMessage(Message msg) {
// NOTE(review): presumably guards against concurrent release of the
// native pointer while a detection is in flight -- confirm.
synchronized (FaceTrack.this) {
if (mCameraHelper == null) {
return;
}
mFace = native_detector(self, (byte[]) msg.obj, mCameraHelper.getCameraId(), CameraHelper.mWidth, CameraHelper.mHeight);
}
}
};
}
1 初始化native层的FaceTrack
首先创建cpp中的FaceTrack对象
FaceTrack *faceTrack = new FaceTrack(model, seeta);
tracker = makePtr<DetectionBasedTracker>(mainDetector, trackingDetector, detectorParams);
faceAlignment = makePtr<seeta::FaceAlignment>(seeta);
2 开启追踪器
在onSurfaceChanged()方法中调用java层的FaceTrack对象开始检测
mFaceTrack.startTrack();
发起调用native层的开始检测方法
// Starts native tracking; delegates to FaceTrack::startTracking() in C++.
public void startTrack() {
native_start(self);
}
jni方法
// JNI bridge for FaceTrack.startTrack(); 'self' is the native FaceTrack
// pointer returned by native_create.
extern "C"
JNIEXPORT void JNICALL
Java_com_yeliang_face_FaceTrack_native_1start(JNIEnv *env, jobject instance, jlong self) {
FaceTrack *me =(FaceTrack*) self;
me->startTracking();
}
3 发送图像数据到native层
相机开启状态下会将像素数据回调并传递到onPreviewFrame()方法中
// Preview callback: each NV21 frame is handed to the tracker for detection.
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
mFaceTrack.detecor(data);
}
FaceTrack#detecor()
// Queues one NV21 frame for asynchronous detection. Any frame still
// pending under message id 11 is dropped first, so the worker thread
// always processes only the most recent frame and never falls behind.
public void detecor(byte[] data) {
mHandler.removeMessages(11);
Message message = mHandler.obtainMessage(11);
message.obj = data;
mHandler.sendMessage(message);
}
mFace = native_detector(self, (byte[]) msg.obj, mCameraHelper.getCameraId(), CameraHelper.mWidth, CameraHelper.mHeight);
获取关键点定位的数组并赋值到java层的Face对象
// JNI bridge for FaceTrack.native_detector(): runs face detection and
// landmark alignment on one NV21 preview frame, and returns a Java Face
// object, or NULL when the tracker is not initialised or no face found.
extern "C"
JNIEXPORT jobject JNICALL
Java_com_yeliang_face_FaceTrack_native_1detector(JNIEnv *env, jobject instance, jlong self,
jbyteArray data_, jint cameraId, jint width,
jint height) {
//LOGD("detector self = %ld", self);
if(self == 0){
//LOGD("detector self = %ld", self);
return NULL;
}
jbyte *data = env->GetByteArrayElements(data_, NULL);
FaceTrack *me = (FaceTrack *) self;
// NV21 layout: 'height' rows of Y followed by height/2 rows of interleaved VU.
Mat src(height + height / 2, width, CV_8UC1, data);
cvtColor(src, src, CV_YUV2RGBA_NV21);
if(cameraId == 1){
// Front camera: rotate 90 degrees counter-clockwise to upright...
rotate(src, src, ROTATE_90_COUNTERCLOCKWISE);
// ...then mirror horizontally (cv::flip with flipCode 1 flips around
// the vertical axis).
flip(src, src, 1);
} else {
// Back camera: sensor is mounted 90 degrees off; rotate clockwise.
rotate(src, src, ROTATE_90_CLOCKWISE);
}
cvtColor(src, src, COLOR_RGBA2GRAY);
// Histogram equalisation improves contrast for the cascade detector.
equalizeHist(src, src);
vector<Rect2f> rects;
me->detector(src, rects);
env->ReleaseByteArrayElements(data_, data, 0);
int w = src.cols;
int h = src.rows;
src.release();
int ret = rects.size();
//LOGD("detector ret = %d", ret);
if(ret){
jclass clazz = env->FindClass("com/yeliang/face/Face");
//LOGD("detector ret = %d", &clazz);
// Face(int width, int height, int imgWidth, int imgHeight, float[] points)
jmethodID construct = env->GetMethodID(clazz, "<init>","(IIII[F)V");
// Flatten the (x, y) of every rect into a float array.
// NOTE(review): rects presumably holds the face box followed by the
// landmark points -- confirm against FaceTrack::detector.
int size = ret * 2;
jfloatArray floatArray = env->NewFloatArray(size);
for(int i = 0, j = 0; i < size; j++){
float f[2] = {rects[j].x, rects[j].y};
env->SetFloatArrayRegion(floatArray, i, 2, f);
i += 2;
}
// rects[0] is the face rectangle; note the implicit float->int narrowing.
Rect2f faceRect = rects[0];
int width = faceRect.width;
int height = faceRect.height;
jobject face = env->NewObject(clazz, construct, width, height, w, h, floatArray);
//LOGD("detector ret = %d", &face);
return face;
}
return NULL;
}
4 获取关键点
mFace = native_detector(self, (byte[]) msg.obj, mCameraHelper.getCameraId(), CameraHelper.mWidth, CameraHelper.mHeight);
获取到人脸的关键定位点信息后,大眼纹理和贴纸纹理可以利用此人脸信息实现大眼和贴纸功能
mFace = native_detector(self, (byte[]) msg.obj, mCameraHelper.getCameraId(), CameraHelper.mWidth, CameraHelper.mHeight);
FaceTrack#getFace()
// Latest detection result written by the worker thread; may be null
// before a face has been detected (native_detector returns NULL then).
public Face getFace() {
return mFace;
}
//2 大眼纹理
Face face = mFaceTrack.getFace();
mBigEyeFilter.setFace(face);
textureId = mBigEyeFilter.onDrawFrame(textureId);
//3 贴纸纹理
mStickFilter.setFace(face);
textureId = mStickFilter.onDrawFrame(textureId);
if (isOpenBeauty) {
textureId = mBeautyFilter.onDrawFrame(textureId);
}
Camera数据渲染
1 顶点着色器
// Vertex shader for the camera texture. vMatrix is the transform from
// SurfaceTexture.getTransformMatrix(); note it is applied to the texture
// coordinate, not the vertex position.
attribute vec4 vPosition;
attribute vec4 vCoord;
uniform mat4 vMatrix;
varying vec2 aCoord;
void main() {
gl_Position = vPosition;
aCoord = (vMatrix * vCoord).xy;
}
2 片元着色器
// Fragment shader sampling the external (camera) texture; the OES
// extension is required to use samplerExternalOES.
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 aCoord;
uniform samplerExternalOES vTexture;
void main() {
gl_FragColor = texture2D(vTexture, aCoord);
}
3 加载顶点和片元着色器文件
String vertexShader = FileUtils.readRawTextFile(context, mVertexShaderId);
String fragShader = FileUtils.readRawTextFile(context, mFragShaderId);
/**
 * Reads a raw text resource (e.g. a shader source file) into a String,
 * normalising every line break to '\n'.
 *
 * @param context context used to resolve the raw resource
 * @param rawId   resource id under res/raw
 * @return the file content; on IOException, whatever was read so far
 */
public static String readRawTextFile(Context context, int rawId) {
    StringBuilder stringBuilder = new StringBuilder();
    // try-with-resources closes the reader (and the underlying stream)
    // even when readLine() throws, replacing the two sequential try blocks.
    try (BufferedReader bufferedReader = new BufferedReader(
            new InputStreamReader(context.getResources().openRawResource(rawId)))) {
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            stringBuilder.append(line);
            stringBuilder.append("\n");
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return stringBuilder.toString();
}
4 创建顶点和片元着色器程序
mGLProgramId = OpenGlUtils.loadProgram(vertexShader, fragShader);
/**
 * Compiles a vertex and a fragment shader and links them into a program.
 *
 * @param vSource vertex shader GLSL source
 * @param fSource fragment shader GLSL source
 * @return the linked program id
 * @throws IllegalStateException if compilation or linking fails
 */
public static int loadProgram(String vSource, String fSource) {
    //1 Vertex shader.
    int vShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
    GLES20.glShaderSource(vShader, vSource);
    GLES20.glCompileShader(vShader);
    int[] status = new int[1];
    GLES20.glGetShaderiv(vShader, GLES20.GL_COMPILE_STATUS, status, 0);
    if (status[0] != GLES20.GL_TRUE) {
        throw new IllegalStateException("load vertex shader failed: " + GLES20.glGetShaderInfoLog(vShader));
    }
    //2 Fragment shader.
    int fShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
    GLES20.glShaderSource(fShader, fSource);
    GLES20.glCompileShader(fShader);
    GLES20.glGetShaderiv(fShader, GLES20.GL_COMPILE_STATUS, status, 0);
    if (status[0] != GLES20.GL_TRUE) {
        // BUGFIX: the original queried glGetShaderInfoLog(vShader) here,
        // reporting the (empty) vertex log for a fragment-shader failure.
        throw new IllegalStateException("load fragment shader failed: " + GLES20.glGetShaderInfoLog(fShader));
    }
    //3 Link the program.
    int program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vShader);
    GLES20.glAttachShader(program, fShader);
    GLES20.glLinkProgram(program);
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
    if (status[0] != GLES20.GL_TRUE) {
        throw new IllegalStateException("link program failed: " + GLES20.glGetProgramInfoLog(program));
    }
    // Shaders are attached to the program; the standalone objects can go.
    GLES20.glDeleteShader(vShader);
    GLES20.glDeleteShader(fShader);
    return program;
}
5 获取着色器属性
vPosition = GLES20.glGetAttribLocation(mGLProgramId, "vPosition");
vCoord = GLES20.glGetAttribLocation(mGLProgramId, "vCoord");
vMatrix = GLES20.glGetUniformLocation(mGLProgramId, "vMatrix");
vTexture = GLES20.glGetUniformLocation(mGLProgramId, "vTexture");
6 初始化顶点坐标和纹理坐标
//1 初始化顶点坐标
mGLVertexBuffer = ByteBuffer.allocateDirect(4 * 2 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLVertexBuffer.clear();
float[] vertex = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f
};
mGLVertexBuffer.put(vertex);
//2 初始化纹理坐标
mGLTextureBuffer = ByteBuffer.allocateDirect(4 * 2 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mGLTextureBuffer.clear();
float[] TEXTURE = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f
};
mGLTextureBuffer.put(TEXTURE);