1.下载编译好的SDK
2.解压后,新建项目,拷贝对应文件到项目中:
3.CMakeLists.txt修改如下:
# Minimum version shipped with the Android Gradle plugin CMake toolchain.
cmake_minimum_required(VERSION 3.4.1)

# NOTE(review): file(GLOB) misses newly added sources until the next
# re-configure; an explicit source list is preferable, but the glob is kept
# because the full set of tutorial sources is not known here.
file(GLOB my_source_path src/main/cpp/*.cpp src/main/cpp/*.c)

add_library(
        native-lib
        SHARED
        ${my_source_path})

# Header search path scoped to the target instead of the whole directory.
target_include_directories(native-lib PRIVATE src/main/cpp/include)

# Prebuilt OpenCV shared library, imported per-ABI from jniLibs.
add_library(lib_opencv SHARED IMPORTED)
set_target_properties(
        lib_opencv
        PROPERTIES
        IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libopencv_java3.so)

# Explicit PRIVATE visibility; the keyword-less signature has legacy
# semantics and must not be mixed with the keyword form later.
target_link_libraries(
        native-lib
        PRIVATE
        android
        log
        jnigraphics
        lib_opencv)
4.在cpp下新建native-lib.h
//
// Created by ygdx_lk on 17/11/20.
//
// Native face-detection interface exposed to MainActivity via JNI.
// NOTE(review): the two globals below are *definitions* in a header; this
// violates the ODR if the header is ever included from more than one
// translation unit. It works here only because native-lib.cpp is the sole
// includer — consider `extern` declarations plus definitions in the .cpp.
#ifndef IMAGEFACERECOGNITION_NATIVE_LIB_H
#define IMAGEFACERECOGNITION_NATIVE_LIB_H
#include <jni.h>
#include <string>
#include <android/log.h>
#include <android/bitmap.h>
#include <opencv2/opencv.hpp>
#include <android/native_window_jni.h>
#define LOG_TAG "native"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
extern "C"{
// NOTE(review): `using namespace` in a header leaks into every includer.
using namespace cv;
using namespace std;
// Haar cascade classifier; allocated in loadModel(), freed in destroy().
CascadeClassifier *faceClassifier;
// Render target obtained from the Java Surface in setSurfaceview().
ANativeWindow *nativeWindow;
JNIEXPORT void JNICALL Java_com_study_imagefacerecognition_MainActivity_loadModel(JNIEnv *env, jobject instance, jstring detectMode_);
JNIEXPORT jboolean JNICALL Java_com_study_imagefacerecognition_MainActivity_process(JNIEnv *env, jobject instance, jobject bitmap);
JNIEXPORT void JNICALL Java_com_study_imagefacerecognition_MainActivity_setSurfaceview(JNIEnv *env, jobject instance, jobject surface, jint w, jint h);
JNIEXPORT void JNICALL Java_com_study_imagefacerecognition_MainActivity_destroy(JNIEnv *env, jobject instance);
// Converts an Android Bitmap (RGBA_8888 or RGB_565) into a BGR cv::Mat.
void bitmap2Mat(JNIEnv *env, jobject bitmap, Mat &dst);
};
#endif //IMAGEFACERECOGNITION_NATIVE_LIB_H
5.native-lib.cpp
#include "native-lib.h"
// Loads the Haar cascade model from the absolute file path in detectMode_.
// Fix: the original leaked the previous classifier when called repeatedly.
void Java_com_study_imagefacerecognition_MainActivity_loadModel(JNIEnv *env, jobject instance, jstring detectMode_) {
    const char *detectMode = env->GetStringUTFChars(detectMode_, 0);
    // Replace — not leak — any classifier from an earlier call.
    delete faceClassifier;
    // Build the classifier model from the cascade XML file.
    faceClassifier = new CascadeClassifier(detectMode);
    // NOTE(review): CascadeClassifier(path) silently yields an empty
    // classifier when the file cannot be read; checking empty() here and
    // logging would make model-loading failures visible.
    env->ReleaseStringUTFChars(detectMode_, detectMode);
}
// Detects faces in the given Bitmap, draws a rectangle around each one and
// renders the result into the bound native window.
// Returns 1 on success; 0 when no window is bound or its buffer cannot be
// locked. Fixes vs. original: the row copy is now clamped to both the
// window height and the row stride (the original memcpy could write past
// the window buffer when the image was larger than the window), and the
// per-call debug imwrite() dumps to /sdcard have been removed.
jboolean Java_com_study_imagefacerecognition_MainActivity_process(JNIEnv *env, jobject instance, jobject bitmap) {
    int ret = 1;
    Mat src;
    bitmap2Mat(env, bitmap, src);
    if (faceClassifier) {
        vector<Rect> faces;
        Mat grayMat;
        // Grayscale + histogram equalization improve cascade detection.
        cvtColor(src, grayMat, CV_BGR2GRAY);
        equalizeHist(grayMat, grayMat);
        // Detect; every found head region is appended to `faces`.
        faceClassifier->detectMultiScale(grayMat, faces);
        grayMat.release();
        for (size_t i = 0; i < faces.size(); ++i) {
            const Rect &face = faces[i];
            // Scalar(0, 255, 255): rectangle color.
            rectangle(src, face.tl(), face.br(), Scalar(0, 255, 255));
        }
    }
    if (!nativeWindow) {
        src.release();
        return 0;
    }
    ANativeWindow_Buffer window_buffer;
    if (ANativeWindow_lock(nativeWindow, &window_buffer, 0)) {
        src.release();
        return 0;
    }
    // The window buffers are RGBA_8888 (configured in setSurfaceview).
    cvtColor(src, src, CV_BGR2RGBA);
    LOGI("ROW:%d, STEP:%d window_w:%d, window_h:%d", src.rows, (int) src.step,
         ANativeWindow_getWidth(nativeWindow), ANativeWindow_getHeight(nativeWindow));
    // Destination buffer base address.
    uint8_t *dst = (uint8_t *) window_buffer.bits;
    // Bytes per window row: stride is in pixels, RGBA_8888 = 4 bytes each.
    int dstStride = window_buffer.stride * 4;
    // Never copy more rows than the window has.
    int copyRows = src.rows < window_buffer.height ? src.rows : window_buffer.height;
    for (int i = 0; i < copyRows; ++i) {
        const Mat &row = src.row(i);
        int step = (int) row.step;  // bytes in this image row
        // Never copy more bytes per row than the window row holds.
        int copyBytes = step < dstStride ? step : dstStride;
        // Center the image horizontally when it is narrower than the window.
        memcpy(dst + i * dstStride + (dstStride - copyBytes) / 2, row.data, copyBytes);
    }
    ANativeWindow_unlockAndPost(nativeWindow);
    src.release();
    return ret;
}
// Converts an Android Bitmap into a BGR (CV_8UC3) cv::Mat.
// Supports RGBA_8888 and RGB_565 bitmaps. On any cv::Exception the bitmap
// is unlocked and a Java exception is thrown instead.
// Fixes vs. original: removed the dead `#if 0` branch, and removed the
// wasted `dst.create(CV_8UC4)` + `copyTo` round-trip — cvtColor allocates
// the CV_8UC3 destination itself. Log text updated to match the real
// output type.
void bitmap2Mat(JNIEnv *env, jobject bitmap, Mat &dst) {
    AndroidBitmapInfo info;
    void *pixels = 0;
    try {
        LOGI("nBitmapToMat");
        // Query the bitmap layout.
        CV_Assert(AndroidBitmap_getInfo(env, bitmap, &info) >= 0);
        // Only RGBA_8888 and RGB_565 pixel layouts are handled.
        CV_Assert(info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ||
                  info.format == ANDROID_BITMAP_FORMAT_RGB_565);
        // Lock to get a stable pointer to the pixel data.
        CV_Assert(AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0);
        CV_Assert(pixels);
        if (info.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            LOGI("nBitmapToMat: RGBA_8888 -> CV_8UC3");
            // Wrap the locked pixels without copying, then convert to BGR;
            // cvtColor allocates dst with the right size/type.
            Mat tmp(info.height, info.width, CV_8UC4, pixels);
            cvtColor(tmp, dst, COLOR_RGBA2BGR);
        } else {
            // info.format == ANDROID_BITMAP_FORMAT_RGB_565
            LOGI("nBitmapToMat: RGB_565 -> CV_8UC3");
            Mat tmp(info.height, info.width, CV_8UC2, pixels);
            cvtColor(tmp, dst, COLOR_BGR5652BGR);
        }
        AndroidBitmap_unlockPixels(env, bitmap);
        return;
    } catch (const cv::Exception &e) {
        AndroidBitmap_unlockPixels(env, bitmap);
        LOGI("nBitmapToMat catched cv::Exception: %s", e.what());
        // Re-throw into Java as CvException (or plain Exception as fallback).
        jclass je = env->FindClass("org/opencv/core/CvException");
        if (!je) je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, e.what());
        return;
    } catch (...) {
        AndroidBitmap_unlockPixels(env, bitmap);
        LOGI("nBitmapToMat catched unknown exception (...)");
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, "Unknown exception in JNI code {nBitmapToMat}");
        return;
    }
}
// Binds or releases the render surface. A non-null surface with non-zero
// dimensions replaces any previously held window and configures its
// buffers as w x h RGBA_8888; a null/zero-sized call only releases the
// current window.
void Java_com_study_imagefacerecognition_MainActivity_setSurfaceview(JNIEnv *env, jobject instance, jobject surface, jint w, jint h) {
    // In every case the previously held window must be dropped first.
    if (nativeWindow) {
        ANativeWindow_release(nativeWindow);
        nativeWindow = 0;
    }
    if (!surface || !w || !h) {
        return;
    }
    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow) {
        LOGI("ANativeWindow_setBuffersGeometry %d, %d", w, h);
        ANativeWindow_setBuffersGeometry(nativeWindow, w, h, WINDOW_FORMAT_RGBA_8888);
    }
}
// Frees all native resources: the cascade classifier and the native window.
// Safe to call when nothing was ever allocated.
void Java_com_study_imagefacerecognition_MainActivity_destroy(JNIEnv *env, jobject instance) {
    delete faceClassifier;  // delete on a null pointer is a no-op
    faceClassifier = 0;
    if (nativeWindow) {
        ANativeWindow_release(nativeWindow);
        nativeWindow = 0;
    }
}
6.MainActivity.java
package com.study.imagefacerecognition;
import android.Manifest;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Demo activity: picks an image from the gallery, hands it to native code
 * (OpenCV Haar-cascade face detection via JNI) and displays the annotated
 * result on a SurfaceView.
 */
public class MainActivity extends AppCompatActivity {
// Used to load the 'native-lib' library on application startup.
static {
System.loadLibrary("native-lib");
}
private static final String TAG = "MainActivity";
// Scaled-down dimensions of the currently selected image; set in toBitmap()
// and reused when the surface geometry changes.
private int width_tmp, height_tmp;
// Loads the Haar cascade model file at the given absolute path (native).
private native void loadModel(String detectMode);
// Runs face detection on the bitmap and renders into the bound surface (native).
private native boolean process(Bitmap bitmap);
// Binds the Surface and target buffer size to the native renderer (native).
private native void setSurfaceview(Surface surface, int w, int h);
// Releases native resources: classifier and native window (native).
public native void destroy();
private Button bt_load;
private SurfaceView surfaceview;
private ProgressDialog pd;
// Currently selected bitmap; released via safeRecycled().
private Bitmap bm;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
bt_load = (Button)findViewById(R.id.bt_load);
surfaceview = (SurfaceView)findViewById(R.id.surfaceview);
bt_load.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
openImages();
}
});
surfaceview.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// Re-bind the native window and re-render the current image whenever
// the surface geometry changes.
setSurfaceview(holder.getSurface(), width_tmp, height_tmp);
safeProcess();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
});
loadData();
// Runtime permissions only exist on Android 6.0 (M) and newer.
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
insertDummyContactWrapper();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
// Free native resources first, then the Java-side bitmap.
destroy();
safeRecycled();
}
// Launches a system image picker (ACTION_PICK on KitKat+, otherwise
// ACTION_GET_CONTENT); the result arrives in onActivityResult with code 666.
private void openImages() {
Intent intent;
if(Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT){
intent = new Intent();
intent.setAction(Intent.ACTION_GET_CONTENT);
}else {
intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
}
intent.setType("image/*");
startActivityForResult(Intent.createChooser(intent, "选择图片"), 666);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
// NOTE(review): resultCode is never compared to RESULT_OK; a cancelled
// pick with non-null data would still be processed — confirm intended.
if(requestCode == 666 && data != null){
getResult(data.getData());
}
}
// Resolves the picked Uri to a filesystem path (file:// directly,
// content:// via a MediaStore query), decodes it and runs detection.
private void getResult(Uri uri) {
safeRecycled();
String imagePath = null;
if(uri != null){
Log.i(TAG, "getResult: " + uri.toString());
if("file".equals(uri.getScheme())){
imagePath = uri.getPath();
}else if("content".equals(uri.getScheme())){
String[] filePathColumns = {MediaStore.Images.Media.DATA};
Cursor c = getContentResolver().query(uri, filePathColumns, null, null, null);
if(c != null){
if(c.moveToFirst()){
int columnIndex = c.getColumnIndex(filePathColumns[0]);
imagePath = c.getString(columnIndex);
}
c.close();
}
}
}
if(!TextUtils.isEmpty(imagePath)){
Log.i(TAG, "getResult: " + imagePath);
bm = toBitmap(imagePath);
Log.i(TAG, "getResult getbm: " + bm);
safeProcess();
}
}
// Runs native detection only when a live bitmap is available.
// NOTE(review): process() is a native call executed on the main thread;
// large images may cause jank — consider moving it off the UI thread.
private void safeProcess() {
if(bm != null && !bm.isRecycled()){
process(bm);
}
}
// Recycles and drops the current bitmap, tolerating null/already-recycled.
private void safeRecycled() {
if(bm != null && !bm.isRecycled()){
bm.recycle();
}
bm = null;
}
// Decodes the image at imagePath, halving it until it fits inside the
// SurfaceView, and resizes the native window buffers to match.
private Bitmap toBitmap(String imagePath) {
if(TextUtils.isEmpty(imagePath)){
return null;
}
// First pass: bounds only, to learn the full image dimensions.
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeFile(imagePath, o);
width_tmp = o.outWidth;
height_tmp = o.outHeight;
// Power-of-two downscale until the image fits the SurfaceView.
int scale = 1;
while (true){
if(width_tmp <= surfaceview.getWidth() && height_tmp <= surfaceview.getHeight()){
break;
}
width_tmp /= 2;
height_tmp /= 2;
scale *= 2;
}
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = scale;
// NOTE(review): outHeight/outWidth are output fields of Options; setting
// them before decode has no effect — only inSampleSize matters here.
options.outHeight = height_tmp;
options.outWidth = width_tmp;
Log.i(TAG, "toBitmap: "+ width_tmp + "----" + height_tmp);
setSurfaceview(surfaceview.getHolder().getSurface(), width_tmp, height_tmp);
return BitmapFactory.decodeFile(imagePath, options);
}
// Copies the cascade XML from assets to external storage (once) and loads
// it natively, showing a progress dialog while working.
// NOTE(review): the anonymous AsyncTask holds an implicit reference to
// this Activity for the task's lifetime (potential leak on rotation).
private void loadData() {
new AsyncTask<Void, Void, Void>(){
@Override
protected Void doInBackground(Void... voids) {
File dir = new File(Environment.getExternalStorageDirectory(), "face");
copyAssetsFile("haarcascade_frontalface_alt.xml", dir);
File file = new File(dir, "haarcascade_frontalface_alt.xml");
loadModel(file.getAbsolutePath());
return null;
}
// Copies asset `s` into `dir` unless the target file already exists.
private void copyAssetsFile(String s, File dir) {
if(!dir.exists()){
dir.mkdirs();
}
File file = new File(dir, s);
if(!file.exists()){
InputStream is = null;
FileOutputStream fos = null;
try {
byte[] buffer = new byte[1024];
int len;
is = getAssets().open(s);
fos = new FileOutputStream(file);
while ((len = is.read(buffer)) != -1) {
fos.write(buffer, 0, len);
}
fos.flush();
}catch (Exception e){
e.printStackTrace();
}finally {
// Close both streams regardless of success.
if(fos != null){
try {
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
fos = null;
}
if(is != null){
try {
is.close();
} catch (IOException e) {
e.printStackTrace();
}
is = null;
}
}
}
}
@Override
protected void onPreExecute() {
super.onPreExecute();
showLoading();
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
hideLoading();
}
}.execute();
}
// Dismisses the progress dialog if one is showing.
private void hideLoading() {
if(pd != null){
pd.dismiss();
}
}
// Lazily creates and shows an indeterminate progress dialog.
private void showLoading() {
if(pd == null){
pd = new ProgressDialog(this);
pd.setIndeterminate(true);
}
pd.show();
}
final private int REQUEST_CODE_ASK_MULTIPLE_PERMISSIONS = 124;
// Collects the runtime permissions still missing, shows a rationale dialog
// when required, then requests them in one batch.
private void insertDummyContactWrapper() {
List<String> permissionsNeeded = new ArrayList<>();
final List<String> permissionsList = new ArrayList<String>();
if (!addPermission(permissionsList, Manifest.permission.READ_EXTERNAL_STORAGE))
permissionsNeeded.add("存储");
if (!addPermission(permissionsList, Manifest.permission.READ_PHONE_STATE))
permissionsNeeded.add("手机状态");
if (!addPermission(permissionsList, Manifest.permission.ACCESS_FINE_LOCATION))
permissionsNeeded.add("位置");
if (permissionsList.size() > 0) {
if (permissionsNeeded.size() > 0) {
// Need Rationale
String message = "为了更好的使用LOOK,需要您授权" + permissionsNeeded.get(0);
for (int i = 1; i < permissionsNeeded.size(); i++)
message += "、" + permissionsNeeded.get(i);
message += "权限";
showMessageOKCancel(message,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (Build.VERSION.SDK_INT >= 23) {
requestPermissions(permissionsList.toArray(new String[permissionsList.size()]),
REQUEST_CODE_ASK_MULTIPLE_PERMISSIONS);
}
}
}, new DialogInterface.OnClickListener(){
@Override
public void onClick(DialogInterface dialog, int which) {
// User declined the rationale: close the app.
finish();
}
});
return;
}
if (Build.VERSION.SDK_INT >= 23) {
requestPermissions(permissionsList.toArray(new String[permissionsList.size()]),
REQUEST_CODE_ASK_MULTIPLE_PERMISSIONS);
}
return;
}
}
// Adds `permission` to the list when not yet granted. Returns false when a
// rationale should be shown for it (per shouldShowRequestPermissionRationale).
private boolean addPermission(List<String> permissionsList, String permission) {
if (Build.VERSION.SDK_INT >= 23 && checkSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
permissionsList.add(permission);
// Check for Rationale Option
if (!shouldShowRequestPermissionRationale(permission))
return false;
}
return true;
}
// Simple OK/Cancel alert used for the permission rationale.
private void showMessageOKCancel(String message, DialogInterface.OnClickListener okListener, DialogInterface.OnClickListener cancelListener) {
new AlertDialog.Builder(this)
.setMessage(message)
.setPositiveButton("确定", okListener)
.setNegativeButton("取消", cancelListener)
.create()
.show();
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case REQUEST_CODE_ASK_MULTIPLE_PERMISSIONS: {
Map<String, Integer> perms = new HashMap<>();
// Initial
perms.put(Manifest.permission.READ_EXTERNAL_STORAGE, PackageManager.PERMISSION_GRANTED);
perms.put(Manifest.permission.READ_PHONE_STATE, PackageManager.PERMISSION_GRANTED);
perms.put(Manifest.permission.ACCESS_FINE_LOCATION, PackageManager.PERMISSION_GRANTED);
// perms.put(Manifest.permission.ACCESS_COARSE_LOCATION, PackageManager.PERMISSION_GRANTED);
// Fill with results
for (int i = 0; i < permissions.length; i++)
perms.put(permissions[i], grantResults[i]);
// Check for ACCESS_FINE_LOCATION
if (perms.get(Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED
&& perms.get(Manifest.permission.READ_PHONE_STATE) == PackageManager.PERMISSION_GRANTED
&& perms.get(Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED
// && perms.get(Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED
) {
// All Permissions Granted
} else {
// Permission Denied
Toast.makeText(this, "缺少权限", Toast.LENGTH_SHORT).show();
finish();
}
}
break;
}
}
}