一、Android Studio 集成 OpenCV 库
cmake_minimum_required(VERSION 3.4.1)

# Native library containing the JNI face-tracking code.
add_library(
        native-lib
        SHARED
        src/main/cpp/native-lib.cpp)

# OpenCV headers shipped with the project.
include_directories(src/main/cpp/include)

# Import the prebuilt OpenCV shared library for the current ABI as a proper
# CMake target, instead of smuggling an -L search path through
# CMAKE_CXX_FLAGS (which only works by accident of the compiler driver
# also doing the link step).
add_library(opencv_java3 SHARED IMPORTED)
set_target_properties(opencv_java3 PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libopencv_java3.so)

target_link_libraries(native-lib log opencv_java3 android)
将 OpenCV 的 so 库放入 jniLibs 目录,并在 assets 目录下放置人脸识别级联分类器样本文件(lbpcascade_frontalface.xml)
二 java层获取摄像头数据
CameraHelper
/**
 * Wraps the legacy {@link Camera} API: opens a camera, streams NV21 preview
 * frames through a single reusable callback buffer, and forwards each frame
 * to an external {@link Camera.PreviewCallback}.
 */
public class CameraHelper implements Camera.PreviewCallback {
    private static final String TAG = "CameraHelper";
    public static final int WIDTH = 640;
    public static final int HEIGHT = 480;
    // Arbitrary texture name for the placeholder SurfaceTexture; the preview
    // is presumably rendered elsewhere (e.g. by native code), so this texture
    // is never drawn — TODO confirm against the consumer of the frames.
    private static final int DUMMY_TEXTURE_ID = 11;

    private int mCameraId;
    private Camera mCamera;
    // Single NV21 buffer re-queued after every frame (12 bits per pixel).
    private byte[] buffer;
    private Camera.PreviewCallback mPreviewCallback;

    /**
     * @param cameraId initial camera, e.g. Camera.CameraInfo.CAMERA_FACING_FRONT
     */
    public CameraHelper(int cameraId) {
        mCameraId = cameraId;
    }

    /** Toggles between front and back camera and restarts the preview. */
    public void switchCamera() {
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        } else {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        }
        stopPreview();
        startPreview();
    }

    public int getCameraId() {
        return mCameraId;
    }

    /** Stops the preview and releases the camera. Safe to call when idle. */
    public void stopPreview() {
        if (mCamera != null) {
            // Detach the preview-data callback before stopping.
            mCamera.setPreviewCallback(null);
            // Stop preview delivery.
            mCamera.stopPreview();
            // Release the camera hardware.
            mCamera.release();
            mCamera = null;
        }
    }

    /** Opens the current camera and starts streaming NV21 frames. */
    public void startPreview() {
        try {
            // Acquire the camera.
            mCamera = Camera.open(mCameraId);
            // Configure capture parameters.
            Camera.Parameters parameters = mCamera.getParameters();
            // Preview data format: NV21.
            parameters.setPreviewFormat(ImageFormat.NV21);
            // Sensor-side preview size (width x height).
            parameters.setPreviewSize(WIDTH, HEIGHT);
            mCamera.setParameters(parameters);
            // NV21 is 1.5 bytes per pixel.
            buffer = new byte[WIDTH * HEIGHT * 3 / 2];
            // Register the reusable frame buffer.
            mCamera.addCallbackBuffer(buffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            // The camera requires *some* preview target; attach a detached
            // SurfaceTexture as a placeholder.
            SurfaceTexture surfaceTexture = new SurfaceTexture(DUMMY_TEXTURE_ID);
            mCamera.setPreviewTexture(surfaceTexture);
            mCamera.startPreview();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    public void setPreviewCallback(Camera.PreviewCallback previewCallback) {
        mPreviewCallback = previewCallback;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Frames arrive in sensor orientation (still rotated); downstream
        // code is responsible for rotating them.
        // Guard against frames arriving before setPreviewCallback() was
        // called — the original code would NPE here.
        if (mPreviewCallback != null) {
            mPreviewCallback.onPreviewFrame(data, camera);
        }
        // Re-queue the buffer, otherwise the camera stops delivering frames.
        camera.addCallbackBuffer(buffer);
    }
}
MainActivity
/**
 * Face-tracking demo activity: streams camera frames (via {@link CameraHelper})
 * into native OpenCV code, which detects faces and renders the annotated
 * frames onto the SurfaceView's surface.
 */
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Camera
        .PreviewCallback {
    static {
        System.loadLibrary("native-lib");
    }

    private CameraHelper cameraHelper;
    // Active camera; native code uses it to decide rotation/mirroring.
    int cameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        getPermission();
        SurfaceView surfaceView = findViewById(R.id.surfaceView);
        surfaceView.getHolder().addCallback(this);
        cameraHelper = new CameraHelper(cameraId);
        cameraHelper.setPreviewCallback(this);
        // Copy the cascade model out of assets so native code can open it
        // from a plain filesystem path.
        Utils.copyAssets(this, "lbpcascade_frontalface.xml");
    }

    @Override
    protected void onResume() {
        super.onResume();
        // (Re-)initialize the native tracker. NOTE(review): a hard-coded
        // /sdcard path only works on older Android versions with legacy
        // external-storage access — confirm target API level.
        init("/sdcard/lbpcascade_frontalface.xml");
        cameraHelper.startPreview();
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Stop the camera FIRST so no further onPreviewFrame -> postData call
        // can reach native code after release() has deleted the tracker.
        // (The original order — release() then stopPreview() — allowed an
        // in-flight frame to hit a destroyed tracker and crash natively.)
        cameraHelper.stopPreview();
        release();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // Hand the surface to native code (wrapped there as an ANativeWindow).
        setSurface(holder.getSurface());
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Forward the raw NV21 frame to native processing.
        postData(data, CameraHelper.WIDTH, CameraHelper.HEIGHT, cameraId);
    }

    /**
     * Initializes the native face tracker.
     * @param model filesystem path to the cascade classifier XML
     */
    native void init(String model);

    /**
     * Sets the render target (wrapped natively as an ANativeWindow).
     * @param surface surface backing the on-screen SurfaceView
     */
    native void setSurface(Surface surface);

    /**
     * Processes one camera frame natively.
     * @param data     NV21 frame bytes
     * @param w        frame width in pixels
     * @param h        frame height in pixels
     * @param cameraId which camera produced the frame (controls rotation)
     */
    native void postData(byte[] data, int w, int h, int cameraId);

    /** Releases the native tracker. */
    native void release();

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_UP) {
            cameraHelper.switchCamera();
            cameraId = cameraHelper.getCameraId();
        }
        return super.onTouchEvent(event);
    }

    private static String[] PERMISSIONS_STORAGE = {
            Manifest.permission.INTERNET,
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
            // Needed on legacy-storage devices: copyAssets/init use /sdcard.
            Manifest.permission.WRITE_EXTERNAL_STORAGE
    };
    private static final int REQUEST_EXTERNAL_STORAGE = 1;

    private void getPermission() {
        ActivityCompat.requestPermissions(this,
                PERMISSIONS_STORAGE, REQUEST_EXTERNAL_STORAGE);
    }
}
三 OpenCv进行数据处理
处理、设置灰度、旋转、识别、画矩形、UI显示
#include <jni.h>
#include <cstdint>
#include <cstring>
#include <opencv2/opencv.hpp>
#include <android/native_window_jni.h>
using namespace cv;
// Native window backing the Java SurfaceView. Replaced by setSurface() and
// rendered into by postData(); 0 when no surface is attached.
ANativeWindow *window = 0;
// Adapts a cv::CascadeClassifier to the DetectionBasedTracker::IDetector
// interface, so the same classifier type can serve as both the main and
// the tracking detector of a DetectionBasedTracker.
class CascadeDetectorAdapter : public DetectionBasedTracker::IDetector {
public:
    // Takes (shared) ownership of an already-loaded cascade classifier.
    CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector) :
            IDetector(),
            Detector(detector) {
        CV_Assert(detector);
    }

    // Multi-scale detection using the thresholds inherited from IDetector
    // (scaleFactor, minNeighbours, min/max object size). Marked override so
    // a signature mismatch with the base class becomes a compile error.
    void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects) override {
        Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize,
                                   maxObjSize);
    }

    virtual ~CascadeDetectorAdapter() {
    }

private:
    // An adapter without a classifier is meaningless.
    CascadeDetectorAdapter() = delete;

    cv::Ptr<cv::CascadeClassifier> Detector;
};
// Global face tracker: created by init(), destroyed by release();
// 0 when not initialized.
DetectionBasedTracker *tracker = 0;
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnao_facetracing_MainActivity_setSurface(JNIEnv *env, jobject instance,
                                                     jobject surface) {
    // Release any previously-held native window before adopting the new one.
    ANativeWindow *previous = window;
    window = 0;
    if (previous) {
        ANativeWindow_release(previous);
    }
    // Wrap the Java Surface as an ANativeWindow for rendering in postData().
    window = ANativeWindow_fromSurface(env, surface);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnao_facetracing_MainActivity_postData(JNIEnv *env, jobject instance,
                                                   jbyteArray data_, jint w, jint h,
                                                   jint cameraId) {
    // Guard: a frame may arrive before init() has run or after release()
    // destroyed the tracker; the original code would dereference null below.
    if (!tracker) {
        return;
    }
    // Raw NV21 bytes from the Java camera callback.
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    // Wrap the NV21 buffer as a single-channel Mat:
    // h rows of Y plane followed by h/2 rows of interleaved VU.
    Mat src(h + h / 2, w, CV_8UC1, data);
    // NV21 (YUV) -> RGBA.
    cvtColor(src, src, COLOR_YUV2RGBA_NV21);
    // (Debug dump removed: writing a file mid-frame risks truncated output.)
    //imwrite("/sdcard/src.jpg",src);
    if (cameraId == 1) {
        // Front camera: rotate 90° counter-clockwise, then mirror
        // horizontally so the preview behaves like a mirror.
        rotate(src, src, ROTATE_90_COUNTERCLOCKWISE);
        flip(src, src, 1);
    } else {
        // Back camera: rotate 90° clockwise.
        rotate(src, src, ROTATE_90_CLOCKWISE);
    }
    // Detection runs on an equalized grayscale copy (better contrast).
    Mat gray;
    cvtColor(src, gray, COLOR_RGBA2GRAY);
    equalizeHist(gray, gray);
    // Locate faces (possibly several).
    std::vector<Rect> faces;
    tracker->process(gray);
    tracker->getObjects(faces);
    for (const Rect &face : faces) {
        // Draw a rectangle around each face (Scalar channels follow the
        // Mat's RGBA layout here).
        rectangle(src, face, Scalar(255, 0, 255));
    }
    // Render to the attached window, if any.
    if (window) {
        // cols/rows already reflect the rotation above, so no swap needed.
        ANativeWindow_setBuffersGeometry(window, src.cols, src.rows, WINDOW_FORMAT_RGBA_8888);
        ANativeWindow_Buffer buffer;
        if (ANativeWindow_lock(window, &buffer, 0)) {
            // Lock failed: the surface is gone; drop our reference.
            ANativeWindow_release(window);
            window = 0;
        } else {
            // Copy row by row: the window's stride (in pixels) may exceed
            // src.cols, so the original single memcpy of stride*height*4
            // bytes read past the end of src.data and skewed every row.
            uint8_t *dst = static_cast<uint8_t *>(buffer.bits);
            const int dstRowBytes = buffer.stride * 4;
            const int srcRowBytes = src.cols * 4;
            for (int row = 0; row < buffer.height && row < src.rows; ++row) {
                memcpy(dst + row * dstRowBytes, src.data + row * srcRowBytes, srcRowBytes);
            }
            // Post the frame to the screen.
            ANativeWindow_unlockAndPost(window);
        }
    }
    // Mats are reference-counted; release our references explicitly.
    src.release();
    gray.release();
    env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnao_facetracing_MainActivity_init(JNIEnv *env, jobject instance, jstring model_) {
    const char *modelPath = env->GetStringUTFChars(model_, 0);
    // Tear down any previous tracker before building a new one
    // (init may be called again, e.g. from onResume).
    if (tracker) {
        tracker->stop();
        delete tracker;
        tracker = 0;
    }
    // The tracker wants two independent detectors backed by the same model:
    // one for full-frame detection, one for per-object tracking.
    Ptr<CascadeDetectorAdapter> mainDetector =
            makePtr<CascadeDetectorAdapter>(makePtr<CascadeClassifier>(modelPath));
    Ptr<CascadeDetectorAdapter> trackingDetector =
            makePtr<CascadeDetectorAdapter>(makePtr<CascadeClassifier>(modelPath));
    // Build the tracker with default parameters and start it.
    DetectionBasedTracker::Parameters trackerParams;
    tracker = new DetectionBasedTracker(mainDetector, trackingDetector, trackerParams);
    tracker->run();
    env->ReleaseStringUTFChars(model_, modelPath);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_dongnao_facetracing_MainActivity_release(JNIEnv *env, jobject instance) {
    // Stop and destroy the tracker; safe to call when already released.
    if (tracker == 0) {
        return;
    }
    tracker->stop();
    delete tracker;
    tracker = 0;
}