JNI | Calling Custom Java Classes and Methods from C++: A Camera Example

A beginner's write-up; feel free to copy or adapt it. The yuv2rgb conversion is only moderately efficient (it should be acceptable at 640*480 and below), so swap in a faster algorithm if you need one.

GitHub: https://github.com/Coder-Wjt/JNI_Samples

Test tools: Eclipse + Unity

Java:

PreviewMode.java:

package wjt.camera.plugin;

public class PreviewMode
{
  private int width;
  private int height;
  private int fps;

  public PreviewMode(int width, int height, int fps)
  {
    this.width = width;
    this.height = height;
    this.fps = fps;
  }


  public int getWidth() {
    return this.width;
  }

  public int getHeight() {
    return this.height;
  }


  public int getFps() {
    return this.fps;
  }

  public String toString()
  {
    return "ImageStreamMode{width=" + this.width + ", height=" + this.height + ", fps=" + this.fps + '}';
  }
}
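
On the native side, each of these members is reached through a JNI type signature: the three int fields are "I", and the (int, int, int) constructor is "(III)V". A minimal sketch of those lookups, assuming a valid JNIEnv* named env is in scope (as in the bridge code later in this article):

	jclass jcMode = env->FindClass("wjt/camera/plugin/PreviewMode");
	jmethodID ctor = env->GetMethodID(jcMode, "<init>", "(III)V");	// (int,int,int) -> void
	jfieldID widthId = env->GetFieldID(jcMode, "width", "I");		// int field

	jobject mode = env->NewObject(jcMode, ctor, 640, 480, 30);		// new PreviewMode(640, 480, 30)
	int width = env->GetIntField(mode, widthId);					// reads back 640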

AndroidCamera.java:

package wjt.camera.plugin;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import java.io.IOException;
import java.util.List;

public class AndroidCamera implements Camera.PreviewCallback {
	private static final String TAG = AndroidCamera.class.getSimpleName();
	private Camera camera;
	private Camera.Parameters cameraParams;
	private boolean isStreaming = false;
	private byte[] dataBuffer;
	private static AndroidCamera androidCameraInstance;
	private static final int MAGIC_TEXTURE_ID = 6;
	private SurfaceTexture gSurfaceTexture;

	private AndroidCamera() {
		Log.d(TAG, "Constructor");
	}

	public static synchronized AndroidCamera getInstance() {
		if (androidCameraInstance == null) {
			androidCameraInstance = new AndroidCamera();
		}
		return androidCameraInstance;
	}

	public boolean openCamera() {
		Log.d(TAG, "openCamera()");

		if (this.camera == null) {
			int numOfCameras = Camera.getNumberOfCameras();
			Log.v(TAG, "openCamera(): Number of Cameras " + numOfCameras);

			for (int i = 0; i < numOfCameras; i++) {
				try {
					this.camera = Camera.open(i);
					break; // stop at the first camera that opens successfully
				} catch (Exception e) {
					Log.d(TAG, "openCamera(): Exception while opening camera #" + i + ": " + e.toString());
				}
			}
		}

		if (this.camera == null) {
			return false;
		}

		cameraOpened();

		return true;
	}

	public boolean closeCamera() {
		Log.d(TAG, "closeCamera()");

		if (this.camera != null) {
			try {
				this.camera.setPreviewTexture(null);
			} catch (IOException e) {
				e.printStackTrace();
			}
			this.camera.setPreviewCallback(null);
			this.camera.stopPreview();
			this.isStreaming = false;
			this.camera.release();
			this.camera = null;
			this.dataBuffer = null;
		}

		cameraClosed();

		return true;
	}

	public boolean startCamera() {
		Log.d(TAG, "startCamera()");

		if (this.camera != null) {
			this.cameraParams = this.camera.getParameters();

			boolean foundNV21 = false;
			List<Integer> formats = this.cameraParams.getSupportedPreviewFormats();
			Log.d(TAG, "Preview format supported count: " + formats.size());
			for (int i = 0; i < formats.size(); i++) {
				int format = ((Integer) formats.get(i)).intValue();
				Log.d(TAG, "startCamera: Preview format supported: " + format + " bits per pixel: "
						+ ImageFormat.getBitsPerPixel(format));
				if (format == ImageFormat.NV21) {
					/*ImageFormat.NV21 == 17*/
					this.cameraParams.setPreviewFormat(ImageFormat.NV21);
					foundNV21 = true;
//					break;
				}
			}

			if (!foundNV21) {
				Log.d(TAG, "startCamera: Camera doesn't support ImageFormat.NV21. Can't use it.");
				return false;
			}

			List<int[]> fpsRange = this.cameraParams.getSupportedPreviewFpsRange();
			for (int i = 0; i < fpsRange.size(); i++) {
				int[] fps = (int[]) fpsRange.get(i);
				String fpsString = "startCamera: Supported preview FPS";
				for (int j = 0; j < fps.length; j++) {
					fpsString = fpsString + " " + fps[j];
				}
				Log.d(TAG, fpsString);
			}

			this.camera.setParameters(this.cameraParams);

			Camera.Size previewSize = this.cameraParams.getPreviewSize();

			// NV21 uses 12 bits (1.5 bytes) per pixel: a full-resolution Y
			// plane followed by a half-size interleaved VU plane.
			this.dataBuffer = new byte[previewSize.width * previewSize.height * 3 / 2];
			this.camera.addCallbackBuffer(this.dataBuffer);

			if (this.gSurfaceTexture == null) {
				this.gSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
			}
			try {
				this.camera.setPreviewTexture(this.gSurfaceTexture);
			} catch (IOException e) {
				e.printStackTrace();
			}

			// addCallbackBuffer() only takes effect with the WithBuffer variant;
			// plain setPreviewCallback() ignores the queued buffer
			this.camera.setPreviewCallbackWithBuffer(this);
			this.camera.startPreview();

			this.isStreaming = true;

			this.cameraParams = this.camera.getParameters();

			int fps = cameraParams.getPreviewFrameRate();
			Log.i(TAG, "startCamera: PreviewSize With = " + previewSize.width
					+ " Height = " + previewSize.height + " fps:" + fps);

			cameraStarted(previewSize.width, previewSize.height, fps);
			return true;
		}

		return false;
	}

	public boolean stopCamera() {
		Log.d(TAG, "stopCamera()");

		if (this.camera != null) {
			this.camera.setPreviewCallback(null);
			this.camera.stopPreview();
			this.isStreaming = false;
		}
		cameraStopped();
		return true;
	}

	public PreviewMode[] getAvailableCameraModes() {
		Log.d(TAG, "getAvailableCameraModes()");

		this.cameraParams = this.camera.getParameters();

		List<Integer> formats = this.cameraParams.getSupportedPreviewFormats();
		Log.d(TAG, "Preview format supported count: " + formats.size());
		for (int i = 0; i < formats.size(); i++) {
			int format = ((Integer) formats.get(i)).intValue();
			Log.d(TAG,
					"Preview format supported: " + format + " bits per pixel: " + ImageFormat.getBitsPerPixel(format));
		}

		List<int[]> fpsRange = this.cameraParams.getSupportedPreviewFpsRange();
		for (int i = 0; i < fpsRange.size(); i++) {
			int[] fps = (int[]) fpsRange.get(i);
			String fpsString = "Supported preview FPS";
			for (int j = 0; j < fps.length; j++) {
				fpsString = fpsString + " " + fps[j];
			}
			Log.d(TAG, fpsString);
		}

		int fps = this.cameraParams.getPreviewFrameRate();
		List<Size> supportedPreviewSizes = this.cameraParams.getSupportedPreviewSizes();
		if ((supportedPreviewSizes == null) || (supportedPreviewSizes.size() <= 0)) {
			PreviewMode[] modes = new PreviewMode[1];
			Camera.Size previewSize = this.cameraParams.getPreviewSize();
			String sizeString = "Supported preview size ";
			sizeString = sizeString + "width:" + previewSize.width + " height:" + previewSize.height;
			Log.d(TAG, sizeString);
			modes[0] = new PreviewMode( previewSize.width, previewSize.height, fps);
			return modes;
		}

		PreviewMode[] modes = new PreviewMode[supportedPreviewSizes.size()];

		for (int i = 0; i < supportedPreviewSizes.size(); i++) {
			Camera.Size size = (Camera.Size) supportedPreviewSizes.get(i);
			String sizeString = "Supported preview size ";
			sizeString = sizeString + "width:" + size.width + " height:" + size.height; 
			Log.d(TAG, sizeString);
			modes[i] = new PreviewMode(size.width, size.height, fps);
		}

		return modes;
	}

	public PreviewMode getCameraMode() {
		Log.d(TAG, "getCameraMode()");
		Camera.Parameters cameraParams = this.camera.getParameters();
		Camera.Size previewSize = cameraParams.getPreviewSize();
		int fps = cameraParams.getPreviewFrameRate();
		return new PreviewMode(previewSize.width, previewSize.height, fps);
	}

	public void setCameraMode(PreviewMode mode) {
		Log.d(TAG, String.format("setCameraMode(%d,%d,%d)",mode.getWidth(),mode.getHeight(),mode.getFps()));
		this.cameraParams.setPreviewSize(mode.getWidth(), mode.getHeight());
		this.camera.setParameters(this.cameraParams);
		if (this.isStreaming) {
			stopCamera();
			startCamera();
		}
	}

	public void onPreviewFrame(byte[] data, Camera myCamera) {
		Camera.Parameters parameters = this.camera.getParameters();
		if ((data != null) && (parameters.getPreviewFormat() == ImageFormat.NV21)) {
			int width = parameters.getPreviewSize().width;
			int height = parameters.getPreviewSize().height;

			processCameraFrame(width, height, data);
		}

		this.camera.addCallbackBuffer(this.dataBuffer);
	}

	private native void cameraStarted(int width, int height, int fps);

	private native void cameraStopped();

	private native void cameraOpened();

	private native void cameraClosed();

	private native void processCameraFrame(int width, int height, byte[] imgdata);
}
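
The buffer size computed in startCamera() (width * height * 3 / 2) follows from the NV21 layout: a full-resolution Y plane followed by a VU plane shared by each 2x2 block of pixels. A small C++ helper sketch making that arithmetic explicit:

	// NV21: 8 bits of Y per pixel, plus one V and one U byte shared by
	// each 2x2 block of pixels, i.e. 12 bits (1.5 bytes) per pixel total.
	static inline int nv21BufferSize(int width, int height) {
		int luma = width * height;			// Y plane
		int chroma = (width * height) / 2;	// interleaved VU plane
		return luma + chroma;				// == width * height * 3 / 2
	}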

Android.mk:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE    := astra_android_bridge
LOCAL_SRC_FILES := astra_android_bridge.cpp

LOCAL_C_INCLUDES	:= $(LOCAL_PATH)/include
LOCAL_CPPFLAGS 	:= -frtti -fexceptions
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)

Application.mk:

APP_ABI := armeabi-v7a
APP_STL := gnustl_static

C++:

astra_android_bridge.cpp:

/*
 * astra_android_bridge.cpp
 *
 *  Created on: Nov 27, 2019
 *      Author: wjintao
 */

#include <jni.h>

#include <stdio.h>
#include <string.h>		// memcpy, memset
#include <sys/time.h>	// gettimeofday
#include <android/log.h>
#include <time.h>
#include <unistd.h>

#define  JNICAMERA_CLASS "wjt/camera/plugin/AndroidCamera"
#define  JNIMODE_CLASS "wjt/camera/plugin/PreviewMode"
#define  LOG_TAG    "WJT_Plugin"
#define  LOGI(...)   __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define  LOGE(...)   __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define  LOGD(...)   __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)

typedef struct PreviewMode {
public:
	int width;
	int height;
	int fps;
} PreviewMode;

#define IMAGEMODECOUNT 255

typedef struct ImageSupportedModeList {
public:
	int ImageModeCount;
	PreviewMode ImageModeList[IMAGEMODECOUNT];
} ImageSupportedModeList;

/// <summary>
/// Maximum supported resolution
/// </summary>
#define RGBDATALENGTH  (1920 * 1080 * 4)

/// <summary>
/// RGB image type
/// </summary>
typedef struct CRGBImage {
public:
	/// <summary>
	/// Current width
	/// </summary>
	int width;
	/// <summary>
	/// Current height
	/// </summary>
	int height;
	/// <summary>
	/// Timestamp
	/// </summary>
	unsigned long long Timestamp;
	/// <summary>
	/// RGB data
	/// </summary>
	unsigned char ImageData[RGBDATALENGTH];
} CRGBImage;
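
This struct is copied byte-for-byte into the C# RGBImage struct shown later, so the two layouts have to agree. A sketch of a compile-time sanity check, assuming a C++11 toolchain and the usual ARM alignment where the unsigned long long sits on an 8-byte boundary:

	// assumes #include <cstddef> (for offsetof) and -std=c++11
	static_assert(offsetof(CRGBImage, width) == 0, "width at offset 0");
	static_assert(offsetof(CRGBImage, height) == 4, "height at offset 4");
	static_assert(offsetof(CRGBImage, Timestamp) == 8, "Timestamp at offset 8");
	static_assert(offsetof(CRGBImage, ImageData) == 16, "ImageData at offset 16");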

JNIEnv* env;	// cached in JNI_OnLoad(); a JNIEnv* is only valid on its own thread
jclass jcCamera;
jobject joCamera;
jmethodID methodID_getInstance;
jmethodID methodID_openCamera;
jmethodID methodID_closeCamera;
jmethodID methodID_startCamera;
jmethodID methodID_stopCamera;
jmethodID methodID_getAvailableCameraModes;
jmethodID methodID_getCameraMode;
jmethodID methodID_setCameraMode;

PreviewMode nowMode;
int Timestamp;
int imgBytesLen;

unsigned char ImageData[RGBDATALENGTH];
unsigned char YUVSrcData[RGBDATALENGTH];
bool IsStartCamera = false;

const int SIZE = 256;
int RV[SIZE], GU[SIZE], GV[SIZE], BU[SIZE];	// chroma contribution lookup tables

void yuv420_2_rgb24_table_init() {
	for (int i = 0; i < SIZE; i++) {
		//-128~127
		RV[i] = (int) ((i - 128) + (((i - 128) * 103) >> 8));
		GV[i] = (int) (((i - 128) * 183) >> 8);
		GU[i] = (int) (((i - 128) * 88) >> 8);
		BU[i] = (int) ((i - 128) + (((i - 128) * 198) >> 8));
	}
}
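
These fixed-point tables approximate the BT.601 YUV-to-RGB weights with 8-bit shifts: 1 + 103/256 ≈ 1.402 (R from V), 183/256 ≈ 0.715 and 88/256 ≈ 0.344 (G from V and U), and 1 + 198/256 ≈ 1.773 (B from U). A standalone sketch that checks one table against the floating-point formula, assuming the table code above is in scope:

	#include <cmath>
	#include <cstdio>

	int main() {
		yuv420_2_rgb24_table_init();
		double maxErr = 0;
		for (int v = 0; v < SIZE; v++) {
			// reference: R - Y = 1.402 * (V - 128)
			double err = std::fabs(RV[v] - 1.402 * (v - 128));
			if (err > maxErr) maxErr = err;
		}
		std::printf("max |RV error| = %.3f\n", maxErr);	// stays around one intensity level
		return 0;
	}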

// IsMirrored: whether to flip the output horizontally
void yuv420sp_to_rgb(unsigned char* yuv420sp, int width, int height,
		bool IsMirrored, unsigned char* rgb, int& Timestamp) {
	struct timeval tvStart1, tvEnd1;
//	gettimeofday(&tvStart1,NULL);
//	LOGI("yuv420sp_to_rgb start time:%ld\n",tvStart1.tv_usec);
	// length of the single-channel (luma) plane
	int frameSize = width * height;
	// decoding state
	int i = 0, y = 0;
	int uvp = 0, u = 0, v = 0;
	// initialize the r, g, b components
	int r = 0, g = 0, b = 0;

	int ii = 0;
	// Walk the frame row by row, recovering each pixel's R, G, B.
	for (int j = 0, yp = 0; j < height; j++) {
		uvp = frameSize + (j >> 1) * width;
		u = 0;
		v = 0;

		for (i = 0; i < width; i++, yp++) {
			y = (0xff & ((int) yuv420sp[yp])) - 16;
			if (y < 0)
				y = 0;
			if ((i & 1) == 0) {
				v = (0xff & yuv420sp[uvp++]);	// - 128;
				u = (0xff & yuv420sp[uvp++]);	// - 128;
			}

			// partial table lookup (tables built in yuv420_2_rgb24_table_init)
			// RV[SIZE], GU[SIZE], GV[SIZE], BU[SIZE]
			r = y + RV[v];
			g = y - GV[v] - GU[u];
			b = y + BU[u];

			// clamp r, g, b to [0, 255]
			if (r < 0)
				r = 0;
			else if (r > 255)
				r = 255;
			if (g < 0)
				g = 0;
			else if (g > 255)
				g = 255;
			if (b < 0)
				b = 0;
			else if (b > 255)
				b = 255;

			ii = (width - i - 1) + j * width;

			if (!IsMirrored) {
				ii = yp;
			}

			rgb[ii * 3] = (unsigned char) (r);
			rgb[ii * 3 + 1] = (unsigned char) (g);
			rgb[ii * 3 + 2] = (unsigned char) (b);
		}
	}
	gettimeofday(&tvEnd1, NULL);

//	long useTv = (tvEnd1.tv_sec - tvStart1.tv_sec)*1000000+(tvEnd1.tv_usec - tvStart1.tv_usec);
//	LOGI("yuv420sp_to_rgb() translation the frame:	\t%ld\n",useTv);

	Timestamp = tvEnd1.tv_sec * 1000000 + tvEnd1.tv_usec;
}
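
A quick way to sanity-check the converter is a synthetic NV21 frame: with Y = 128 and U = V = 128 everywhere, the chroma terms are zero and every output pixel should come out as the same gray (128 - 16 = 112). A small sketch:

	void testGrayFrame() {
		const int w = 64, h = 48;
		static unsigned char yuv[w * h * 3 / 2];
		static unsigned char rgb[w * h * 3];
		memset(yuv, 128, sizeof(yuv));	// Y plane and VU plane all 128
		int ts = 0;
		yuv420sp_to_rgb(yuv, w, h, false, rgb, ts);
		LOGI("pixel0 r=%d g=%d b=%d\n", rgb[0], rgb[1], rgb[2]);	// expect 112 112 112
	}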

JNIEXPORT void JNICALL
cameraOpened(JNIEnv* env, jobject obj) {
	//camera open callback
	yuv420_2_rgb24_table_init();
}

JNIEXPORT void JNICALL
cameraClosed(JNIEnv* env, jobject obj) {
	//camera close callback
}

JNIEXPORT void JNICALL
cameraStarted(JNIEnv* env, jobject obj, jint width, jint height, jint fps) {
	//camera start callback
	LOGI("width:%d,height:%d,fps:%d\n", width, height, fps);
	if (nowMode.width != width || nowMode.height != height
			|| nowMode.fps != fps) {
		nowMode.width = width;
		nowMode.height = height;
		nowMode.fps = fps;
	}

	IsStartCamera = true;
}

JNIEXPORT void JNICALL
cameraStopped(JNIEnv* env, jobject obj) {
	//camera stop callback
	IsStartCamera = false;
}

JNIEXPORT void JNICALL
processCameraFrame(JNIEnv* env, jobject obj, jint width, jint height,
		jbyteArray imgdata) {
	//new frame callback
	if (!IsStartCamera)
		return;
	jbyte * imgBody = env->GetByteArrayElements(imgdata, 0);
	jsize imgdatalen = env->GetArrayLength(imgdata);
//	LOGI("width:%d,height:%d,data length:%d\n",width,height,imgdatalen);

	memcpy(YUVSrcData, imgBody, imgdatalen);

	env->ReleaseByteArrayElements(imgdata, imgBody, 0);
	env->DeleteLocalRef(imgdata);

	//YUV(NV21) to RGB
	yuv420sp_to_rgb(YUVSrcData, width, height, true, ImageData, Timestamp);
}

static JNINativeMethod jniMethods[] = {
		{ "processCameraFrame", "(II[B)V", (void*) processCameraFrame },
		{ "cameraOpened", "()V", (void*) cameraOpened },
		{ "cameraClosed", "()V", (void*) cameraClosed },
		{ "cameraStarted", "(III)V", (void*) cameraStarted },
		{ "cameraStopped", "()V", (void*) cameraStopped } };

void setup(JNIEnv* env, jobject obj) {
	LOGI("setup");

	// look up the Java class and pin it with a global reference,
	// since dispose() later releases it with DeleteGlobalRef
	jclass localClass = env->FindClass(JNICAMERA_CLASS);
	jcCamera = (jclass) env->NewGlobalRef(localClass);
	env->DeleteLocalRef(localClass);

	// look up the method IDs on the Java class
	methodID_openCamera = env->GetMethodID(jcCamera, "openCamera", "()Z");
	methodID_closeCamera = env->GetMethodID(jcCamera, "closeCamera", "()Z");
	methodID_startCamera = env->GetMethodID(jcCamera, "startCamera", "()Z");
	methodID_stopCamera = env->GetMethodID(jcCamera, "stopCamera", "()Z");
	methodID_getAvailableCameraModes = env->GetMethodID(jcCamera,
			"getAvailableCameraModes", "()[Lwjt/camera/plugin/PreviewMode;");
	methodID_getCameraMode = env->GetMethodID(jcCamera, "getCameraMode",
			"()Lwjt/camera/plugin/PreviewMode;");
	methodID_setCameraMode = env->GetMethodID(jcCamera, "setCameraMode",
			"(Lwjt/camera/plugin/PreviewMode;)V");

	// locate the construction entry point
//	// default constructor
//	methodID_getInstance = env->GetMethodID(jcCamera, "<init>", "()V");
	// static singleton factory
	methodID_getInstance = env->GetStaticMethodID(jcCamera, "getInstance",
			"()Lwjt/camera/plugin/AndroidCamera;");
	if (methodID_getInstance == NULL) {
		LOGI("methodID_getInstance == NULL");
		return;
	}
	// create the Java object
//	// instantiate through the default constructor instead
//	joCamera = env->NewObject(jcCamera, methodID_getInstance, NULL);
	joCamera = env->CallStaticObjectMethod(jcCamera, methodID_getInstance);
	if (joCamera == NULL) {
		LOGI("joCamera == NULL");
		return;
	}
	// promote to a global reference so it outlives this call
	joCamera = env->NewGlobalRef(joCamera);
}

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
	if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) != JNI_OK) {
		return -1;
	}
	jclass clz = env->FindClass(JNICAMERA_CLASS);
	env->RegisterNatives(clz, jniMethods,
			sizeof(jniMethods) / sizeof(JNINativeMethod));

	// resolve the Java methods and cache the singleton instance
	setup(env, clz);

	env->DeleteLocalRef(clz);
	return JNI_VERSION_1_4;
}
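
One caveat with the global env cached in JNI_OnLoad: a JNIEnv* is only valid on the thread it was obtained on. This works here as long as Unity loads the library and calls the exports from that same thread, but a more defensive sketch would cache the JavaVM instead and fetch a per-thread env on demand:

	static JavaVM* gVm;	// would be saved from the vm parameter in JNI_OnLoad

	static JNIEnv* getEnv() {
		JNIEnv* e = NULL;
		if (gVm->GetEnv(reinterpret_cast<void**>(&e), JNI_VERSION_1_4) == JNI_OK)
			return e;	// this thread is already attached
		if (gVm->AttachCurrentThread(&e, NULL) == JNI_OK)
			return e;	// newly attached; detach when the thread exits
		return NULL;
	}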

#ifdef __cplusplus
extern "C" {
#endif

bool openCamera() {
	jboolean isOpen = env->CallBooleanMethod(joCamera, methodID_openCamera);
	return isOpen == JNI_TRUE;
}
bool closeCamera() {
	jboolean isClose = env->CallBooleanMethod(joCamera, methodID_closeCamera);
	return isClose == JNI_TRUE;
}

bool startCamera() {
	jboolean isStart = env->CallBooleanMethod(joCamera, methodID_startCamera);
	return isStart == JNI_TRUE;
}
bool stopCamera() {
	jboolean isStop = env->CallBooleanMethod(joCamera, methodID_stopCamera);
	return isStop == JNI_TRUE;
}

void getAvailableCameraModes(ImageSupportedModeList& modes) {
	// the Java method returns a PreviewMode[], so cast to jobjectArray
	jobjectArray array = (jobjectArray) env->CallObjectMethod(joCamera,
			methodID_getAvailableCameraModes);

	// 1. look up the field IDs on the Java PreviewMode class
	jclass jcMode = env->FindClass(JNIMODE_CLASS);
	jfieldID heightFieldId = env->GetFieldID(jcMode, "height", "I");
	jfieldID widthFieldId = env->GetFieldID(jcMode, "width", "I");
	jfieldID fpsFieldId = env->GetFieldID(jcMode, "fps", "I");

	int length = env->GetArrayLength(array);
	if (length > IMAGEMODECOUNT)
		length = IMAGEMODECOUNT;	// don't overrun the fixed-size C array

	LOGI("PreviewModes length:%d\n", length);

	// 2. unpack the array, converting each Java object into a struct
	int count = 0;
	for (int i = 0; i < length; i++) {
		jobject obj = env->GetObjectArrayElement(array, i);
		modes.ImageModeList[i].width = env->GetIntField(obj, widthFieldId);
		LOGI("modes[%d] width:%d\n", i, modes.ImageModeList[i].width);
		modes.ImageModeList[i].height = env->GetIntField(obj, heightFieldId);
		LOGI("modes[%d] height:%d\n", i, modes.ImageModeList[i].height);
		modes.ImageModeList[i].fps = env->GetIntField(obj, fpsFieldId);
		LOGI("modes[%d] fps:%d\n", i, modes.ImageModeList[i].fps);
		env->DeleteLocalRef(obj);	// release each element's local reference
		count++;
	}
	modes.ImageModeCount = count;

	env->DeleteLocalRef(array);
	env->DeleteLocalRef(jcMode);
}

void getCameraMode(PreviewMode& Mode) {
	// fetch the current mode as a Java object
	jobject obj = env->CallObjectMethod(joCamera, methodID_getCameraMode);

	// 1. look up the field IDs on the Java PreviewMode class
	jclass jcMode = env->FindClass(JNIMODE_CLASS);
	jfieldID widthFieldId = env->GetFieldID(jcMode, "width", "I");
	jfieldID heightFieldId = env->GetFieldID(jcMode, "height", "I");
	jfieldID fpsFieldId = env->GetFieldID(jcMode, "fps", "I");

	Mode.width = env->GetIntField(obj, widthFieldId);
	Mode.height = env->GetIntField(obj, heightFieldId);
	Mode.fps = env->GetIntField(obj, fpsFieldId);

	LOGI("Mode width:%d,height:%d,fps:%d\n", Mode.width, Mode.height, Mode.fps);
	env->DeleteLocalRef(jcMode);
	env->DeleteLocalRef(obj);
}

void setCameraMode(PreviewMode Mode) {
	// 1. look up the Java PreviewMode class
	jclass jcMode = env->FindClass(JNIMODE_CLASS);
	// 2. get the constructor's method ID (constructors are always named <init>)
	jmethodID methodID_ModeInstance = env->GetMethodID(jcMode, "<init>",
			"(III)V");
	// 3. create a Java PreviewMode instance from the C struct
	jobject joMode = env->NewObject(jcMode, methodID_ModeInstance, Mode.width,
			Mode.height, Mode.fps);

	env->CallVoidMethod(joCamera, methodID_setCameraMode, joMode);
	env->DeleteLocalRef(joMode);
	env->DeleteLocalRef(jcMode);
}

void GetImageData(CRGBImage* pImageData) {
	if (!IsStartCamera)
		return;
	pImageData->width = nowMode.width;
	pImageData->height = nowMode.height;
	pImageData->Timestamp = Timestamp;
	int DataLength = nowMode.width * nowMode.height * 3;
	memcpy(pImageData->ImageData, ImageData, DataLength);
}
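
Note that processCameraFrame() fills ImageData on the camera callback thread while GetImageData() reads it from the caller's thread, so a frame can in principle be overwritten mid-copy. A minimal guard, not present in the original code, would serialize the two with a mutex:

	// assumes #include <pthread.h> at the top of the file
	static pthread_mutex_t gFrameLock = PTHREAD_MUTEX_INITIALIZER;

	// hypothetical locked variant; the same lock/unlock pair would also
	// wrap the memcpy and yuv420sp_to_rgb calls in processCameraFrame()
	void GetImageDataLocked(CRGBImage* pImageData) {
		pthread_mutex_lock(&gFrameLock);
		GetImageData(pImageData);
		pthread_mutex_unlock(&gFrameLock);
	}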

//unsigned char* GetImageData()
//{
//	if(!IsStartCamera) return NULL;
//	return ImageData;
//}

unsigned char* GetYUV420Data() {
	if (!IsStartCamera)
		return NULL;
	return YUVSrcData;
}

void dispose() {
	LOGI("JNI_dispose");
	// drop the global references taken in setup()
	env->DeleteGlobalRef(joCamera);
	env->DeleteGlobalRef(jcCamera);
}

#ifdef __cplusplus
}
#endif
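
For completeness, a hypothetical native-side driver showing the intended call order of these exports (in the real project, Unity's NativeMethods.cs below plays this role):

	void runCameraOnce() {
		if (!openCamera())
			return;

		ImageSupportedModeList modes;
		getAvailableCameraModes(modes);
		if (modes.ImageModeCount > 0)
			setCameraMode(modes.ImageModeList[0]);	// pick the first reported mode

		if (startCamera()) {
			static CRGBImage frame;	// ~8 MB, too large for the stack
			sleep(1);				// give the preview a moment to deliver frames
			GetImageData(&frame);
			LOGI("got %dx%d frame\n", frame.width, frame.height);
			stopCamera();
		}
		closeCamera();
		dispose();
	}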

C#:

PreviewMode.cs:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential, CharSet = System.Runtime.InteropServices.CharSet.Ansi)]
public struct PreviewMode
{
    public int width;
    public int height;
    public int fps;
}

ImageSupportedModeList.cs:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

public struct ImageSupportedModeList
{
    public const int IMAGEMODECOUNT = 255;
    public int ImageModeCount; 
    [System.Runtime.InteropServices.MarshalAs(System.Runtime.InteropServices.UnmanagedType.ByValArray, SizeConst = IMAGEMODECOUNT, ArraySubType = System.Runtime.InteropServices.UnmanagedType.Struct)]
    public PreviewMode[] ImageModeList;
}

RGBImage.cs:

using UnityEngine;
using System.Collections;
using System;

/// <summary>
/// RGB image type
/// </summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential, CharSet = System.Runtime.InteropServices.CharSet.Ansi)]
public struct RGBImage
{

    /// <summary>
    /// Initialize from existing image data
    /// </summary>
    /// <param name="imageData">source image</param>
    public RGBImage(RGBImage imageData)
        : this(imageData, false)
    {

    }

    /// <summary>
    /// Initialize from existing image data
    /// </summary>
    /// <param name="imageData">source image</param>
    /// <param name="IsCompress">whether to allocate only the source's actual length;
    /// a compressed (smaller) buffer may cause errors if passed straight into C++</param>
    public RGBImage(RGBImage imageData, bool IsCompress)
    {
        if (!IsCompress) ImageData = new byte[RGBDATALENGTH];
        else ImageData = new byte[imageData.ImageData.Length];
        Array.Copy(imageData.ImageData, ImageData, imageData.ImageData.Length);
        Timestamp = imageData.Timestamp;
        width = imageData.width;
        height = imageData.height;
    }

    /// <summary>
    /// Copy from another image
    /// </summary>
    /// <param name="imageData">source image</param>
    public void Copy(RGBImage imageData)
    {
        Array.Copy(imageData.ImageData, ImageData, imageData.ImageData.Length);
        Timestamp = imageData.Timestamp;
        width = imageData.width;
        height = imageData.height;
    }


    /// <summary>
    /// Current width (x extent)
    /// </summary>
    public int width;

    /// <summary>
    /// Current height (y extent)
    /// </summary>
    public int height;
    /// <summary>
    /// Timestamp
    /// </summary>
    public ulong Timestamp;

    /// <summary>
    /// Maximum supported resolution
    /// </summary>
    public const int RGBDATALENGTH = 1920 * 1080 * 4;
    /// <summary>
    /// RGB data
    /// </summary>
    [System.Runtime.InteropServices.MarshalAs(System.Runtime.InteropServices.UnmanagedType.ByValArray, SizeConst = RGBDATALENGTH, ArraySubType = System.Runtime.InteropServices.UnmanagedType.U1)]
    //public short[] ImageData;
    public byte[] ImageData;

}

NativeMethods.cs:

using UnityEngine;
using System.Collections;
using System.Runtime.InteropServices;
using System;

public class NativeMethods {
    [DllImport("astra_android_bridge", EntryPoint = "openCamera")]
    public static extern bool openCamera();
    [DllImport("astra_android_bridge", EntryPoint = "closeCamera")]
    public static extern bool closeCamera();
    [DllImport("astra_android_bridge", EntryPoint = "startCamera")]
    public static extern bool startCamera();
    [DllImport("astra_android_bridge", EntryPoint = "stopCamera")]
    public static extern bool stopCamera();
    [DllImport("astra_android_bridge", EntryPoint = "getAvailableCameraModes")]
    public static extern void getAvailableCameraModes(ref ImageSupportedModeList pSupportedModeList);
    [DllImport("astra_android_bridge", EntryPoint = "getCameraMode")]
    public static extern void getCameraMode(ref PreviewMode Mode);
    [DllImport("astra_android_bridge", EntryPoint = "setCameraMode")]
    public static extern void setCameraMode(PreviewMode Mode);
    [DllImport("astra_android_bridge", EntryPoint = "GetImageData")]
    public static extern void GetImageData(IntPtr ImageData);
    [DllImport("astra_android_bridge", EntryPoint = "GetYUV420Data")]
    public static extern IntPtr GetYUV420Data();
    [DllImport("astra_android_bridge", EntryPoint = "dispose")]
    public static extern void dispose();

}

AndroidManifest.xml:

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.unity3d.player"
    android:installLocation="preferExternal"
    android:versionCode="1"
    android:versionName="1.0">

  <uses-sdk
      android:minSdkVersion="12"
      android:targetSdkVersion="18"/>

  <supports-screens
      android:smallScreens="true"
      android:normalScreens="true"
      android:largeScreens="true"
      android:xlargeScreens="true"
      android:anyDensity="true"/>

  <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW"/>
  <uses-permission android:name="android.permission.SYSTEM_OVERLAY_WINDOW"/>
  <uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS"/>
  <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
  <uses-permission android:name="android.permission.CAMERA" />
  <uses-feature android:name="android.hardware.camera" />

  <application
      android:theme="@android:style/Theme.NoTitleBar.Fullscreen"
      android:icon="@drawable/app_icon"
      android:label="@string/app_name"
      android:debuggable="true">
    <activity android:name="com.unity3d.player.UnityPlayerActivity"
        android:label="@string/app_name">
      <intent-filter>
        <action android:name="android.intent.action.MAIN" />
        <category android:name="android.intent.category.LAUNCHER" />
      </intent-filter>

      <meta-data android:name="unityplayer.UnityActivity" android:value="true" />
    </activity>
  </application>
</manifest>

 
