The JNI (native C/C++) part of the code is as follows:
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <sys/time.h>
#include <linux/videodev2.h>
/*#include <opencv/cv.h>
#include <opencv2/opencv.hpp>
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>*/
#include <android/log.h>
#include <android/bitmap.h>
#include "com_caffe_android_CaffeAndroidJni.h"
#define LOG_TAG "Damon"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL,LOG_TAG,__VA_ARGS__)
#ifdef __cplusplus
extern "C" {
#endif
#define dXMemBufferCnt 1
/*
 * All state for one V4L2 USB camera: the device fd, cached capability /
 * format / buffer structures returned by the driver, a streaming flag,
 * and the user-space mappings of the driver's capture buffers.
 */
typedef struct tagMyCamerDevice
{
int fd;  /* device file descriptor; 0 means closed */
struct v4l2_capability mCapability;  /* filled once by VIDIOC_QUERYCAP */
struct v4l2_format mFormat;  /* current capture format (VIDIOC_G_FMT / VIDIOC_S_FMT) */
struct v4l2_buffer mBuffer;  /* scratch descriptor reused for QUERYBUF / QBUF / DQBUF */
struct v4l2_requestbuffers mRequestBuffers;  /* mmap buffer request (VIDIOC_REQBUFS) */
int isenable;  /* 1 while VIDIOC_STREAMON is active */
void *mMem[dXMemBufferCnt];  /* mmap'ed capture buffers, one per driver buffer */
}MyCameraDevice;
/* Global singleton camera device, created by UsbVideoInit(). */
MyCameraDevice *pXCameraDevice=NULL;
/**************************
struct v4l2_capability
{
u8 driver[16]; // driver name
u8 card[32]; // device name
u8 bus_info[32]; // location of the device in the system
u32 version; // driver version number
u32 capabilities; // operations the device supports
u32 reserved[4]; // reserved fields
};
**********************/
/*
 * Open the first available V4L2 capture device (/dev/video0, falling back
 * to /dev/video1), verify it supports video capture with streaming I/O,
 * request a 640x480 YUYV format, and set up dXMemBufferCnt mmap'ed capture
 * buffers, queueing each so frames arrive as soon as streaming starts.
 *
 * Returns 0 on success, a negative code on failure. On failure the fd may
 * remain open; the caller (UsbVideoInit) cleans up via MyCameraDeviceDestroy().
 */
int MyCameraDeviceInit(MyCameraDevice *pInputDevice)
{
    if (pInputDevice == NULL)
        return -1;
    pInputDevice->fd = open("/dev/video0", O_RDWR);
    if (pInputDevice->fd < 0)
    {
        pInputDevice->fd = open("/dev/video1", O_RDWR);
        if (pInputDevice->fd < 0)
        {
            LOGD("damon===> open video device error ! \n");
            return -2;
        }
    }
    /* Query driver capabilities; require capture + streaming (mmap) I/O. */
    memset((void *)&pInputDevice->mCapability, 0, sizeof(struct v4l2_capability));
    if (ioctl(pInputDevice->fd, VIDIOC_QUERYCAP, &pInputDevice->mCapability) < 0)
    {
        LOGD("damon===> get capability err ! \n");
        return -4;
    }
    LOGD("damon==> capbility -> driver : %s , card : %s , bus_info : %s , version : %u \n", pInputDevice->mCapability.driver, pInputDevice->mCapability.card, pInputDevice->mCapability.bus_info, pInputDevice->mCapability.version);
    if ((pInputDevice->mCapability.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
    {
        LOGD("damon===> err : video capture not supported . \n");
        return -5;
    }
    if ((pInputDevice->mCapability.capabilities & V4L2_CAP_STREAMING) == 0)
    {
        LOGD("damon===> err : device does not support streaming i/o \n");
        return -6;
    }
    /* Enumerate and log every pixel format the driver offers (debug aid). */
    struct v4l2_fmtdesc tTempFmtdesc;
    struct v4l2_format tTempFormat;
    memset((void *)&tTempFmtdesc, 0, sizeof(struct v4l2_fmtdesc));
    tTempFmtdesc.index = 0;
    tTempFmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    LOGD("damon===> show all format support ...\n");
    while (ioctl(pInputDevice->fd, VIDIOC_ENUM_FMT, &tTempFmtdesc) != -1)
    {
        LOGD("===> \t%d.%s %x \n", tTempFmtdesc.index + 1, tTempFmtdesc.description, tTempFmtdesc.pixelformat);
        tTempFmtdesc.index++;
    }
    /* Probe (without applying) whether the driver could accept RGB32. */
    LOGD("damon===> check rgb32 format support \n");
    memset(&tTempFormat, 0, sizeof(struct v4l2_format));
    tTempFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tTempFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
    if (ioctl(pInputDevice->fd, VIDIOC_TRY_FMT, &tTempFormat) == -1)
    {
        LOGD("damon==> not support format RGB32 \n");
    }
    /* Read the current format so mFormat starts from driver defaults. */
    LOGD("damon====> get current format information \n");
    memset((void *)&pInputDevice->mFormat, 0, sizeof(struct v4l2_format));
    pInputDevice->mFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(pInputDevice->fd, VIDIOC_G_FMT, &pInputDevice->mFormat) != -1)
    {
        LOGD("Current data format : \twidth : %d , height : %d , sizeimage : %d , format : %x \n", pInputDevice->mFormat.fmt.pix.width, pInputDevice->mFormat.fmt.pix.height, pInputDevice->mFormat.fmt.pix.sizeimage, pInputDevice->mFormat.fmt.pix.pixelformat);
    }
    /* Was printf(), which never reaches Android logcat. */
    LOGD("damon==> %x %x \n", V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_YUV420);
    /* Request 640x480 YUYV; on success VIDIOC_S_FMT rewrites mFormat
       (including sizeimage) with what the driver actually granted. */
    pInputDevice->mFormat.fmt.pix.width = 640;
    pInputDevice->mFormat.fmt.pix.height = 480;
    pInputDevice->mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    if (ioctl(pInputDevice->fd, VIDIOC_S_FMT, &pInputDevice->mFormat) == -1)
    {
        /* Deliberately non-fatal: capture proceeds with the driver's
           current format read above. */
        LOGD("damon===> Error set format ! \n");
    }
    /* Ask the driver for mmap-able capture buffers. */
    memset(&pInputDevice->mRequestBuffers, 0, sizeof(struct v4l2_requestbuffers));
    pInputDevice->mRequestBuffers.count = dXMemBufferCnt;
    pInputDevice->mRequestBuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    pInputDevice->mRequestBuffers.memory = V4L2_MEMORY_MMAP;
    if (ioctl(pInputDevice->fd, VIDIOC_REQBUFS, &pInputDevice->mRequestBuffers) < 0)
    {
        LOGD("damon====> Unable to allocate buffers \n");
        return -7;
    }
    /* Map each buffer into user space (read-only suffices for capture). */
    int i = 0;
    for (i = 0; i < dXMemBufferCnt; i++)
    {
        memset(&pInputDevice->mBuffer, 0, sizeof(struct v4l2_buffer));
        pInputDevice->mBuffer.index = i;
        pInputDevice->mBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        pInputDevice->mBuffer.memory = V4L2_MEMORY_MMAP;
        if (ioctl(pInputDevice->fd, VIDIOC_QUERYBUF, &pInputDevice->mBuffer) < 0)
        {
            LOGD("damon===> err : unable to query buffer \n");
            return -8;
        }
        pInputDevice->mMem[i] = mmap(0, pInputDevice->mBuffer.length, PROT_READ, MAP_SHARED, pInputDevice->fd, pInputDevice->mBuffer.m.offset);
        if (pInputDevice->mMem[i] == MAP_FAILED)
        {
            LOGD("damon====> err : unable to map buffer \n");
            return -9;
        }
    }
    /* Queue every buffer so capture can begin immediately on STREAMON. */
    for (i = 0; i < dXMemBufferCnt; ++i)
    {
        memset(&pInputDevice->mBuffer, 0, sizeof(struct v4l2_buffer));
        pInputDevice->mBuffer.index = i;
        pInputDevice->mBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        pInputDevice->mBuffer.memory = V4L2_MEMORY_MMAP;
        if (ioctl(pInputDevice->fd, VIDIOC_QBUF, &pInputDevice->mBuffer) < 0)
        {
            LOGD("damon===> err : Unable to queue buffer");
            return -10;
        }
    }
    pInputDevice->isenable = 0;  /* streaming not started yet */
    return 0;
}
/*
 * Start streaming on the given camera device (VIDIOC_STREAMON).
 * Sets isenable to 1 on success; on failure only logs and leaves
 * isenable unchanged. A NULL device is ignored.
 */
void VideoEnable(MyCameraDevice *pInputCamDev)
{
    if (pInputCamDev == NULL)
        return;
    int tStreamType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(pInputCamDev->fd, VIDIOC_STREAMON, &tStreamType) < 0)
    {
        LOGD("damon===> unable to start capture \n");
        return;
    }
    pInputCamDev->isenable = 1;
}
/*
 * Stop streaming on the given camera device (VIDIOC_STREAMOFF).
 * Clears isenable on success; on failure only logs and leaves
 * isenable unchanged. A NULL device is ignored.
 */
void VideoDisable(MyCameraDevice *pInputCamDev)
{
    if (pInputCamDev == NULL)
        return;
    int tTempType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int tTempRet = ioctl(pInputCamDev->fd, VIDIOC_STREAMOFF, &tTempType);
    if (tTempRet < 0)
    {
        /* Message previously said "start" -- copy/paste from VideoEnable. */
        LOGD("damon===> unable to stop capture \n");
    }
    else
        pInputCamDev->isenable = 0;
}
/*
 * Copy the next captured frame into pOutputData (tInputDataLen bytes,
 * normally mFormat.fmt.pix.sizeimage). Returns 0 on success; -1 if the
 * device is not initialized, -2 if streaming cannot be enabled or the
 * dequeue fails, -5 for MJPEG (not implemented), -4 for any other format.
 */
int GetImage(unsigned char *pOutputData, int tInputDataLen)
{
if(pXCameraDevice==NULL)
return -1;
/* Lazily start streaming the first time a frame is requested. */
if(pXCameraDevice->isenable==0)
VideoEnable(pXCameraDevice);
if(pXCameraDevice->isenable==0)
return -2;
/* Re-queue the buffer dequeued on the previous call (mBuffer still holds
   its index/type/memory from the last DQBUF). On the very first call the
   buffer is already queued by the init code, so this ioctl fails and its
   return value is deliberately ignored. */
ioctl(pXCameraDevice->fd, VIDIOC_QBUF, &pXCameraDevice->mBuffer);
memset((void *)(&pXCameraDevice->mBuffer), 0, sizeof(struct v4l2_buffer));
pXCameraDevice->mBuffer.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
pXCameraDevice->mBuffer.memory=V4L2_MEMORY_MMAP;
/* Blocking dequeue of the next filled buffer; driver sets mBuffer.index. */
if(ioctl(pXCameraDevice->fd, VIDIOC_DQBUF, &pXCameraDevice->mBuffer)<0)
{
LOGD("damon===> unable to dequeue buffer \n");
return -2; /* NOTE(review): same code as the "not enabled" failure above */
}
LOGD("damon===> pixel : %x %x %x \n", pXCameraDevice->mFormat.fmt.pix.pixelformat, V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUYV);
switch(pXCameraDevice->mFormat.fmt.pix.pixelformat)
{
case V4L2_PIX_FMT_MJPEG:
/* MJPEG decoding is not implemented. */
return -5;
case V4L2_PIX_FMT_YUYV:
/* NOTE(review): copies tInputDataLen bytes from the mmap'ed buffer;
   assumes tInputDataLen <= mapped buffer length -- confirm callers
   always pass mFormat.fmt.pix.sizeimage. */
memcpy(pOutputData, pXCameraDevice->mMem[pXCameraDevice->mBuffer.index], tInputDataLen);
break;
default:
return -4;
}
/* if(ioctl(pXCameraDevice->fd, VIDIOC_QBUF, &pXCameraDevice->mBuffer) < 0)
{
LOGD("damon===> Unable to requeue buffer");
return -3;
}*/
return 0;
}
/*
 * Stop streaming, close the device fd, and free the global device object.
 * Safe to call when pXCameraDevice is already NULL (the original code
 * dereferenced pXCameraDevice->fd BEFORE the NULL check and would crash).
 * NOTE(review): the buffers mmap'ed in MyCameraDeviceInit() are never
 * munmap'ed; the per-buffer lengths are not stored, so the mappings are
 * only reclaimed on process exit -- consider keeping the lengths.
 */
void MyCameraDeviceDestroy()
{
    if (pXCameraDevice == NULL)
        return;
    if (pXCameraDevice->fd > 0)
    {
        VideoDisable(pXCameraDevice);
        close(pXCameraDevice->fd);
        pXCameraDevice->fd = 0;
    }
    free(pXCameraDevice);
    pXCameraDevice = NULL;
}
/*
 * Convert a packed YUYV (YUY2) frame to 24-bit pixels written in
 * B,G,R byte order (the order savebmp()/BMP expects).
 * Each 4-byte quad (Y0 U Y1 V) yields two output pixels sharing the
 * same U/V pair. pOutputRgb must hold tInputWidth*tInputHeight*3 bytes.
 * Always returns 0. (Removed unused local 'short rgb'.)
 */
int ConvertYUY2toRGB24(char *pOutputRgb, unsigned char *pInputBuffer, int tInputWidth, int tInputHeight)
{
    unsigned char *yuyv = pInputBuffer;
    int z = 0;  /* 0/1 toggle selecting Y0 or Y1 inside the current quad */
    for (int i = 0; i < tInputHeight; i++)
    {
        for (int j = 0; j < tInputWidth; j++)
        {
            /* Fixed-point YUV->RGB (coefficients pre-scaled by 256). */
            int y = (z ? yuyv[2] : yuyv[0]) << 8;
            int u = yuyv[1] - 128;
            int v = yuyv[3] - 128;
            int r = (y + (359 * v)) >> 8;
            int g = (y - (88 * u) - (183 * v)) >> 8;
            int b = (y + (454 * u)) >> 8;
            /* Clamp each channel to the displayable range [0, 255]. */
            r = (r > 255) ? 255 : ((r < 0) ? 0 : r);
            g = (g > 255) ? 255 : ((g < 0) ? 0 : g);
            b = (b > 255) ? 255 : ((b < 0) ? 0 : b);
            *pOutputRgb++ = b;
            *pOutputRgb++ = g;
            *pOutputRgb++ = r;
            if (z++)  /* after the quad's second pixel, advance the source */
            {
                z = 0;
                yuyv += 4;
            }
        }
    }
    return 0;
}
/*
 * Convert a packed YUYV (YUY2) frame into 32-bit 0xAARRGGBB pixels with
 * alpha forced to 0xff, matching an Android ARGB_8888 Bitmap.
 * Each 4-byte quad (Y0 U Y1 V) yields two output pixels sharing the same
 * U/V pair. pOutputRgb must hold tInputWidth*tInputHeight ints.
 * Always returns 0. (Removed unused 'short rgb' and dead commented code;
 * channels are combined with '|' -- identical result to the old '+'
 * since the masked fields cannot overlap.)
 */
int ConvertYUY2toARGB32(int *pOutputRgb, unsigned char *pInputBuffer, int tInputWidth, int tInputHeight)
{
    unsigned char *yuyv = pInputBuffer;
    int z = 0;  /* 0/1 toggle selecting Y0 or Y1 inside the current quad */
    for (int i = 0; i < tInputHeight; i++)
    {
        for (int j = 0; j < tInputWidth; j++)
        {
            /* Fixed-point YUV->RGB (coefficients pre-scaled by 256). */
            int y = (z ? yuyv[2] : yuyv[0]) << 8;
            int u = yuyv[1] - 128;
            int v = yuyv[3] - 128;
            int r = (y + (359 * v)) >> 8;
            int g = (y - (88 * u) - (183 * v)) >> 8;
            int b = (y + (454 * u)) >> 8;
            /* Clamp each channel to the displayable range [0, 255]. */
            r = (r > 255) ? 255 : ((r < 0) ? 0 : r);
            g = (g > 255) ? 255 : ((g < 0) ? 0 : g);
            b = (b > 255) ? 255 : ((b < 0) ? 0 : b);
            *pOutputRgb++ = (int)(0xff000000u | ((unsigned)r << 16) | ((unsigned)g << 8) | (unsigned)b);
            if (z++)  /* after the quad's second pixel, advance the source */
            {
                z = 0;
                yuyv += 4;
            }
        }
    }
    return 0;
}
/* Fixed-width Windows-style aliases for the BMP header fields. These must
 * be exactly 4/4/2 bytes on disk; the previous 'long'/'unsigned long' are
 * 8 bytes on LP64 ABIs (64-bit Android), which corrupted the written
 * BMP headers. <stdint.h> types keep the layout correct everywhere. */
typedef int32_t LONG;
typedef uint32_t DWORD;
typedef uint16_t WORD;
typedef unsigned char uchar;
/* BMP file header (BITMAPFILEHEADER). On disk this is exactly 14 bytes,
 * but with no pack pragma the in-memory struct carries 2 padding bytes
 * after bfType, so savebmp() must not dump it with a single sizeof-based
 * fwrite. */
typedef struct {
WORD bfType;  /* magic "BM" (0x4d42) */
DWORD bfSize;  /* total file size in bytes */
WORD bfReserved1;  /* must be 0 */
WORD bfReserved2;  /* must be 0 */
DWORD bfOffBits;  /* byte offset of the pixel data from file start */
} BMPFILEHEADER_T;
/* BMP info header (BITMAPINFOHEADER), 40 bytes on disk when DWORD/LONG
 * are 4 bytes wide. */
typedef struct{
DWORD biSize;  /* size of this header (40) */
LONG biWidth;  /* image width in pixels */
LONG biHeight;  /* height; negative means top-down row order */
WORD biPlanes;  /* always 1 */
WORD biBitCount;  /* bits per pixel (24 here) */
DWORD biCompression;  /* 0 = BI_RGB, uncompressed */
DWORD biSizeImage;  /* pixel data size in bytes */
LONG biXPelsPerMeter;  /* horizontal resolution, pixels per meter */
LONG biYPelsPerMeter;  /* vertical resolution, pixels per meter */
DWORD biClrUsed;  /* palette colors used; 0 for 24-bit */
DWORD biClrImportant;  /* 0 = all colors important */
} BMPINFOHEADER_T;
/*
 * Write a width x height 24-bit pixel buffer to 'bmp_file' as a BMP.
 * pdata must hold width*height*3 bytes in B,G,R order (as produced by
 * ConvertYUY2toRGB24). A negative biHeight marks top-down row order, so
 * the buffer is written as-is. No row padding is emitted, so 'width'
 * should be a multiple of 4 for strict BMP compliance (640 is).
 *
 * The on-disk BITMAPFILEHEADER is exactly 14 bytes while the in-memory
 * struct is padded, so each field is written individually rather than
 * dumping the struct (the old 8-byte dump leaked 2 garbage padding bytes
 * into the header, and bfOffBits was computed from the padded sizeof as
 * 56 instead of the real data offset 54).
 */
void savebmp(uchar * pdata, char * bmp_file, int width, int height )
{
    int size = width * height * 3;  /* 3 bytes per pixel */
    /* Part 1: file header. */
    BMPFILEHEADER_T bfh;
    bfh.bfType = (WORD)0x4d42;  /* "BM" */
    bfh.bfSize = (DWORD)(14 + sizeof(BMPINFOHEADER_T) + size);  /* on-disk header is 14 bytes, not sizeof(bfh) */
    bfh.bfReserved1 = 0;
    bfh.bfReserved2 = 0;
    bfh.bfOffBits = (DWORD)(14 + sizeof(BMPINFOHEADER_T));  /* pixel data at byte 54 */
    /* Part 2: info header. */
    BMPINFOHEADER_T bih;
    bih.biSize = sizeof(BMPINFOHEADER_T);
    bih.biWidth = width;
    bih.biHeight = -height;  /* negative = top-down scan order, no flip needed */
    bih.biPlanes = 1;
    bih.biBitCount = 24;
    bih.biCompression = 0;  /* BI_RGB, uncompressed */
    bih.biSizeImage = size;
    bih.biXPelsPerMeter = 2835;  /* ~72 DPI */
    bih.biYPelsPerMeter = 2835;
    bih.biClrUsed = 0;  /* 0 for 24-bit */
    bih.biClrImportant = 0;  /* all colors important */
    FILE *fp = fopen(bmp_file, "wb");
    if (!fp) return;
    /* Write the 14 on-disk file-header bytes field by field (assumes a
       little-endian target, as the original did). */
    fwrite(&bfh.bfType, 2, 1, fp);
    fwrite(&bfh.bfSize, 4, 1, fp);
    fwrite(&bfh.bfReserved1, 2, 1, fp);
    fwrite(&bfh.bfReserved2, 2, 1, fp);
    fwrite(&bfh.bfOffBits, 4, 1, fp);
    fwrite(&bih, sizeof(BMPINFOHEADER_T), 1, fp);
    /* Casts fix the old '%d with size_t' format mismatch (UB). */
    LOGD("damon===> head size : %d %d \n", (int)(14 + sizeof(BMPINFOHEADER_T)), size);
    fwrite(pdata, size, 1, fp);
    fclose(fp);
}
/*
* Class: com_caffe_android_CaffeAndroidJni
* Method: CaffeModelInit
* Signature: ()V
*/
/*
 * JNI: (re)create the global camera device and start streaming.
 * Returns 0 on success, -1 if allocation failed, -2 if device init failed.
 */
JNIEXPORT jint JNICALL Java_com_caffe_android_CaffeAndroidJni_UsbVideoInit
(JNIEnv *, jobject)
{
    /* Drop any previously opened device before re-initializing. */
    if (pXCameraDevice != NULL)
    {
        MyCameraDeviceDestroy();
        pXCameraDevice = NULL;
    }
    pXCameraDevice = (MyCameraDevice *)malloc(sizeof(MyCameraDevice));
    if (pXCameraDevice == NULL)
        return -1;
    memset(pXCameraDevice, 0, sizeof(MyCameraDevice));
    if (MyCameraDeviceInit(pXCameraDevice) < 0)
    {
        MyCameraDeviceDestroy();
        return -2;
    }
    VideoEnable(pXCameraDevice);
    return 0;
}
/* JNI: release the global camera device, if any. */
JNIEXPORT void JNICALL Java_com_caffe_android_CaffeAndroidJni_UsbVideoClose
(JNIEnv *, jobject)
{
    if (pXCameraDevice == NULL)
        return;
    MyCameraDeviceDestroy();
    pXCameraDevice = NULL;
}
/*
 * JNI: start (non-zero) or stop (zero) streaming on the global device.
 * Returns the resulting isenable flag, or 0 when no device is open.
 */
JNIEXPORT jint JNICALL Java_com_caffe_android_CaffeAndroidJni_SetUsbVideoEnable
(JNIEnv *, jobject, jint tInputEnable)
{
    if (pXCameraDevice == NULL)
        return 0;
    if (tInputEnable)
        VideoEnable(pXCameraDevice);
    else
        VideoDisable(pXCameraDevice);
    return pXCameraDevice->isenable;
}
/*
 * JNI: grab one frame, convert it to ARGB_8888, and return it as a new
 * jint[] of width*height pixels. pOutputWidthAndHeight must have at least
 * 3 slots and receives {width, height, sizeimage}. Returns NULL when no
 * device is open, the out-array is too small, allocation fails, or the
 * capture fails.
 *
 * Fixes over the original: 'new jint[2]' was written with 3 elements
 * (heap overflow) and never deleted; pTempYuvData leaked on every call
 * and on the error path; memset of the RGB buffer covered only 1/4 of it
 * (missing sizeof(jint)); and the ~1.2 MB pixel VLA lived on the stack.
 */
JNIEXPORT jintArray JNICALL Java_com_caffe_android_CaffeAndroidJni_GetVideoFrame
(JNIEnv *env, jobject, jintArray pOutputWidthAndHeight)
{
    if (pXCameraDevice == NULL)
        return NULL;
    int tTempWidth = pXCameraDevice->mFormat.fmt.pix.width;
    int tTempHeight = pXCameraDevice->mFormat.fmt.pix.height;
    int tTempSize = pXCameraDevice->mFormat.fmt.pix.sizeimage;
    int tTempCnt = (int)env->GetArrayLength(pOutputWidthAndHeight);
    if (tTempCnt < 3)
        return NULL;
    jint pTempOutParam[3];  /* stack: no overflow, no leak */
    pTempOutParam[0] = tTempWidth;
    pTempOutParam[1] = tTempHeight;
    pTempOutParam[2] = tTempSize;
    env->SetIntArrayRegion(pOutputWidthAndHeight, 0, 3, pTempOutParam);
    unsigned char *pTempYuvData = (unsigned char *)malloc(tTempSize);
    if (pTempYuvData == NULL)
        return NULL;
    memset(pTempYuvData, 0, tTempSize);
    if (GetImage(pTempYuvData, tTempSize) != 0)
    {
        LOGD("damon===> get usb video error !");
        free(pTempYuvData);
        return NULL;
    }
    int tTempRgbLen = tTempWidth * tTempHeight;
    jint *pTempRgbData = (jint *)malloc(tTempRgbLen * sizeof(jint));  /* heap: too big for the stack */
    if (pTempRgbData == NULL)
    {
        free(pTempYuvData);
        return NULL;
    }
    memset(pTempRgbData, 0, tTempRgbLen * sizeof(jint));
    ConvertYUY2toARGB32((int *)pTempRgbData, pTempYuvData, tTempWidth, tTempHeight);
    free(pTempYuvData);
    jintArray pTempRet = env->NewIntArray(tTempRgbLen);
    if (pTempRet != NULL)
        env->SetIntArrayRegion(pTempRet, 0, tTempRgbLen, pTempRgbData);
    free(pTempRgbData);
    return pTempRet;
}
#ifdef __cplusplus
}
#endif
The Android application-layer calling code is as follows:
public native int[] GetVideoFrame(int pOutputParam[]);
/**
 * Wraps a width*height array of packed 0xAARRGGBB pixels (as produced by
 * the native GetVideoFrame) in a new ARGB_8888 Bitmap.
 * Removed the unused frameSize local and the dead commented-out
 * per-pixel repacking loop.
 *
 * @param data   pixel array, at least width*height entries
 * @param width  image width in pixels
 * @param height image height in pixels
 * @return a newly allocated Bitmap containing the pixels
 */
public Bitmap rawByteArray2RGBABitmap2(int[] data, int width, int height) {
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bmp.setPixels(data, 0, width, 0, 0, width, height);
    return bmp;
}
// Fetch one frame: pTempParam receives {width, height, sizeimage}.
int pTempParam[] = new int[3];
int pTempRgbData[] = tXTestJni.GetVideoFrame(pTempParam);
// Was 'pTempYuvData', which is undefined here -- the native call returns
// the ARGB pixel array in pTempRgbData.
Bitmap tTempUsbVideo = rawByteArray2RGBABitmap2(pTempRgbData, pTempParam[0], pTempParam[1]);