说明:
1 v4l2.c 参考 v4l2uvc.c 内核参考uvc_v4l2.c yuv2rgb.c 参考 utils.c
2 yuv/rgb565 2字节/像素 rgb24 3字节/像素 rgb32 4字节/像素
3 取得LCD屏的分辨率 GetDispResolution(int width,int height,int* iLcdapp);
4 摄像头格式为RGB
1 video_manager.h (构造3个结构体)
- struct VideoDevice {
width、height、format...
struct VideoOpr *ptOPr;
}
- typedef struct VideoBuf {
T_PixelDatas tPixelDatas;
int iPixelFormat;
}T_VideoBuf, *PT_VideoBuf;
- typedef struct VideoOpr {
char *name;
int (*InitDevice)(char *strDevName, PT_VideoDevice ptVideoDevice);
int (*ExitDevice)(PT_VideoDevice ptVideoDevice);
int (*GetFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
int (*GetFormat)(PT_VideoDevice ptVideoDevice);
int (*PutFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
int (*StartDevice)(PT_VideoDevice ptVideoDevice);
int (*StopDevice)(PT_VideoDevice ptVideoDevice);
struct VideoOpr *ptNext;
}T_VideoOpr, *PT_VideoOpr;
2 v4l2.c (实现videoOPr的函数)
struct v4l2_capability tV4l2Cap;
struct v4l2_fmtdesc tFmtDesc;
struct v4l2_format tV4l2Fmt;
struct v4l2_requestbuffers tV4l2ReqBuffs;
struct v4l2_buffer tV4l2Buf;
/* Build a VideoOpr structure: the v4l2 implementation of the video
 * operations interface, filled with the functions defined in this file.
 * Streaming (mmap) I/O is used for Get/PutFrame. */
static T_VideoOpr g_tV4l2VideoOpr = {
.name = "v4l2",
.InitDevice = V4l2InitDevice,
.ExitDevice = V4l2ExitDevice,
.GetFormat = V4l2GetFormat,
.GetFrame = V4l2GetFrameForStreaming,
.PutFrame = V4l2PutFrameForStreaming,
.StartDevice = V4l2StartDevice,
.StopDevice = V4l2StopDevice,
};
/*
 * V4l2InitDevice - open the camera device and prepare capture buffers.
 * NOTE(review): sketch only — iFd/iError and the tV4l2* structures are
 * file-scope in the full file; error handling is elided here.
 */
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
/* open */
/* VIDIOC_QUERYCAP: is this a video-capture device, and which I/O
 * methods does it support (streaming vs. read/write)? */
iError = ioctl(iFd, VIDIOC_QUERYCAP, &tV4l2Cap);
/* VIDIOC_ENUM_FMT: enumerate the pixel formats the camera supports */
iError = ioctl(iFd, VIDIOC_ENUM_FMT, &tFmtDesc);
/* VIDIOC_S_FMT: select the format the camera should use */
iError = ioctl(iFd, VIDIOC_S_FMT, &tV4l2Fmt);
/* VIDIOC_REQBUFS: request capture buffers from the driver */
iError = ioctl(iFd, VIDIOC_REQBUFS, &tV4l2ReqBuffs);
/* VIDIOC_QUERYBUF: query each buffer's offset/length, then mmap it */
iError = ioctl(iFd, VIDIOC_QUERYBUF, &tV4l2Buf);
}
/*
 * Fetch one captured frame using streaming (mmap) I/O.
 * NOTE(review): sketch — iRet and tV4l2Buf are file-scope in the full file.
 */
static int V4l2GetFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
/* poll: block until the driver has a filled buffer available */
/* VIDIOC_DQBUF: dequeue the filled buffer from the driver's queue */
// then copy the VideoDevice frame info into ptVideoBuf->tPixelDatas
iRet = ioctl(ptVideoDevice->iFd, VIDIOC_DQBUF, &tV4l2Buf);
}
/*
 * Return a consumed frame buffer to the driver (streaming I/O),
 * so it can be refilled with the next frame.
 */
static int V4l2PutFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
/* VIDIOC_QBUF: re-queue the buffer into the driver's incoming queue */
iError = ioctl(ptVideoDevice->iFd, VIDIOC_QBUF, &tV4l2Buf);
}
3 yuv2rgb.c/rgb2rgb.c/mjpeg2rgb.c
//实现这3个函数
/* A pixel-format converter. Converters are chained through ptNext and a
 * suitable one is chosen by calling isSupport() with the in/out formats. */
typedef struct VideoConvert {
char *name;                              /* converter name, e.g. "yuv2rgb" */
int (*isSupport)(int iPixelFormatIn, int iPixelFormatOut);  /* 1 if in->out is handled */
int (*Convert)(PT_VideoBuf ptVideoBufIn, PT_VideoBuf ptVideoBufOut);  /* perform the conversion */
int (*ConvertExit)(PT_VideoBuf ptVideoBufOut);  /* release output-buffer resources */
struct VideoConvert *ptNext;             /* next converter in the linked list */
}T_VideoConvert, *PT_VideoConvert;
/*
 * Report whether the YUV->RGB converter handles this format pair.
 * Input must be YUYV; output must be RGB565 or RGB32. Returns 1/0.
 */
static int isSupportYuv2Rgb(int iPixelFormatIn, int iPixelFormatOut)
{
    int bInputOk  = (iPixelFormatIn == V4L2_PIX_FMT_YUYV);
    int bOutputOk = (iPixelFormatOut == V4L2_PIX_FMT_RGB565) ||
                    (iPixelFormatOut == V4L2_PIX_FMT_RGB32);
    return bInputOk && bOutputOk;
}
/* Based on luvcview. */
/*
 * Convert a YUV422 (YUYV) frame to RGB.
 * NOTE(review): sketch only — both conversions are invoked here; the real
 * implementation must dispatch on ptVideoBufOut's pixel format (RGB565 vs
 * RGB32), and ptPixelDatasIn/Out presumably alias the tPixelDatas members
 * of the two buffers — TODO confirm against the full file.
 */
static int Yuv2RgbConvert(PT_VideoBuf ptVideoBufIn, PT_VideoBuf ptVideoBufOut)
{
Pyuv422torgb565(ptPixelDatasIn->aucPixelDatas, ptPixelDatasOut->aucPixelDatas, ptPixelDatasOut->iWidth, ptPixelDatasOut->iHeight);  /* YUYV -> RGB565 (2 bytes/pixel) */
Pyuv422torgb32(ptPixelDatasIn->aucPixelDatas, ptPixelDatasOut->aucPixelDatas, ptPixelDatasOut->iWidth, ptPixelDatasOut->iHeight);  /* YUYV -> RGB32 (4 bytes/pixel) */
}