Porting the OV5640 Camera to the i.MX6ULL Development Board (Part 3)

Platform: ALIENTEK (正点原子) i.MX6ULL-mini development board

Camera: OV5640

Display: ALIENTEK 1024*600 LCD panel

Overview

The previous two articles worked through the probe function and the relationships between the important structures and functions. Now it is time to port the OV5640 driver to the i.MX6ULL board. ALIENTEK already provides a ported driver that covers all of their official LCD resolutions; if you need it, it can be downloaded from the ALIENTEK i.MX6ULL download center. Here I only target a 1024*600 panel at 30 FPS and, for learning purposes, record the porting steps below.

Getting the OV5640 driver

First get the OV5640 camera driver provided by NXP, located at linux-imx-rel_imx_4.1.15_2.1.0_ga/drivers/media/platform/mxc/subdev. Create a new folder named new_ov5640:

awei@awei-virtual-machine:~/liunx/IMX6ULL/project$ mkdir new_ov5640

Copy ov5640.c and mx6s_capture.c from the path above into new_ov5640:

awei@awei-virtual-machine:~/liunx/IMX6ULL/project/new_ov5640$ cp ../../linux/linux-imx-rel_imx_4.1.15_2.1.0_ga/drivers/media/platform/mxc/subdev/{ov5640.c,mx6s_capture.c} ./

Modifying the device tree

The OV5640 is controlled over I2C and delivers image data over the parallel CSI interface, so first add the OV5640 node under the i2c2 node and wire it to the CSI node:

&i2c2 {
	clock_frequency = <100000>;
	pinctrl-names = "default";
	pinctrl-0 = <&pinctrl_i2c2>;
	status = "okay";

	ov5640: ov5640@3c {
		compatible = "ovti,ov5640";
		reg = <0x3c>;
		pinctrl-names = "default";
		pinctrl-0 = <&pinctrl_csi1
						&csi_pwn_rst>;
		clocks = <&clks IMX6UL_CLK_CSI>;
		clock-names = "csi_mclk";
		pwn-gpios = <&gpio1 4 1>;
		rst-gpios = <&gpio1 2 0>;
		csi_id = <0>;
		mclk = <24000000>;
		mclk_source = <0>;
		status = "okay";
		port {
			ov5640_ep: endpoint {
				remote-endpoint = <&csi1_ep>;
			};
		};
	};
};
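
A quick cross-check against the earlier articles: mclk, mclk_source and csi_id are read back by the probe function with of_property_read_u32(), the pwn-gpios and rst-gpios handles are fetched with of_get_named_gpio(), and clock-names = "csi_mclk" is the name devm_clk_get() asks for, so these property names must match what the driver expects exactly (this is my reading of the 4.1.15 probe code).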

CSI pinmux settings

		pinctrl_csi1: csi1grp { /* CSI pin configuration */
			fsl,pins = <
				MX6UL_PAD_CSI_MCLK__CSI_MCLK		0x1b088
				MX6UL_PAD_CSI_PIXCLK__CSI_PIXCLK	0x1b088
				MX6UL_PAD_CSI_VSYNC__CSI_VSYNC		0x1b088
				MX6UL_PAD_CSI_HSYNC__CSI_HSYNC		0x1b088
				MX6UL_PAD_CSI_DATA00__CSI_DATA02	0x1b088
				MX6UL_PAD_CSI_DATA01__CSI_DATA03	0x1b088
				MX6UL_PAD_CSI_DATA02__CSI_DATA04	0x1b088
				MX6UL_PAD_CSI_DATA03__CSI_DATA05	0x1b088
				MX6UL_PAD_CSI_DATA04__CSI_DATA06	0x1b088
				MX6UL_PAD_CSI_DATA05__CSI_DATA07	0x1b088
				MX6UL_PAD_CSI_DATA06__CSI_DATA08	0x1b088
				MX6UL_PAD_CSI_DATA07__CSI_DATA09	0x1b088
			>;
		};

        pinctrl_i2c2: i2c2grp { /* I2C2 pin configuration */
			fsl,pins = <
				MX6UL_PAD_UART5_TX_DATA__I2C2_SCL 0x4001b8b0
				MX6UL_PAD_UART5_RX_DATA__I2C2_SDA 0x4001b8b0
			>;
		};

Power-down and reset pin settings

		/* OV5640 power-down and reset pins */
		csi_pwn_rst: csi_pwn_rstgrp {
			fsl,pins = <
				MX6UL_PAD_GPIO1_IO02__GPIO1_IO02	0x10b0 	/* reset pin */
				MX6UL_PAD_GPIO1_IO04__GPIO1_IO04	0x10b0	/* power-down pin */
			>;
		};

Then add the matching endpoint under the CSI node:

&csi {
	status = "okay";

	port {
		csi1_ep: endpoint {
			remote-endpoint = <&ov5640_ep>;
		};
	};
};

This completes the device tree changes.

Adding the structure entries

ov5640_mode

The previous article went through the changes needed during the port. The first is to add an entry for your own LCD mode to the ov5640_mode enum: my_ov5640_mode_1024_600 = 9.

enum ov5640_mode {
	ov5640_mode_MIN = 0,
	ov5640_mode_VGA_640_480 = 0,
	ov5640_mode_QVGA_320_240 = 1,
	ov5640_mode_NTSC_720_480 = 2,
	ov5640_mode_PAL_720_576 = 3,
	ov5640_mode_720P_1280_720 = 4,
	ov5640_mode_1080P_1920_1080 = 5,
	ov5640_mode_QSXGA_2592_1944 = 6,
	ov5640_mode_QCIF_176_144 = 7,
	ov5640_mode_XGA_1024_768 = 8,
	my_ov5640_mode_1024_600 = 9, /* mode for my own LCD */
	ov5640_mode_MAX = 9,

};

ov5640_mode_info_data[2][ov5640_mode_MAX + 1]

In this table, add the new mode, its resolution and its register table to the 30 fps group, as shown in the last entry of the second (30 fps) array below:

static struct ov5640_mode_info ov5640_mode_info_data[2][ov5640_mode_MAX + 1] = {
	{
		{ov5640_mode_VGA_640_480,      640,  480,
		ov5640_setting_15fps_VGA_640_480,
		ARRAY_SIZE(ov5640_setting_15fps_VGA_640_480)},
		{ov5640_mode_QVGA_320_240,     320,  240,
		ov5640_setting_15fps_QVGA_320_240,
		ARRAY_SIZE(ov5640_setting_15fps_QVGA_320_240)},
		{ov5640_mode_NTSC_720_480,     720,  480,
		ov5640_setting_15fps_NTSC_720_480,
		ARRAY_SIZE(ov5640_setting_15fps_NTSC_720_480)},
		{ov5640_mode_PAL_720_576,      720,  576,
		ov5640_setting_15fps_PAL_720_576,
		ARRAY_SIZE(ov5640_setting_15fps_PAL_720_576)},
		{ov5640_mode_720P_1280_720,   1280,  720,
		ov5640_setting_15fps_720P_1280_720,
		ARRAY_SIZE(ov5640_setting_15fps_720P_1280_720)},
		{ov5640_mode_1080P_1920_1080, 1920, 1080,
		ov5640_setting_15fps_1080P_1920_1080,
		ARRAY_SIZE(ov5640_setting_15fps_1080P_1920_1080)},
		{ov5640_mode_QSXGA_2592_1944, 2592, 1944,
		ov5640_setting_15fps_QSXGA_2592_1944,
		ARRAY_SIZE(ov5640_setting_15fps_QSXGA_2592_1944)},
		{ov5640_mode_QCIF_176_144,     176,  144,
		ov5640_setting_15fps_QCIF_176_144,
		ARRAY_SIZE(ov5640_setting_15fps_QCIF_176_144)},
		{ov5640_mode_XGA_1024_768,    1024,  768,
		ov5640_setting_15fps_XGA_1024_768,
		ARRAY_SIZE(ov5640_setting_15fps_XGA_1024_768)},
	},
	{
		{ov5640_mode_VGA_640_480,      640,  480,
		ov5640_setting_30fps_VGA_640_480,
		ARRAY_SIZE(ov5640_setting_30fps_VGA_640_480)},
		{ov5640_mode_QVGA_320_240,     320,  240,
		ov5640_setting_30fps_QVGA_320_240,
		ARRAY_SIZE(ov5640_setting_30fps_QVGA_320_240)},
		{ov5640_mode_NTSC_720_480,     720,  480,
		ov5640_setting_30fps_NTSC_720_480,
		ARRAY_SIZE(ov5640_setting_30fps_NTSC_720_480)},
		{ov5640_mode_PAL_720_576,      720,  576,
		ov5640_setting_30fps_PAL_720_576,
		ARRAY_SIZE(ov5640_setting_30fps_PAL_720_576)},
		{ov5640_mode_720P_1280_720,   1280,  720,
		ov5640_setting_30fps_720P_1280_720,
		ARRAY_SIZE(ov5640_setting_30fps_720P_1280_720)},
		{ov5640_mode_1080P_1920_1080, 0, 0, NULL, 0},
		{ov5640_mode_QSXGA_2592_1944, 0, 0, NULL, 0},
		{ov5640_mode_QCIF_176_144,     176,  144,
		ov5640_setting_30fps_QCIF_176_144,
		ARRAY_SIZE(ov5640_setting_30fps_QCIF_176_144)},
		{ov5640_mode_XGA_1024_768,    1024,  768,
		ov5640_setting_30fps_XGA_1024_768,
		ARRAY_SIZE(ov5640_setting_30fps_XGA_1024_768)},

		{my_ov5640_mode_1024_600,    1024,  600,
		my_LCD_30fps_1024_600,
		ARRAY_SIZE(my_LCD_30fps_1024_600)},
	},
};
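
For reference, each entry in this table fills a struct ov5640_mode_info; in the stock driver it looks roughly like this (reproduced from memory, so field names may differ slightly from your source):

struct ov5640_mode_info {
	enum ov5640_mode mode;		/* index into the ov5640_mode enum above */
	u32 width;			/* active output width */
	u32 height;			/* active output height */
	struct reg_value *init_data_ptr;/* register table for this mode */
	u32 init_data_size;		/* number of entries in that table */
};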

Adding the pixel format

The RGB565 format is used here, so add {MEDIA_BUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB} to ov5640_colour_fmts:

static const struct ov5640_datafmt ov5640_colour_fmts[] = {
	{MEDIA_BUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB},
	{MEDIA_BUS_FMT_YUYV8_2X8, V4L2_COLORSPACE_JPEG},
};
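
Putting the RGB565 entry first also makes it the fallback: as shown later in ov5640_s_fmt, ov5640_colour_fmts[0] is used whenever the requested media-bus format is not found.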

That completes the structure additions. The whole point of these entries is to get the new register table written into the OV5640 without changing the driver framework; the next step is to work out the register values themselves.

Configuring the register table

Output window size

The stock driver already implements most of the functionality; the first thing to adapt is the resolution of my own LCD.

[Figure from the OV5640 datasheet: windowing diagram — 02a0e31d43be464dae938368fe28fb3c.png]

That figure is a bit confusing, so I redrew it according to my own understanding:

[Figure: my redrawn windowing diagram — 46b6f381a7db4d1297bffd190182cc36.jpeg]

If you know two diagonal corners of a rectangle, the rectangle is fully determined. So the OV5640 capture window is defined by four coordinate values: X_ADDR_ST, Y_ADDR_ST, X_ADDR_END and Y_ADDR_END.

X_ADDR_ST [0x3800, 0x3801]: registers 0x3800 and 0x3801 hold the high and low byte of the coordinate; the other three coordinates are split across register pairs in the same way.

Following the datasheet, the default values are fine here: (X_ADDR_ST, Y_ADDR_ST) = (0, 0) and (X_ADDR_END, Y_ADDR_END) = (2623, 1951). Note that this only configures the capture window, not the picture that is finally output.

[Figure: excerpt from the OV5640 datasheet — 3d34e1fedb064c22a7eb7fd6c757a3b1.png]

After that, configure the actual output window size.

My LCD resolution is 1024*600, so write 0x04 to register 0x3808 and 0x00 to 0x3809, i.e. 0x0400 = 1024, and write 0x02 to 0x380A and 0x58 to 0x380B, i.e. 0x0258 = 600. That completes the resolution adaptation.
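
As a quick illustration of that arithmetic, the helper below splits a width/height pair into those register bytes. It is not part of the driver; it only assumes the driver's existing ov5640_write_reg(reg, val) helper:

/* Illustration only: program a width x height output size into the OV5640
 * DVP output size registers (0x3808/0x3809 and 0x380a/0x380b). */
static int ov5640_set_output_size(u16 out_width, u16 out_height)
{
	int ret = 0;

	ret |= ov5640_write_reg(0x3808, out_width >> 8);	/* 1024 -> 0x04 */
	ret |= ov5640_write_reg(0x3809, out_width & 0xff);	/*         0x00 */
	ret |= ov5640_write_reg(0x380a, out_height >> 8);	/*  600 -> 0x02 */
	ret |= ov5640_write_reg(0x380b, out_height & 0xff);	/*         0x58 */

	return ret;
}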

Frame rate setting

I have not yet fully worked out how the OV5640 frame rate is derived, so for now I simply reuse the 30 fps settings from the driver code:

{0x3035, 0x21, 0, 0}, {0x3036, 0x69, 0, 0}, {0x3037, 0x13, 0, 0}

The OV5640 has a very large number of registers and I will not go through them one by one; see the datasheet or the ALIENTEK HAL-library tutorial.

Below is the 1024*600 LCD configuration, based on the ALIENTEK code; other configurations can be downloaded from the ALIENTEK website.

static struct reg_value my_LCD_30fps_1024_600[] = {
        {0x3c07, 0x08, 0, 0}, {0x3820, 0x47, 0, 0}, {0x3821, 0x07, 0, 0},
        {0x3814, 0x31, 0, 0}, {0x3815, 0x31, 0, 0}, {0x3800, 0x00, 0, 0},
        {0x3801, 0x00, 0, 0}, {0x3802, 0x00, 0, 0}, {0x3803, 0x04, 0, 0},
        {0x3804, 0x0a, 0, 0}, {0x3805, 0x3f, 0, 0}, {0x3806, 0x07, 0, 0},
        {0x3807, 0x9b, 0, 0}, {0x3808, 0x04, 0, 0}, {0x3809, 0x00, 0, 0},
        {0x380a, 0x02, 0, 0}, {0x380b, 0x58, 0, 0}, {0x380c, 0x07, 0, 0},
        {0x380d, 0x68, 0, 0}, {0x380e, 0x03, 0, 0}, {0x380f, 0xd8, 0, 0},
        {0x3813, 0x06, 0, 0}, {0x3618, 0x00, 0, 0}, {0x3612, 0x29, 0, 0},
        {0x3709, 0x52, 0, 0}, {0x370c, 0x03, 0, 0}, {0x3a02, 0x0b, 0, 0},
        {0x3a03, 0x88, 0, 0}, {0x3a14, 0x0b, 0, 0}, {0x3a15, 0x88, 0, 0},
        {0x4004, 0x02, 0, 0}, {0x3002, 0x1c, 0, 0}, {0x3006, 0xc3, 0, 0},
        {0x4713, 0x03, 0, 0}, {0x4407, 0x04, 0, 0}, {0x460b, 0x35, 0, 0},
        {0x460c, 0x20, 0, 0}, {0x4837, 0x22, 0, 0}, {0x3824, 0x01, 0, 0},
        {0x5001, 0xa3, 0, 0}, {0x3034, 0x1a, 0, 0}, {0x3035, 0x21, 0, 0},
        {0x3036, 0x69, 0, 0}, {0x3037, 0x13, 0, 0},
};
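
Each {address, value, mask, delay-ms} entry above is eventually written to the sensor by the driver's ov5640_download_firmware() helper, reached through ov5640_change_mode(). A simplified sketch of what that loop does, with struct reg_value field names as I remember them from the 4.1.15 source (treat it as a sketch, not the verbatim implementation):

/* Simplified sketch of applying a reg_value table to the sensor. */
static int ov5640_apply_regs(struct reg_value *regs, int size)
{
	int i, ret;

	for (i = 0; i < size; i++) {
		u8 val = regs[i].u8Val;

		if (regs[i].u8Mask) {		/* read-modify-write when a mask is given */
			u8 cur = 0;

			ret = ov5640_read_reg(regs[i].u16RegAddr, &cur);
			if (ret < 0)
				return ret;
			val = (cur & ~regs[i].u8Mask) | (val & regs[i].u8Mask);
		}

		ret = ov5640_write_reg(regs[i].u16RegAddr, val);
		if (ret < 0)
			return ret;

		if (regs[i].u32Delay_ms)	/* some entries need a settling delay */
			msleep(regs[i].u32Delay_ms);
	}

	return 0;
}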

Modifying the functions

The previous article identified which functions are responsible for setting the parameters; now modify them.

ov5640_s_fmt

This function sets the pixel format, resolution and related information:

static int ov5640_s_fmt(struct v4l2_subdev *sd,
			struct v4l2_mbus_framefmt *mf)
{
	struct i2c_client *client = v4l2_get_subdevdata(sd);
	struct ov5640 *sensor = to_ov5640(client);
	const struct ov5640_datafmt *my_fm = NULL;	/* pixel format descriptor */

	my_fm = ov5640_find_datafmt(mf->code);	/* was a supported format requested? */
	if (!my_fm) {				/* if not, fall back to RGB565 */
		mf->code	= ov5640_colour_fmts[0].code;
		mf->colorspace	= ov5640_colour_fmts[0].colorspace;
		my_fm		= &ov5640_colour_fmts[0];	/* don't leave sensor->fmt NULL */
	}

	mf->field = V4L2_FIELD_NONE;	/* progressive (line-by-line) output */

	/* select RGB565 output on the sensor */
	ov5640_write_reg(0x501f, 0x01);
	ov5640_write_reg(0x4300, 0x6f);

	/* write the my_LCD_30fps_1024_600 register table straight into the OV5640 */
	ov5640_change_mode(ov5640_30_fps, my_ov5640_mode_1024_600);

	/* report the 1024*600 resolution back to the caller */
	mf->width = ov5640_mode_info_data[1][my_ov5640_mode_1024_600].width;
	mf->height = ov5640_mode_info_data[1][my_ov5640_mode_1024_600].height;

	sensor->fmt = my_fm;
	sensor->pix.colorspace = mf->colorspace;

	return 0;
}
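
One thing to be aware of with this hard-coded version: whatever resolution or frame rate user space asks for through VIDIOC_S_FMT, the sensor is always programmed with the 1024*600 @ 30 fps table, and the width/height reported back are taken from that mode. That is fine for this single-LCD setup, but it is the first place to revisit if more modes are added later.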

This completes the changes to ov5640.c; next, modify mx6s_capture.c.

mx6s_capture.c

Add the matching pixel format entry to the struct mx6s_fmt formats[] array; in my case that is RGB565:

	{
		.name		= "RGB565_LE",
		.fourcc		= V4L2_PIX_FMT_RGB565,
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
	},

The full array then looks like this:

static struct mx6s_fmt formats[] = {
	{
		.name		= "UYVY-16",
		.fourcc		= V4L2_PIX_FMT_UYVY,
		.pixelformat	= V4L2_PIX_FMT_UYVY,
		.mbus_code	= MEDIA_BUS_FMT_UYVY8_2X8,
		.bpp		= 2,
	}, {
		.name		= "YUYV-16",
		.fourcc		= V4L2_PIX_FMT_YUYV,
		.pixelformat	= V4L2_PIX_FMT_YUYV,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 2,
	}, {
		.name		= "YUV32 (X-Y-U-V)",
		.fourcc		= V4L2_PIX_FMT_YUV32,
		.pixelformat	= V4L2_PIX_FMT_YUV32,
		.mbus_code	= MEDIA_BUS_FMT_AYUV8_1X32,
		.bpp		= 4,
	}, {
		.name		= "RAWRGB8 (SBGGR8)",
		.fourcc		= V4L2_PIX_FMT_SBGGR8,
		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
		.mbus_code	= MEDIA_BUS_FMT_SBGGR8_1X8,
		.bpp		= 1,
	},{
		.name		= "RGB565_LE",
		.fourcc		= V4L2_PIX_FMT_RGB565,
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
	}
};
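
The .mbus_code value is what ties this entry back to the sensor driver: mx6s_capture.c looks up formats[] by the media-bus code reported by the subdevice, so it has to be MEDIA_BUS_FMT_RGB565_2X8_LE here, matching the ov5640_colour_fmts entry added earlier. The lookup is roughly the following (helper name and shape recalled from the 4.1.15 source, so treat it as a sketch):

/* Find the capture-side format entry matching a sensor media-bus code. */
static struct mx6s_fmt *format_by_mbus(u32 code)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(formats); i++) {
		if (formats[i].mbus_code == code)
			return formats + i;
	}
	return NULL;
}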

The pixel format is selected in static int mx6s_configure_csi(struct mx6s_csi_dev *csi_dev). The cases in its switch statement correspond to the pixelformat values in the struct mx6s_fmt formats[] array, so add case V4L2_PIX_FMT_RGB565: to the first switch, grouped with the other parallel-bus formats:

static int mx6s_configure_csi(struct mx6s_csi_dev *csi_dev)
{
	struct v4l2_pix_format *pix = &csi_dev->pix;
	u32 cr1, cr18;
	u32 width;

	if (pix->field == V4L2_FIELD_INTERLACED) {
		csi_deinterlace_enable(csi_dev, true);
		csi_buf_stride_set(csi_dev, csi_dev->pix.width);
		csi_deinterlace_mode(csi_dev, csi_dev->std);
	} else {
		csi_deinterlace_enable(csi_dev, false);
		csi_buf_stride_set(csi_dev, 0);
	}

	switch (csi_dev->fmt->pixelformat) {
	case V4L2_PIX_FMT_YUV32:
	case V4L2_PIX_FMT_SBGGR8:
		width = pix->width;
		break;
	case V4L2_PIX_FMT_UYVY:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_YUYV:
		if (csi_dev->csi_mux_mipi == true)
			width = pix->width;
		else
			/* For parallel 8-bit sensor input */
			width = pix->width * 2;
		break;
	default:
		pr_debug("   case not supported\n");
		return -EINVAL;
	}
	csi_set_imagpara(csi_dev, width, pix->height);

	if (csi_dev->csi_mux_mipi == true) {
		cr1 = csi_read(csi_dev, CSI_CSICR1);
		cr1 &= ~BIT_GCLK_MODE;
		csi_write(csi_dev, cr1, CSI_CSICR1);

		cr18 = csi_read(csi_dev, CSI_CSICR18);
		cr18 &= BIT_MIPI_DATA_FORMAT_MASK;
		cr18 |= BIT_DATA_FROM_MIPI;

		switch (csi_dev->fmt->pixelformat) {
		case V4L2_PIX_FMT_UYVY:
		case V4L2_PIX_FMT_YUYV:
			cr18 |= BIT_MIPI_DATA_FORMAT_YUV422_8B;
			break;
		case V4L2_PIX_FMT_SBGGR8:
			cr18 |= BIT_MIPI_DATA_FORMAT_RAW8;
			break;
		default:
			pr_debug("   fmt not supported\n");
			return -EINVAL;
		}

		csi_write(csi_dev, cr18, CSI_CSICR18);
	}
	return 0;
}

Building the kernel modules

KERNELDIR := /home/awei/liunx/IMX6ULL/linux/linux-imx-rel_imx_4.1.15_2.1.0_ga

CURRENT_PATH := $(shell pwd)

obj-m := ov5640.o mx6s_capture.o

build: kernel_modules

kernel_modules:
	$(MAKE) -C $(KERNELDIR) M=$(CURRENT_PATH) modules

clean:
	$(MAKE) -C $(KERNELDIR) M=$(CURRENT_PATH) clean
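
A note on the build: this Makefile relies on ARCH and CROSS_COMPILE already being set up for the ARM toolchain (for example exported in the shell environment, the same way the kernel itself was built); with those in place, make produces ov5640.ko and mx6s_capture.ko.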

Test application

Here I directly use the application code provided by ALIENTEK:

/***************************************************************
 Copyright © ALIENTEK Co., Ltd. 1998-2021. All rights reserved.
 File        : v4l2_camera.c
 Author      : Deng Tao
 Version     : V1.0
 Description : V4L2 camera application programming example
 Others      : none
 Forum       : www.openedv.com
 Log         : initial version V1.0, created by Deng Tao on 2021/7/09
 ***************************************************************/

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <string.h>
#include <errno.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <linux/fb.h>
#include <time.h>
#include <linux/rtc.h>
#include <sys/time.h>

#define FB_DEV "/dev/fb0"   // LCD device node
#define FRAMEBUFFER_COUNT 3 // number of frame buffers
#define RTC_DEV "/dev/rtc0" // RTC device node
/*** camera pixel format and its description ***/
typedef struct camera_format
{
    unsigned char description[32]; // description string
    unsigned int pixelformat;      // pixel format
} cam_fmt;

/*** information describing one frame buffer ***/
typedef struct cam_buf_info
{
    unsigned short *start; // frame buffer start address
    unsigned long length;  // frame buffer length
} cam_buf_info;

unsigned short timeBUuffer[1024 * 64 * 2]; // pixel data for a time overlay (unused here)

static int width;                          // LCD width
static int height;                         // LCD height
static unsigned short *screen_base = NULL; // LCD framebuffer base address
static int fb_fd = -1;                     // LCD file descriptor
static int v4l2_fd = -1;                   // camera file descriptor
static cam_buf_info buf_infos[FRAMEBUFFER_COUNT];
static cam_fmt cam_fmts[10];
static int frm_width, frm_height; // video frame width and height


static int fb_dev_init(void)
{
    struct fb_var_screeninfo fb_var = {0};
    struct fb_fix_screeninfo fb_fix = {0};
    unsigned long screen_size;

    /* Open the framebuffer device */
    fb_fd = open(FB_DEV, O_RDWR);
    if (0 > fb_fd)
    {
        fprintf(stderr, "open error: %s: %s\n", FB_DEV, strerror(errno));
        return -1;
    }

    /* Get the framebuffer device information */
    ioctl(fb_fd, FBIOGET_VSCREENINFO, &fb_var);
    ioctl(fb_fd, FBIOGET_FSCREENINFO, &fb_fix);

    screen_size = fb_fix.line_length * fb_var.yres;
    width = fb_var.xres;
    height = fb_var.yres;

    /* Map the framebuffer into user space */
    screen_base = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, fb_fd, 0);
    if (MAP_FAILED == (void *)screen_base)
    {
        perror("mmap error");
        close(fb_fd);
        return -1;
    }

    /* Clear the LCD (memset writes single bytes, so only the low byte of 0xF800 is used) */
    memset(screen_base, 0xF800, screen_size);
    return 0;
}

static int v4l2_dev_init(const char *device)
{
    struct v4l2_capability cap = {0};

    /* Open the camera device */
    v4l2_fd = open(device, O_RDWR);
    if (0 > v4l2_fd)
    {
        fprintf(stderr, "open error: %s: %s\n", device, strerror(errno));
        return -1;
    }

    /* Query the device capabilities */
    ioctl(v4l2_fd, VIDIOC_QUERYCAP, &cap);

    /* Check that this is a video capture device */
    if (!(V4L2_CAP_VIDEO_CAPTURE & cap.capabilities))
    {
        fprintf(stderr, "Error: %s: No capture video device!\n", device);
        close(v4l2_fd);
        return -1;
    }

    return 0;
}

static void v4l2_enum_formats(void)
{
    struct v4l2_fmtdesc fmtdesc = {0};

    /* Enumerate all pixel formats supported by the camera, with descriptions */
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while (0 == ioctl(v4l2_fd, VIDIOC_ENUM_FMT, &fmtdesc))
    {

        // store each enumerated format and its description in the array
        cam_fmts[fmtdesc.index].pixelformat = fmtdesc.pixelformat;
        strcpy(cam_fmts[fmtdesc.index].description, fmtdesc.description);
        fmtdesc.index++;
    }
}

static void v4l2_print_formats(void)
{
    struct v4l2_frmsizeenum frmsize = {0};
    struct v4l2_frmivalenum frmival = {0};
    int i;

    frmsize.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frmival.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    for (i = 0; cam_fmts[i].pixelformat; i++)
    {

        printf("format<0x%x>, description<%s>\n", cam_fmts[i].pixelformat,
               cam_fmts[i].description);

        /* Enumerate all capture resolutions supported for this format */
        frmsize.index = 0;
        frmsize.pixel_format = cam_fmts[i].pixelformat;
        frmival.pixel_format = cam_fmts[i].pixelformat;
        while (0 == ioctl(v4l2_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize))
        {

            printf("size<%d*%d> ",
                   frmsize.discrete.width,
                   frmsize.discrete.height);
            frmsize.index++;

            /* Enumerate the capture frame rates for this resolution */
            frmival.index = 0;
            frmival.width = frmsize.discrete.width;
            frmival.height = frmsize.discrete.height;
            while (0 == ioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival))
            {

                printf("<%dfps>", frmival.discrete.denominator /
                                      frmival.discrete.numerator);
                frmival.index++;
            }
            printf("\n");
        }
        printf("\n");
    }
}

static int v4l2_set_format(void)
{
    struct v4l2_format fmt = {0};
    struct v4l2_streamparm streamparm = {0};

    /* Set the capture format */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // buffer type
    fmt.fmt.pix.width = width;              // frame width
    fmt.fmt.pix.height = height;            // frame height
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565; // pixel format
    // fmt.fmt.pix.field = V4L2_FIELD_ANY;
    if (0 > ioctl(v4l2_fd, VIDIOC_S_FMT, &fmt))
    {
        fprintf(stderr, "ioctl error: VIDIOC_S_FMT: %s\n", strerror(errno));
        return -1;
    }

    /*** Check whether the driver really accepted RGB565;
    if not, the device does not support the RGB565 pixel format */
    if (V4L2_PIX_FMT_RGB565 != fmt.fmt.pix.pixelformat)
    {
        fprintf(stderr, "Error: the device does not support RGB565 format!\n");
        return -1;
    }

    frm_width = fmt.fmt.pix.width;   // actual frame width
    frm_height = fmt.fmt.pix.height; // actual frame height
    printf("frame size <%d * %d>\n", frm_width, frm_height);

    /* Get the current stream parameters */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(v4l2_fd, VIDIOC_G_PARM, &streamparm);

    /** Check whether the frame rate can be set **/
    if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability)
    {
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 30; // 30fps
        if (0 > ioctl(v4l2_fd, VIDIOC_S_PARM, &streamparm))
        {
            fprintf(stderr, "ioctl error: VIDIOC_S_PARM: %s\n", strerror(errno));
            return -1;
        }
    }

    return 0;
}

static int v4l2_init_buffer(void)
{
    struct v4l2_requestbuffers reqbuf = {0};
    struct v4l2_buffer buf = {0};

    /* Request frame buffers */
    reqbuf.count = FRAMEBUFFER_COUNT; // number of frame buffers
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    if (0 > ioctl(v4l2_fd, VIDIOC_REQBUFS, &reqbuf))
    {
        fprintf(stderr, "ioctl error: VIDIOC_REQBUFS: %s\n", strerror(errno));
        return -1;
    }

    /* Memory-map the buffers */
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++)
    {

        ioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf);
        buf_infos[buf.index].length = buf.length;
        buf_infos[buf.index].start = mmap(NULL, buf.length,
                                          PROT_READ | PROT_WRITE, MAP_SHARED,
                                          v4l2_fd, buf.m.offset);
        if (MAP_FAILED == buf_infos[buf.index].start)
        {
            perror("mmap error");
            return -1;
        }
    }

    /* Queue the buffers */
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++)
    {

        if (0 > ioctl(v4l2_fd, VIDIOC_QBUF, &buf))
        {
            fprintf(stderr, "ioctl error: VIDIOC_QBUF: %s\n", strerror(errno));
            return -1;
        }
    }

    return 0;
}

static int v4l2_stream_on(void)
{
    /* Start streaming: the camera begins capturing data */
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (0 > ioctl(v4l2_fd, VIDIOC_STREAMON, &type))
    {
        fprintf(stderr, "ioctl error: VIDIOC_STREAMON: %s\n", strerror(errno));
        return -1;
    }

    return 0;
}

static void v4l2_read_data(void)
{
    struct v4l2_buffer buf = {0};
    unsigned short *base;
    unsigned short *start;
    int min_w, min_h;
    int j,i;

    if (width > frm_width)
        min_w = frm_width;
    else
        min_w = width;
    if (height > frm_height)
        min_h = frm_height;
    else
        min_h = height;

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    for ( ; ; ) {

        for(buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++) {

            ioctl(v4l2_fd, VIDIOC_DQBUF, &buf);     // dequeue a filled buffer
            for (j = 0, base=screen_base, start=buf_infos[buf.index].start;
                        j < min_h; j++) {

                memcpy(base, start, min_w * 2); // RGB565: 2 bytes per pixel
                #if 0
                for (i = 0; i < 1024;i++)
                {
                    printf("%d\r\n", base[i]);
                }
                #endif
                base += width;      // next line on the LCD
                start += frm_width; // next line of the captured frame
            }

            // after processing the frame, queue the buffer again and repeat
            ioctl(v4l2_fd, VIDIOC_QBUF, &buf);
        }
    }
}

int main(int argc, char *argv[])
{

    if (2 != argc)
    {
        fprintf(stderr, "Usage: %s <video_dev>\n", argv[0]);
        exit(EXIT_FAILURE);
    }

    /* Initialize the LCD */
    if (fb_dev_init())
        exit(EXIT_FAILURE);

    /* Initialize the camera */
    if (v4l2_dev_init(argv[1]))
        exit(EXIT_FAILURE);

    /* Enumerate all formats and print the supported resolutions and frame rates */
    v4l2_enum_formats();
    v4l2_print_formats();

    /* Set the capture format */
    if (v4l2_set_format())
        exit(EXIT_FAILURE);

    /* Initialize the frame buffers: request, mmap, queue */
    if (v4l2_init_buffer())
        exit(EXIT_FAILURE);

    /* Start video capture */
    if (v4l2_stream_on())
        exit(EXIT_FAILURE);

    /* Read data: dequeue buffers */
    v4l2_read_data(); // loops inside, capturing frames and showing them on the LCD

    exit(EXIT_SUCCESS);
}

Cross-compile the application into an executable, copy the .ko files and the executable to the board, load the modules and run the program with the camera's video device node as its argument. The result:

[Screenshot: module loading and application output — cd2ae797fc914bbdb4bc0e331b6f3086.png]

The modules load successfully. The printed information shows that the camera now offers a 1024*600 mode in RGB565 format at 30 fps. The picture on the LCD looks like this:

[Photo: LCD showing the picture split into many small tiles — da2664ff940e4cd380bb4757719d2032.jpeg]

The screen is split into many small copies of the picture, which is not what I expected. After some checking I found the cause: when the application calls ioctl(v4l2_fd, VIDIOC_S_PARM, &streamparm), the call ends up in ov5640_s_parm() in ov5640.c, and that function reprograms the mode, overwriting the settings made earlier, so the display comes out wrong:

ret = ov5640_change_mode(frame_rate,
				a->parm.capture.capturemode);

Commenting it out fixes this:

/*ret = ov5640_change_mode(frame_rate,
				a->parm.capture.capturemode);*/

That is still not the end of it. After reloading the driver the picture is no longer tiled, but it keeps drifting across the screen.

[Video: picture drifting across the LCD — QQ视频20230910152532]

The fix is to adjust the sensor's pad drive capability via ov5640_driver_capability(1); the call is located in ov5640_init_mode():

static int ov5640_init_mode(void) /* initialize the OV5640 mode */
{
	struct reg_value *pModeSetting = NULL;
	int ArySize = 0, retval = 0;

	ov5640_soft_reset(); /* software reset */

	pModeSetting = ov5640_global_init_setting;
	ArySize = ARRAY_SIZE(ov5640_global_init_setting);
	retval = ov5640_download_firmware(pModeSetting, ArySize); /* program the OV5640 registers per the datasheet */
	if (retval < 0)
		goto err;

	pModeSetting = ov5640_init_setting_30fps_VGA;
	ArySize = ARRAY_SIZE(ov5640_init_setting_30fps_VGA);
	retval = ov5640_download_firmware(pModeSetting, ArySize);
	if (retval < 0)
		goto err;

	/* change driver capability to 2x according to validation board.
	 * if the image is not stable, please increase the driver strength.
	 */
	ov5640_driver_capability(1);			/* pad drive capability */
	ov5640_set_bandingfilter();
	ov5640_set_AE_target(AE_Target);
	ov5640_set_night_mode(night_mode);

	/* skip 9 vsync: start capture at 10th vsync */
	msleep(300);

	/* turn off night mode */
	night_mode = 0;

	/* auto focus */
	//ov5640_auto_focus();		/* auto focus (disabled) */
err:
	return retval;
}
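
For context, as far as I recall ov5640_driver_capability() only touches the pad drive-strength bits in register 0x302c; a larger argument means stronger drive on the output pads, matching the "increase the driver strength" hint in the stock comment above. Roughly (a reconstruction from memory, not the verbatim source):

static void ov5640_driver_capability(int strength)
{
	u8 temp = 0;

	ov5640_read_reg(0x302c, &temp);

	temp &= ~0xc0;			/* clear bits [7:6] */
	temp |= strength << 6;		/* 0..3 -> 1x..4x pad drive strength */

	ov5640_write_reg(0x302c, temp);
}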

Rebuild, reload the modules, and the port is complete. The result:

[Video: final result, stable camera preview on the LCD — QQ视频20230910161125]

Summary

I only added this one mode: 1024*600, 30 fps, RGB565; nothing else was adapted. If your LCD resolution differs from mine, follow the earlier steps and make the corresponding changes.
