orangepi-zero2: real-time display of a USB camera image on a TFT-LCD - 2

Environment used in this article:
Host PC: Ubuntu 18.04.5 LTS
Board: orangepi-zero2
Cross compiler: aarch64-none-linux-gnu-
Camera: any ordinary USB camera
Display: openmv 1.8-inch SPI LCD

For how to use the camera, see:
https://blog.csdn.net/qq_37280428/article/details/119683577?spm=1001.2014.3001.5501

1. USB camera driver

A previous article already covered how to use the LCD on the orangepi-zero2, so this one focuses on capturing data from the USB camera and processing it. The data produced by the USB camera is in YUV format, so it cannot be displayed on the LCD directly: as the earlier article (orangepi-zero2: real-time display of a USB camera image) showed, the LCD driver framework takes RGB565 input. Without dwelling too much on theory:

Talk is cheap, show me the code.
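
Before the full program, here is a minimal sketch (illustration only, not part of the original code) of the per-pixel conversion: one YUYV macro-pixel (Y0 U Y1 V) carries two pixels that share the same chroma; each pixel is expanded to RGB888 with the same coefficients the Yuv422_toRGB function below uses, and the result is packed into 16-bit RGB565.

/* Sketch: convert one Y sample plus its shared U/V to a single RGB565 pixel. */
static unsigned short yuyv_to_rgb565(int y, int u, int v)
{
	int r = y + (int)(1.370705 * (v - 128));
	int g = y - (int)(0.698001 * (v - 128)) - (int)(0.337633 * (u - 128));
	int b = y + (int)(1.732446 * (u - 128));

	/* clamp to 0..255 before packing */
	r = r < 0 ? 0 : (r > 255 ? 255 : r);
	g = g < 0 ? 0 : (g > 255 ? 255 : g);
	b = b < 0 ? 0 : (b > 255 ? 255 : b);

	/* 5 bits red, 6 bits green, 5 bits blue */
	return (unsigned short)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
}

/* For a 4-byte group p = {Y0, U, Y1, V} of a YUYV frame:
 *   pixel0 = yuyv_to_rgb565(p[0], p[1], p[3]);
 *   pixel1 = yuyv_to_rgb565(p[2], p[1], p[3]);
 */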

2. The code

The program in this article has been improved a little; three approaches are implemented:

  1. Use OpenCV library functions for both the YUV-to-RGB conversion and the scaling.
  2. Use OpenCV library functions only for the scaling; convert YUV to RGB with our own code.
  3. Do both the scaling and the YUV-to-RGB conversion with our own code.

At the top of the program there are three macro definitions, one for each of these scenarios.
/*
*The following macros select between the different approaches:
*	YUV2RGB_OPENCV_ALL: use OpenCV for both the YUV-to-RGB conversion and the scaling.
*	YUV2RGB_OPENCV_RESIZE: use OpenCV only for the image scaling.
*	YUV2RGB_C: do not use OpenCV at all; no dependency on the OpenCV libraries.
*/
//Only one of the following macros may be enabled at a time
//#define YUV2RGB_OPENCV_ALL 1
//#define YUV2RGB_OPENCV_RESIZE 1
#define YUV2RGB_C 1

Just enable whichever one of the macros matches your situation. For example:
If the OpenCV libraries are not available, enable only the following macro:

#define YUV2RGB_C 1

If the OpenCV libraries are available, either of the other two macros can be used instead.

/**
 * A simple USB camera capture and display program.
 *
 * 赵先森 Mr.Z
 * https://blog.csdn.net/qq_37280428
 *
 * This program captures YUV data from a USB camera and
 * displays it in real time on an SPI-LCD screen.
 */

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <errno.h>
#include <time.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <sys/utsname.h>
#include <linux/videodev2.h>
#include <linux/version.h>
#include <linux/fb.h>

typedef unsigned char u8;

/*
*The following macros select between the different approaches:
*	YUV2RGB_OPENCV_ALL: use OpenCV for both the YUV-to-RGB conversion and the scaling.
*
*	YUV2RGB_OPENCV_RESIZE: use OpenCV only for the image scaling.
*
*	YUV2RGB_C: do not use OpenCV at all; no dependency on the OpenCV libraries.
*/

//Only one of the following macros may be enabled at a time
//#define YUV2RGB_OPENCV_ALL 1
//#define YUV2RGB_OPENCV_RESIZE 1
#define YUV2RGB_C 1


//the OpenCV header must be included when OpenCV is used
#if defined(YUV2RGB_OPENCV_ALL) || defined(YUV2RGB_OPENCV_RESIZE)

#include <opencv2/opencv.hpp>
using namespace cv;

#endif //YUV2RGB_OPENCV_ALL || YUV2RGB_OPENCV_RESIZE



//pack 8-bit R, G, B values into a 16-bit RGB565 pixel (5 bits R, 6 bits G, 5 bits B)
#define RGB2RGB565(R, G, B) (((((R) >> 3) << 11) | (((G) >> 2) << 5) | ((B) >> 3)) & 0xFFFF)

#define WIDTH 640
#define HEIGHT 480
#define FPS 25

//camera capture initialization
#define V4L_BUFFERS_DEFAULT 4 //4 buffered frames
#define V4L_BUFFERS_MAX 8     //at most 8

unsigned int nbufs = V4L_BUFFERS_DEFAULT;

//capture buffers shared with main():
void *mem0[V4L_BUFFERS_MAX];
struct v4l2_buffer buf0;

double what_time_is_it_now()
{
	struct timeval time;
	if (gettimeofday(&time, NULL))
	{
		return 0;
	}
	return (double)time.tv_sec + (double)time.tv_usec * .000001;
}

//returns the device fd on success, -1 on error
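/*
 * Overview of the steps below: open /dev/video0, query its capabilities
 * (VIDIOC_QUERYCAP), set the format to 640x480 YUYV (VIDIOC_S_FMT), set the
 * frame rate (VIDIOC_G_PARM / VIDIOC_S_PARM), request and mmap the capture
 * buffers (VIDIOC_REQBUFS / VIDIOC_QUERYBUF), queue them (VIDIOC_QBUF) and
 * finally start streaming (VIDIOC_STREAMON).
 */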
int usb_video_init()
{

	//open and initialize the video device:
	struct v4l2_capability cap;
	int ret, dev;

	dev = open("/dev/video0", O_RDWR);
	if (dev < 0)
	{
		printf("dev open fail ...\n");
		return -1;
	}

	memset(&cap, 0, sizeof(cap));
	ret = ioctl(dev, VIDIOC_QUERYCAP, &cap);
	if (ret < 0)
	{
		printf("Unable to query buffer cap1...\n");
		close(dev);
		return -1;
	}
	else
	{
		printf("driver:\t\t%s\n", cap.driver);
		printf("card:\t\t%s\n", cap.card);
		printf("bus_info:\t%s\n", cap.bus_info);
		printf("version:\t%d\n", cap.version);
		printf("capabilities:\t%x\n", cap.capabilities);
	}
	//set the video format:
	struct v4l2_format fmt;

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = WIDTH;
	fmt.fmt.pix.height = HEIGHT;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

	ret = ioctl(dev, VIDIOC_S_FMT, &fmt);
	if (ret < 0)
	{
		printf("Unable to query buffer cap2...\n");
		close(dev);
		return -1;
	}
	else
	{
		printf("fmt.type:\t\t%d\n", fmt.type);
		printf("pix.pixelformat:\t%c%c%c%c\n", fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF, (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF);
		printf("pix.height:\t\t%d\n", fmt.fmt.pix.height);
		printf("pix.width:\t\t%d\n", fmt.fmt.pix.width);
		printf("pix.field:\t\t%d\n", fmt.fmt.pix.field);
	}

	//set the frame rate:
	struct v4l2_streamparm parm;

	memset(&parm, 0, sizeof(parm));
	parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	ret = ioctl(dev, VIDIOC_G_PARM, &parm);
	if (ret < 0)
	{
		printf("Unable to query buffer parm1...\n");
		close(dev);
		return -1;
	}
	parm.parm.capture.timeperframe.numerator = 1;
	parm.parm.capture.timeperframe.denominator = FPS;

	ret = ioctl(dev, VIDIOC_S_PARM, &parm);
	if (ret < 0)
	{
		printf("Unable to query buffer parm2...\n");
		close(dev);
		return -1;
	}
	printf("Fps:\t\t%d\n", parm.parm.capture.timeperframe.denominator);

	//request the capture buffers:
	struct v4l2_requestbuffers rb;

	memset(&rb, 0, sizeof(rb));
	rb.count = nbufs;
	rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	rb.memory = V4L2_MEMORY_MMAP;

	ret = ioctl(dev, VIDIOC_REQBUFS, &rb);
	if (ret < 0)
	{
		printf("Unable to query buffer rb...\n");
		close(dev);
		return -1;
	}
	nbufs = rb.count;

	//mmap the buffers:
	for (int i = 0; i < nbufs; ++i)
	{
		memset(&buf0, 0, sizeof(buf0));
		buf0.index = i;
		buf0.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf0.memory = V4L2_MEMORY_MMAP;
		ret = ioctl(dev, VIDIOC_QUERYBUF, &buf0);
		if (ret < 0)
		{
			printf("Unable to query buffer buf01 %d...\n", i);
			close(dev);
			return -1;
		}

		printf("leng: %d  \n", buf0.length);

		mem0[i] = mmap(0, buf0.length, PROT_READ | PROT_WRITE, MAP_SHARED, dev, buf0.m.offset);
		if (MAP_FAILED == mem0[i])
		{
			printf("mem[%d] err...\n", i);
			close(dev);
			return -1;
		}
	}

	//queue the buffers:

	for (int i = 0; i < nbufs; ++i)
	{
		memset(&buf0, 0, sizeof(buf0));
		buf0.index = i;
		buf0.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf0.memory = V4L2_MEMORY_MMAP;
		ret = ioctl(dev, VIDIOC_QBUF, &buf0);
		if (ret < 0)
		{
			printf("Unable to query buffer buf02 %d...\n", i);
			close(dev);
			return -1;
		}
	}

	//start the video stream:
	int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	ret = ioctl(dev, VIDIOC_STREAMON, &type);
	if (ret < 0)
	{
		printf("Unable to query buffer type...\n");
		close(dev);
		return -1;
	}
	return dev;
}
#ifdef YUV2RGB_OPENCV_ALL
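/* Scheme 1: wrap the YUYV frame in a Mat, convert it to RGB888 with
 * cvtColor(), shrink it to the LCD resolution with resize(), then pack
 * each pixel into RGB565 and write it into the framebuffer. */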

static void display_pic_opencv(void *base, unsigned int width, unsigned int height, unsigned int stride, uint8_t *frame)
{
	unsigned int xcoi, ycoi;
	unsigned short rgb565_color = 0;
	unsigned int data_888 = 0;
	unsigned short R, G, B;
	unsigned int iPixelAddr = 0;
	Mat pic_resize;

	unsigned short *screen_base = (unsigned short *)base;

	//create an empty RGB buffer
	Mat buf_rgb(480, 640, CV_8UC3);
	//YUYV uses 2 bytes per pixel, so wrap the frame as a 2-channel Mat
	Mat buf_yuv(480, 640, CV_8UC2, (uint8_t *)frame);
	//convert the YUV data to RGB888
	cvtColor(buf_yuv, buf_rgb, COLOR_YUV2RGB_YUYV); //the conversion code depends on the exact YUV layout
	//scale 640 x 480 down to 160 x 128
	resize(buf_rgb, pic_resize, Size(160, 128), 0, 0, INTER_LANCZOS4); //resize to the LCD resolution
	unsigned int num = 0;

	for (ycoi = 0; ycoi < height; ycoi++)
	{
		for (xcoi = 0; xcoi < width; xcoi += 1)
		{
			int i = xcoi + ycoi * width;

			R = pic_resize.data[3 * i];
			G = pic_resize.data[3 * i + 1];
			B = pic_resize.data[3 * i + 2];
			rgb565_color = RGB2RGB565(R, G, B);

			screen_base[i] = rgb565_color;
		}
		iPixelAddr += width * 2;
	}
}
#endif



#ifdef YUV2RGB_C
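/* Scheme 3: the frame passed in is already RGB888 at the LCD resolution
 * (converted and downscaled by Yuv422_toRGB); here it only needs to be
 * packed into RGB565 and written into the framebuffer. */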

static void display_pic(void *base, unsigned int width, unsigned int height, unsigned int stride, uint8_t *frame)
{
	unsigned int xcoi, ycoi;
	unsigned short rgb565_color = 0;
	unsigned int data_888 = 0;
	unsigned short R, G, B;
	unsigned int iPixelAddr = 0;

	unsigned short *screen_base = (unsigned short *)base;
	unsigned int num = 0;
	for (ycoi = 0; ycoi < height; ycoi++)
	{
		for (xcoi = 0; xcoi < width; xcoi += 1)
		{
			int i = xcoi + ycoi * width;

			R = frame[3 * i];
			G = frame[3 * i + 1];
			B = frame[3 * i + 2];
			rgb565_color = RGB2RGB565(R, G, B);

			screen_base[i] = rgb565_color;
		}
		iPixelAddr += width * 2;
	}
}

#endif

#ifdef YUV2RGB_OPENCV_RESIZE
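/* Scheme 2: the frame passed in is already RGB888 at 640x480 (converted by
 * Yuv422_toRGB); OpenCV resize() shrinks it to the LCD resolution before it
 * is packed into RGB565 and written into the framebuffer. */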

static void display_pic_open_re(void *base, unsigned int width, unsigned int height, unsigned int stride, uint8_t *frame)
{
	unsigned int xcoi, ycoi;
	unsigned short rgb565_color = 0;
	unsigned int data_888 = 0;
	unsigned short R, G, B;
	unsigned int iPixelAddr = 0;
	Mat pic_resize;

	unsigned short *screen_base = (unsigned short *)base;

	//wrap the already-converted RGB888 frame in a Mat
	Mat buf_rgb(480, 640, CV_8UC3, (uint8_t *)frame);

	resize(buf_rgb, pic_resize, Size(160, 128), 0, 0, INTER_LANCZOS4); //resize to the LCD resolution
	unsigned int num = 0;

	for (ycoi = 0; ycoi < height; ycoi++)
	{
		for (xcoi = 0; xcoi < width; xcoi += 1)
		{
			int i = xcoi + ycoi * width;

			R = pic_resize.data[3 * i];
			G = pic_resize.data[3 * i + 1];
			B = pic_resize.data[3 * i + 2];
			rgb565_color = RGB2RGB565(R, G, B);

			screen_base[i] = rgb565_color;
		}
		iPixelAddr += width * 2;
	}
}
#endif
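
/* Convert a YUYV422 frame to RGB888 while (optionally) downscaling it by
 * nearest-neighbour sampling. Only shrinking is supported; the conversion
 * uses the usual integer approximation of the YCbCr-to-RGB formula. */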

void Yuv422_toRGB(uint8_t *yuv, uint8_t *rgb, int source_width, int source_height, int target_width, int target_height)
{
    float scale_x;
    float scale_y;
 
    if (target_width >= source_width) //only shrinking is supported, no enlarging
    {
        scale_x = 1;
    }
    else
    {
        scale_x = (float)source_width / target_width;
    }
 
    if (target_height >= source_height) //only shrinking is supported, no enlarging
    {
        scale_y = 1;
    }
    else
    {
        scale_y = (float)source_height / target_height;
    }
 
    float y_t = 0, x_t = 0; //sampling thresholds deciding whether a source row/column is used
 
    int y_t_count = 0; //current target row
    int x_t_count = 0; //current target column
 
    int r, g, b;
    int y0, y1, u, v;
 
    int p, p_t;   //source and target indices
    int p0, p_t0; //indices at the start of each row
 
    scale_x = scale_x * 4; //each step handles 2 pixels, i.e. 4 bytes
 
    for (int h = 0; h < source_height; h++)
    {
        if (h >= y_t) //this source row is sampled
        {
 
            y_t += scale_y; //advance the row sampling threshold
 
            p0 = h * source_width * 2;           //start index into the input row; YUV422 uses 2 bytes per pixel
            p_t0 = y_t_count * target_width * 3; //start index into the output row; RGB888 uses 3 bytes per pixel
 
            x_t = 0;
            x_t_count = 0; //new row: reset the column sampling state
 
            for (int w = 0; w < source_width * 2; w += 4) //take 4 bytes (two pixels) per iteration
            {
                if (w >= x_t) //this source column is sampled
                {
                    x_t += scale_x; //advance the column sampling threshold
 
                    p = p0 + w;                 //input index
                    p_t = p_t0 + x_t_count * 3; //output index
 
                    y0 = (int)yuv[p]; //YUYV
                    p++;
                    u = (int)yuv[p];
                    p++;
                    y1 = (int)yuv[p];
                    p++;
                    v = (int)yuv[p];
 
                    r = y0 + (int)(1.370705 * (v - 128));
                    g = y0 - (int)(0.698001 * (v - 128)) - (int)(0.337633 * (u - 128));
                    b = y0 + (int)(1.732446 * (u - 128));
 
                    rgb[p_t] = r > 255 ? 0xFF : (r < 0 ? 0x00 : (unsigned char)r); //clamp R to 0..255
                    p_t++;
                    rgb[p_t] = g > 255 ? 0xFF : (g < 0 ? 0x00 : (unsigned char)g); //clamp G to 0..255
                    p_t++;
                    rgb[p_t] = b > 255 ? 0xFF : (b < 0 ? 0x00 : (unsigned char)b); //clamp B to 0..255
                    
 
                    x_t_count++; //one output pixel done
                    if (x_t_count >= target_width)
                        break; //stop once the target width is reached
 
                    //second pixel of the pair
                    r = y1 + (int)(1.370705 * (v - 128));
                    g = y1 - (int)(0.698001 * (v - 128)) - (int)(0.337633 * (u - 128));
                    b = y1 + (int)(1.732446 * (u - 128));
 
                    p_t++;
 
                    rgb[p_t] = r > 255 ? 0xFF : (r < 0 ? 0x00 : (unsigned char)r); //clamp R to 0..255
                    p_t++;
                    rgb[p_t] = g > 255 ? 0xFF : (g < 0 ? 0x00 : (unsigned char)g); //clamp G to 0..255
                    p_t++;
                    rgb[p_t] = b > 255 ? 0xFF : (b < 0 ? 0x00 : (unsigned char)b); //clamp B to 0..255
                    
 
                    x_t_count++;
                    if (x_t_count >= target_width)
                        break; //stop once the target width is reached
                }
            }
 
            y_t_count++;
            if (y_t_count >= target_height)
                break; //stop once the target height is reached
        }
    }
}

int main(int argc, char **argv)
{
	int dev;

	//initialize the USB camera
	dev = usb_video_init();
	while (dev < 0)
	{
		printf("usb cam init err...\n");
		sleep(5);
		dev = usb_video_init();
	}
	printf("usb cam init ok..............\n");

	struct fb_var_screeninfo fb_var = {0};
	struct fb_fix_screeninfo fb_fix = {0};
	unsigned int screensize;
	int fd;
	static void *base = NULL;

	// open the LCD framebuffer
	fd = open("/dev/fb1", O_RDWR);

	if (fd < 0)
	{
		printf("Error: Failed to open /dev/fb0 device.\n");
		return fd;
	}

	// get the framebuffer device's parameters
	ioctl(fd, FBIOGET_VSCREENINFO, &fb_var);
	ioctl(fd, FBIOGET_FSCREENINFO, &fb_fix);

	// mmap the framebuffer
	screensize = fb_var.yres * fb_fix.line_length;
	//printf("lie: %d   hang:  %d   %d   %d\n", fb_var.yres, fb_var.xres, fb_fix.line_length, fb_var.bits_per_pixel);
	base = mmap(NULL, screensize, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
	if (MAP_FAILED == base)
	{
		close(fd);
		perror("mmap error");
		return -1;
	}

	memset(base, 0x00, screensize); // clear the display memory

	uint8_t *frame_yuv, *frame_rgb;
	int frame_length = 0;
	int frame_size = 640 * 480 * 3;
	frame_yuv = (uint8_t *)malloc(frame_size);
	if (NULL == frame_yuv)
	{
		perror("malloc err...:");
	}
	//rgb888
	frame_rgb = (uint8_t *)malloc(frame_size);
	if (NULL == frame_rgb)
	{
		perror("malloc err...:");
	}

	//display loop
	for (;;)
	{

		double start, end;
		start = what_time_is_it_now();

		/*receive an image frame from the camera: start*/
		memset(&buf0, 0, sizeof(buf0));
		buf0.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf0.memory = V4L2_MEMORY_MMAP;
		ioctl(dev, VIDIOC_DQBUF, &buf0);

		//copy the current camera frame into the local buffer
		memcpy(frame_yuv, (u8 *)mem0[buf0.index], buf0.bytesused);
		frame_length = buf0.bytesused;
		ioctl(dev, VIDIOC_QBUF, &buf0);
		/*receive an image frame from the camera: end*/

		#ifdef YUV2RGB_OPENCV_ALL

			//conversion and scaling are both done with OpenCV
			display_pic_opencv(base, fb_var.xres, fb_var.yres, fb_fix.line_length, frame_yuv);

		#elif defined(YUV2RGB_OPENCV_RESIZE)

			//YUV-to-RGB in C, scaling with OpenCV; the result looks slightly better
			Yuv422_toRGB(frame_yuv, frame_rgb, WIDTH, HEIGHT, 640, 480);
			display_pic_open_re(base, fb_var.xres, fb_var.yres, fb_fix.line_length, frame_rgb);

		#elif defined(YUV2RGB_C)

			//no OpenCV at all; everything is done in plain C. The result is noticeably worse (mainly because of the simple scaling)
			Yuv422_toRGB(frame_yuv, frame_rgb, WIDTH, HEIGHT, 160, 128);
			display_pic(base, fb_var.xres, fb_var.yres, fb_fix.line_length, frame_rgb);

		#endif
		printf("time: %f ms\n", (what_time_is_it_now() - start) * 1000); //per-frame processing time

	}
	free(frame_yuv);
	free(frame_rgb);
	return 0;
}

Acknowledgements
The function below is based on another blogger's article, which I can no longer find, so special thanks to its author here.

void Yuv422_toRGB(uint8_t *yuv, uint8_t *rgb, int source_width, int source_height, int target_width, int target_height)