YUYV to RGB Conversion
I. YUV Formats
Y carries luminance (brightness); U and V carry chrominance (color). A typical 640*480 camera delivers 4:2:2 data, which encoders such as JPEG or H.264 can then compress.
1. YUV 4:4:4
All three YUV channels are sampled at the same rate, so every pixel in the image carries complete information for all three components (each component is usually 8 bits). After 8-bit quantization, each uncompressed pixel occupies 3 bytes.
The four pixels below are: [Y0 U0 V0] [Y1 U1 V1] [Y2 U2 V2] [Y3 U3 V3]
The stored byte stream is: Y0 U0 V0 Y1 U1 V1 Y2 U2 V2 Y3 U3 V3
2. YUV 4:2:2 (YUYV)
Each chroma channel is sampled at half the rate of the luma channel, so the horizontal chroma resolution is half that of 4:4:4. For uncompressed 8-bit data, each macro-pixel formed by two horizontally adjacent pixels occupies 4 bytes.
The four pixels below are: [Y0 U0 V0] [Y1 U1 V1] [Y2 U2 V2] [Y3 U3 V3]
The stored byte stream is: Y0 U0 Y1 V1 Y2 U2 Y3 V3
The pixels reconstructed from it are: [Y0 U0 V1] [Y1 U0 V1] [Y2 U2 V3] [Y3 U2 V3]
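To make this packing concrete, here is a minimal sketch (the function name is illustrative, not part of the original code) that fetches the Y, U and V values of pixel x in one YUYV row, following the mapping above where each horizontal pair of pixels shares one U and one V:
/* Illustrative sketch: fetch Y/U/V for pixel x in a YUYV row.
   "row" points to the start of the row (2 bytes per pixel). */
void yuyv_pixel(const unsigned char *row, int x,
                unsigned char *Y, unsigned char *U, unsigned char *V)
{
    const unsigned char *pair = row + (x / 2) * 4; /* one macro-pixel: Y0 U0 Y1 V1 */
    *Y = (x % 2 == 0) ? pair[0] : pair[2];
    *U = pair[1];
    *V = pair[3];
}
For example, pixels 0 and 1 both take U from byte 1 and V from byte 3 of the first 4-byte group, exactly as in the mapping shown above.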
3. YUV 4:1:1
4:1:1 chroma subsampling samples the chroma at a 4:1 ratio in the horizontal direction. This is still acceptable for low-end and consumer products. For uncompressed 8-bit video, each macro-pixel formed by four horizontally adjacent pixels occupies 6 bytes.
The four pixels below are: [Y0 U0 V0] [Y1 U1 V1] [Y2 U2 V2] [Y3 U3 V3]
The stored byte stream is: Y0 U0 Y1 Y2 V2 Y3
The pixels reconstructed from it are: [Y0 U0 V2] [Y1 U0 V2] [Y2 U0 V2] [Y3 U0 V2]
4. YUV 4:2:0
4:2:0 does not mean that there is only Y and Cb with no Cr. It means that on each scan line only one chroma component is stored, at a 2:1 sampling rate, and adjacent scan lines store different chroma components: if one line is 4:2:0, the next is 4:0:2, the one after that 4:2:0 again, and so on. For each chroma component the sampling rate is 2:1 both horizontally and vertically, so the overall chroma sampling ratio is 4:1. For uncompressed 8-bit video, each 2x2 macro-pixel (2 rows by 2 columns of adjacent pixels) occupies 6 bytes.
The eight pixels below are: [Y0 U0 V0] [Y1 U1 V1] [Y2 U2 V2] [Y3 U3 V3] [Y5 U5 V5] [Y6 U6 V6] [Y7 U7 V7] [Y8 U8 V8]
The stored byte stream is: Y0 U0 Y1 Y2 U2 Y3 Y5 V5 Y6 Y7 V7 Y8
The pixels reconstructed from it are: [Y0 U0 V5] [Y1 U0 V5] [Y2 U2 V7] [Y3 U2 V7] [Y5 U0 V5] [Y6 U0 V5] [Y7 U2 V7] [Y8 U2 V7]
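To see what the different subsampling schemes cost in memory, the raw sizes for a 640*480 frame at 8 bits per component work out to:
4:4:4: 640*480*3 = 921,600 bytes per frame
4:2:2: 640*480*2 = 614,400 bytes per frame
4:1:1: 640*480*1.5 = 460,800 bytes per frame
4:2:0: 640*480*1.5 = 460,800 bytes per frame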
II. Conversion Formulas
1. Floating-point form, full range (not quantized)
[YUV] -> [RGB]
R = Y + 1.4075 * (V - 128);
G = Y - 0.3455 * (U - 128) - 0.7169 * (V - 128);
B = Y + 1.779 * (U - 128);
Y = 0.299 * R + 0.587 * G + 0.114 * B;
U = (B - Y) / 1.772;
V = (R - Y) / 1.402;    (U and V come out in the range -128..127; add 128 to store them as unsigned bytes)
Or, written as:
Y = 0.299 * R + 0.587 * G + 0.114 * B;
U = -0.169 * R - 0.331 * G + 0.5 * B;
V = 0.5 * R - 0.419 * G - 0.081 * B;
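As a short sketch of how these full-range formulas are applied to a single sample (the helper names are illustrative; U and V are stored as unsigned bytes with a +128 offset, which is why 128 is subtracted):
static unsigned char clamp_u8(double x)
{
    if (x < 0)   return 0;   // clip negative results
    if (x > 255) return 255; // clip overflow
    return (unsigned char)x;
}

// Full-range YUV -> RGB for one sample, using the coefficients above.
void yuv_to_rgb_pixel(unsigned char y, unsigned char u, unsigned char v,
                      unsigned char *r, unsigned char *g, unsigned char *b)
{
    *r = clamp_u8(y + 1.4075 * (v - 128));
    *g = clamp_u8(y - 0.3455 * (u - 128) - 0.7169 * (v - 128));
    *b = clamp_u8(y + 1.779 * (u - 128));
}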
2. Integer form (to reduce computation), full range
R = Y + ((360 * (V - 128)) >> 8);
G = Y - (((88 * (U - 128) + 184 * (V - 128))) >> 8);
B = Y + ((455 * (U - 128)) >> 8);
Y = (77 * R + 150 * G + 29 * B) >> 8;
U = ((-44 * R - 87 * G + 131 * B) >> 8) + 128;
V = ((131 * R - 110 * G - 21 * B) >> 8) + 128;
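A corresponding sketch using only integer arithmetic and the >>8 fixed-point coefficients above (note that right-shifting a negative value is implementation-defined in C, though common compilers treat it as an arithmetic shift, which is what this trick relies on):
// Integer-only full-range YUV -> RGB for one sample.
void yuv_to_rgb_pixel_int(unsigned char y, unsigned char u, unsigned char v,
                          unsigned char *r, unsigned char *g, unsigned char *b)
{
    int du = u - 128, dv = v - 128;
    int ri = y + ((360 * dv) >> 8);
    int gi = y - ((88 * du + 184 * dv) >> 8);
    int bi = y + ((455 * du) >> 8);
    if (ri < 0) ri = 0; if (ri > 255) ri = 255; // clamp to 0..255
    if (gi < 0) gi = 0; if (gi > 255) gi = 255;
    if (bi < 0) bi = 0; if (bi > 255) bi = 255;
    *r = (unsigned char)ri;
    *g = (unsigned char)gi;
    *b = (unsigned char)bi;
}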
3. Quantized (limited-range) formulas (Y in 16..235, U and V in 16..240)
YUV -> RGB
R = 1.164 * Y + 1.596 * V - 222.9;
G = 1.164 * Y - 0.392 * U - 0.813 * V + 135.6;
B = 1.164 * Y + 2.017 * U - 276.8;
RGB -> YUV
Y = 0.257 * R' + 0.504 * G' + 0.098 * B' + 16;
U = -0.148 * R' - 0.291 * G' + 0.439 * B' + 128;
V = 0.439 * R' - 0.368 * G' - 0.071 * B' + 128;
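A quick sanity check of the constants: for black (Y = 16, U = V = 128) the limited-range formula gives R = 1.164*16 + 1.596*128 - 222.9 ≈ 0, and for white (Y = 235, U = V = 128) it gives R = 1.164*235 + 1.596*128 - 222.9 ≈ 255. G and B work out the same way, which is where the offsets 222.9, 135.6 and 276.8 above come from.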
III. The camera captures data in YUYV 4:2:2 format
The four pixels below are: [Y0 U0 V0] [Y1 U1 V1] [Y2 U2 V2] [Y3 U3 V3]
The stored byte stream is: Y0 U0 Y1 V1 Y2 U2 Y3 V3
The pixels reconstructed from it are: [Y0 U0 V1] [Y1 U0 V1] [Y2 U2 V3] [Y3 U2 V3]
Read 4 bytes Y0 U0 Y1 V1 from a captured frame, turn them into the two pixels [Y0 U0 V1] and [Y1 U0 V1], then convert each pixel to RGB with a YUV-to-RGB formula, as in the loop below.
YUV -> RGB
R = 1.164 * Y + 1.596 * V - 222.9;
G = 1.164 * Y - 0.392 * U - 0.813 * V + 135.6;
B = 1.164 * Y + 2.017 * U - 276.8;
Converting YUV 4:2:2 to RGB (the loop below uses the full-range floating-point formula from section II.1):
buffer holds the byte stream: Y0 U0 Y1 V1 Y2 U2 Y3 V3
unsigned char buffer[640*480*2];   // raw YUYV data (2 bytes per pixel)
unsigned char rgbdata[640*480*3];  // converted RGB data (3 bytes per pixel)
int r1, g1, b1;
int r2, g2, b2;
for (int i = 0; i < 640*480/2; i++)
{
    char data[4];
    memcpy(data, buffer + i*4, 4);
    // Y0 U0 Y1 V1 --> [Y0 U0 V1] [Y1 U0 V1]
    unsigned char Y0 = data[0];
    unsigned char U0 = data[1];
    unsigned char Y1 = data[2];
    unsigned char V1 = data[3];
    r1 = Y0 + 1.4075 * (V1-128);                     if (r1 > 255) r1 = 255; if (r1 < 0) r1 = 0;
    g1 = Y0 - 0.3455 * (U0-128) - 0.7169 * (V1-128); if (g1 > 255) g1 = 255; if (g1 < 0) g1 = 0;
    b1 = Y0 + 1.779 * (U0-128);                      if (b1 > 255) b1 = 255; if (b1 < 0) b1 = 0;
    r2 = Y1 + 1.4075 * (V1-128);                     if (r2 > 255) r2 = 255; if (r2 < 0) r2 = 0;
    g2 = Y1 - 0.3455 * (U0-128) - 0.7169 * (V1-128); if (g2 > 255) g2 = 255; if (g2 < 0) g2 = 0;
    b2 = Y1 + 1.779 * (U0-128);                      if (b2 > 255) b2 = 255; if (b2 < 0) b2 = 0;
    rgbdata[i*6 + 0] = r1;
    rgbdata[i*6 + 1] = g1;
    rgbdata[i*6 + 2] = b1;
    rgbdata[i*6 + 3] = r2;
    rgbdata[i*6 + 4] = g2;
    rgbdata[i*6 + 5] = b2;
}
IV. Code Implementation:
#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <string.h>
#include <linux/videodev2.h> // V4L2 ioctl commands and data structures
#include <sys/mman.h>
#include <linux/fb.h>
// Decode a YUYV 4:2:2 frame of size w*h into packed RGB (3 bytes per pixel).
void yuyv_to_rgb(unsigned char *yuyvdata, unsigned char *rgbdata, int w, int h)
{
    int r1, g1, b1;
    int r2, g2, b2;
    for (int i = 0; i < w * h / 2; i++)
    {
        char data[4];
        memcpy(data, yuyvdata + i * 4, 4);
        // Y0 U0 Y1 V1 --> [Y0 U0 V1] [Y1 U0 V1]
        unsigned char Y0 = data[0];
        unsigned char U0 = data[1];
        unsigned char Y1 = data[2];
        unsigned char V1 = data[3];
        r1 = Y0 + 1.4075 * (V1-128);                     if (r1 > 255) r1 = 255; if (r1 < 0) r1 = 0;
        g1 = Y0 - 0.3455 * (U0-128) - 0.7169 * (V1-128); if (g1 > 255) g1 = 255; if (g1 < 0) g1 = 0;
        b1 = Y0 + 1.779 * (U0-128);                      if (b1 > 255) b1 = 255; if (b1 < 0) b1 = 0;
        r2 = Y1 + 1.4075 * (V1-128);                     if (r2 > 255) r2 = 255; if (r2 < 0) r2 = 0;
        g2 = Y1 - 0.3455 * (U0-128) - 0.7169 * (V1-128); if (g2 > 255) g2 = 255; if (g2 < 0) g2 = 0;
        b2 = Y1 + 1.779 * (U0-128);                      if (b2 > 255) b2 = 255; if (b2 < 0) b2 = 0;
        rgbdata[i * 6 + 0] = r1;
        rgbdata[i * 6 + 1] = g1;
        rgbdata[i * 6 + 2] = b1;
        rgbdata[i * 6 + 3] = r2;
        rgbdata[i * 6 + 4] = g2;
        rgbdata[i * 6 + 5] = b2;
    }
}
int lcdfd = 0;
unsigned int *lcdptr = NULL;
int lcd_w, lcd_h;
// Copy a w*h RGB image to the framebuffer.
// Assumes a 32 bits-per-pixel framebuffer; only the low 3 bytes of each pixel are written.
void lcd_show_rgb(unsigned char *rgbdata, int w, int h)
{
    unsigned int *ptr = lcdptr;
    for (int i = 0; i < h; i++)
    {
        for (int j = 0; j < w; j++)
        {
            memcpy(ptr + j, rgbdata + j * 3, 3);
        }
        ptr += lcd_w;       // advance one framebuffer line
        rgbdata += w * 3;   // advance one image line
    }
}
int main(int argc, const char *argv[])
{
    lcdfd = open("/dev/fb0", O_RDWR);
    // Get the LCD (framebuffer) parameters
    struct fb_var_screeninfo info;
    int lret = ioctl(lcdfd, FBIOGET_VSCREENINFO, &info);
    // On the virtual machine use the virtual resolution
    lcd_w = info.xres_virtual;
    lcd_h = info.yres_virtual;
    // On the development board use the physical resolution instead
    // lcd_w = info.xres;
    // lcd_h = info.yres;
    lcdptr = (unsigned int *)mmap(NULL, lcd_w * lcd_h * 4, PROT_READ | PROT_WRITE, MAP_SHARED, lcdfd, 0);
    // 1. Open the camera device
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0)
    {
        perror("open /dev/video0 failed");
        return -1;
    }
    // 2. Enumerate the formats the camera supports (VIDIOC_ENUM_FMT)
    struct v4l2_fmtdesc v4fmt;
    v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // v4fmt.index = 0;
    int i = 0;
    while (1)
    {
        v4fmt.index = i++;
        int ret = ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt);
        if (ret < 0)
        {
            perror("VIDIOC_ENUM_FMT failed"); // also ends the loop once all formats have been listed
            break;
        }
        printf("index = %d\n", v4fmt.index);
        printf("flags = %d\n", v4fmt.flags);
        printf("description = %s\n", v4fmt.description);
        unsigned char *p = (unsigned char *)&v4fmt.pixelformat;
        printf("pixelformat = %c%c%c%c\n", p[0], p[1], p[2], p[3]);
        printf("reserved[0] = %d\n", v4fmt.reserved[0]);
        printf("------------------------------------------\n");
    }
    // 3. Set the capture format
    struct v4l2_format vfmt;
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;      // video capture stream
    vfmt.fmt.pix.width = 640;                     // frame width
    vfmt.fmt.pix.height = 480;                    // frame height
    vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // capture pixel format
    int ret = ioctl(fd, VIDIOC_S_FMT, &vfmt);
    if (ret < 0)
    {
        perror("VIDIOC_S_FMT failed");
    }
    // Read the format back to confirm it was accepted
    memset(&vfmt, 0, sizeof(vfmt));
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_G_FMT, &vfmt); // get the current capture format
    if (ret < 0)
    {
        perror("VIDIOC_G_FMT failed");
    }
    printf("vfmt.fmt.pix.width = %d\n", vfmt.fmt.pix.width);
    printf("vfmt.fmt.pix.height = %d\n", vfmt.fmt.pix.height);
    unsigned char *p = (unsigned char *)&vfmt.fmt.pix.pixelformat;
    printf("pixelformat = %c%c%c%c\n", p[0], p[1], p[2], p[3]);
    // 4. Request buffers in kernel space
    struct v4l2_requestbuffers reqbuffer;
    reqbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuffer.count = 4;                 // request 4 buffers
    reqbuffer.memory = V4L2_MEMORY_MMAP; // memory-mapped I/O
    ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuffer);
    if (ret < 0)
    {
        perror("VIDIOC_REQBUFS failed");
    }
    // 5. Map the buffers into user space
    unsigned char *mptr[4]; // user-space addresses of the mapped buffers
    unsigned int size[4];
    struct v4l2_buffer mapbuffer;
    memset(&mapbuffer, 0, sizeof(mapbuffer));
    // initialize type and index
    mapbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    for (int i = 0; i < 4; i++)
    {
        mapbuffer.index = i;
        ret = ioctl(fd, VIDIOC_QUERYBUF, &mapbuffer); // query one kernel buffer so it can be mapped
        if (ret < 0)
        {
            perror("VIDIOC_QUERYBUF failed");
        }
        mptr[i] = (unsigned char *)mmap(NULL, mapbuffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, mapbuffer.m.offset);
        size[i] = mapbuffer.length;
        // queue the buffer so the driver can fill it
        ret = ioctl(fd, VIDIOC_QBUF, &mapbuffer);
        if (ret < 0)
        {
            perror("VIDIOC_QBUF failed");
        }
    }
    // 6. Start streaming
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_STREAMON, &type);
    if (ret < 0)
    {
        perror("VIDIOC_STREAMON failed");
    }
    // Buffer for the decoded RGB data
    unsigned char rgbdata[640 * 480 * 3];
    while (1)
    {
        // 7. Dequeue one filled frame from the driver
        struct v4l2_buffer readbuffer;
        memset(&readbuffer, 0, sizeof(readbuffer));
        readbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ret = ioctl(fd, VIDIOC_DQBUF, &readbuffer);
        if (ret < 0)
        {
            perror("VIDIOC_DQBUF failed");
        }
        // Show the frame on the LCD:
        // decode the YUYV data to RGB, then copy it to the framebuffer
        yuyv_to_rgb(mptr[readbuffer.index], rgbdata, 640, 480);
        lcd_show_rgb(rgbdata, 640, 480);
        // Tell the kernel the buffer can be reused
        ret = ioctl(fd, VIDIOC_QBUF, &readbuffer);
        if (ret < 0)
        {
            perror("VIDIOC_QBUF failed");
        }
    }
    // 8. Stop streaming
    ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
    if (ret < 0)
    {
        perror("VIDIOC_STREAMOFF failed");
    }
    // 9. Unmap the buffers
    for (int i = 0; i < 4; i++)
    {
        munmap(mptr[i], size[i]);
    }
    // 10. Close the device
    close(fd);
    return 0;
}
V. Compilation:
Ubuntu version:
gcc video_show_yuyv.c -o video_show_yuyv
Development board version:
Prepare the ARM build of the libjpeg library and copy the libjpeg directory into the project's current directory (only needed if JPEG decoding is added later; this YUYV program does not use it), then:
arm-linux-gcc -o video_show_yuyv video_show_yuyv.c
VI. Testing on a virtual LCD terminal:
On Ubuntu:
Open a virtual terminal with Ctrl+Fn+Alt+F3.
Run the test program: ./video_show_yuyv