Playing video with GStreamer's framebuffer videosink

GStreamer supports many kinds of sinks, such as xvimagesink, v4l2sink, autovideosink, fakesink, imxeglvivsink, imxipuvideosink, fbdevsink, fdsink, glimagesink, and so on. This article shows how to use the framebuffer device node (/dev/fb) for video playback output.
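
To check which sink elements are available on your system, you can filter the full element list printed by gst-inspect-1.0 (the grep pattern here is just an illustration):

gst-inspect-1.0 | grep -i sink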

First, an introduction to drawing on the framebuffer and grabbing its contents:

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <sys/mman.h>
#include <sys/ioctl.h>

int main(void)
{
    int fbfd = 0;
    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    long int screensize = 0;
    char *fbp = NULL;
    int x = 0, y = 0;
    long int location = 0;
    int startx = 0, starty = 0;
    int width, height;

    // Open the framebuffer device for reading and writing
    fbfd = open("/dev/fb0", O_RDWR);
    if (fbfd == -1) {
        perror("Error: cannot open framebuffer device");
        exit(1);
    }
    printf("The framebuffer device was opened successfully.\n");

    // Get fixed screen information (line pitch, etc.)
    if (ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo) == -1) {
        perror("Error reading fixed information");
        exit(2);
    }

    // Get variable screen information (resolution, bpp, etc.)
    if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo) == -1) {
        perror("Error reading variable information");
        exit(3);
    }

    printf("%dx%d, %dbpp, %d pitch\n", vinfo.xres, vinfo.yres,
           vinfo.bits_per_pixel, finfo.line_length);

    // Figure out the size of the screen in bytes
    screensize = vinfo.yres * finfo.line_length;
    printf("screensize %ld\n", screensize);

    // Map the device to memory
    fbp = (char *)mmap(0, screensize, PROT_READ | PROT_WRITE, MAP_SHARED, fbfd, 0);
    if (fbp == MAP_FAILED) {
        perror("Error: failed to map framebuffer device to memory");
        exit(4);
    }
    printf("The framebuffer device was mapped to memory successfully.\n");

    startx = 0; starty = 0;       // Top-left corner of the rectangle we draw
    width = 200;
    height = 100;

    // Compute each pixel's byte offset and fill the rectangle with a gradient
    for (y = starty; y < height + starty; y++) {
        for (x = startx; x < width + startx; x++) {

            location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8) +
                       (y + vinfo.yoffset) * finfo.line_length;

            if (vinfo.bits_per_pixel == 32) {
                *(fbp + location) = 10;                          // Some blue
                *(fbp + location + 1) = 15 + (x - startx) / 2;   // A little green
                *(fbp + location + 2) = 200 - (y - starty) / 5;  // A lot of red
                *(fbp + location + 3) = 0;                       // No transparency
            } else {  // assume 16bpp (RGB565)
                int b = 10;
                int g = (x - startx) / 6;        // A little green
                int r = 31 - (y - starty) / 16;  // A lot of red
                unsigned short int t = r << 11 | g << 5 | b;
                *((unsigned short int *)(fbp + location)) = t;
            }
        }
    }

    getchar();    // Wait for a key press so the pattern stays visible
    munmap(fbp, screensize);
    close(fbfd);
    return 0;
}
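
To try the program (assuming it is saved as fbdraw.c; root privileges, or membership in the video group, are typically needed to open /dev/fb0):

gcc fbdraw.c -o fbdraw
sudo ./fbdraw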

Run sudo chvt 3 to switch to virtual console 3.

After running the program, grab the framebuffer:

cat /dev/fb0 > tmp

The contents of the framebuffer are saved to the file tmp. It is a raw RGB dump with no header, so there is no tool that can display it directly; it needs one conversion pass first.
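
Before converting, it is worth confirming the framebuffer's actual resolution and bit depth, because the converter below hard-codes a 1920x1080, 16bpp layout. On most Linux systems both can be read from sysfs:

cat /sys/class/graphics/fb0/virtual_size
cat /sys/class/graphics/fb0/bits_per_pixel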

The libjpeg development library needs to be installed beforehand:

sudo apt-get install libjpeg62-dev

The following code, adapted from a snippet found online, converts the raw RGB data to a JPEG file:


//cat /dev/fb0 > tmp
//./grab -i tmp -o grab.jpg
#include <setjmp.h>
#include <stdio.h>
#include <stdlib.h>
#include <getopt.h>
#include <jpeglib.h>

#define W 1920
#define H 1080

/* The name of this program. */
const char *program_name;

/*
   Prints usage information for this program to STREAM, and exits
   the program with EXIT_CODE.
*/
void print_usage(FILE *stream, int exit_code)
{
        fprintf(stream, "Usage: %s options [inputfile outputfile...]\n", program_name);
        fprintf(stream,
                        " -h --help             Display this usage information\n"
                        " -i --input filename   Read raw RGB565 data from file\n"
                        " -o --output filename  Write JPEG output to file\n");
        exit(exit_code);
}

void convert_line(const char *src, char *dst, int line, int width);

int main(int argc, char **argv)
{
        char *input_filename = NULL;
        char *output_filename = NULL;
        int next_option;
        const char *short_options = "hi:o:";
        const struct option long_options[] = {
                { "help",   0, NULL, 'h' },
                { "input",  1, NULL, 'i' },
                { "output", 1, NULL, 'o' },
                { NULL,     0, NULL, 0 }
        };

        program_name = argv[0];
        do {
                next_option = getopt_long(argc, argv, short_options, long_options, NULL);
                switch (next_option) {
                case 'h':
                        print_usage(stdout, 0);
                        break;
                case 'i':
                        input_filename = optarg;
                        break;
                case 'o':
                        output_filename = optarg;
                        break;
                case -1:
                        break;
                default:
                        print_usage(stderr, 1);
                }
        } while (next_option != -1);

        if (!output_filename)
                output_filename = "grab.jpg";
        if (!input_filename)
                input_filename = "tmp";

        printf("outputfile = %s, inputfile = %s\n", output_filename, input_filename);

        struct jpeg_compress_struct cinfo;
        struct jpeg_error_mgr jerr;

        JSAMPLE *buffer;        /* raw RGB565 input, 2 bytes per pixel */
        JSAMPLE *image_buffer;  /* one converted RGB888 scanline */
        buffer = malloc(W * H * 2);
        image_buffer = malloc(W * 3);

        FILE *in;
        int ret = 0;
        in = fopen(input_filename, "rb");
        if (!in) {
                printf("please provide the rgb source\n");
                exit(1);
        }

        ret = fread(buffer, 1, W * H * 2, in);
        if (ret == 0) {
                printf("read image buffer error\n");
                exit(1);
        }

        FILE *outfile;
        JSAMPROW row_pointer[1];  /* points to one JSAMPLE row */

        /* step 1: allocate and initialize the JPEG compression object */
        cinfo.err = jpeg_std_error(&jerr);
        jpeg_create_compress(&cinfo);

        /* step 2: specify the data destination */
        if ((outfile = fopen(output_filename, "wb")) == NULL) {
                printf("cannot open file to write\n");
                exit(1);
        }
        jpeg_stdio_dest(&cinfo, outfile);

        /* step 3: set parameters for compression */
        cinfo.image_width = W;
        cinfo.image_height = H;
        cinfo.input_components = 3;
        cinfo.in_color_space = JCS_RGB;
        jpeg_set_defaults(&cinfo);
        jpeg_set_quality(&cinfo, 100, TRUE);

        /* step 4: start the compressor */
        jpeg_start_compress(&cinfo, TRUE);

        /* step 5: convert and write one scanline at a time */
        while (cinfo.next_scanline < cinfo.image_height) {
                convert_line((const char *)buffer, (char *)image_buffer,
                             cinfo.next_scanline, W);
                row_pointer[0] = image_buffer;
                (void)jpeg_write_scanlines(&cinfo, row_pointer, 1);
        }
        jpeg_finish_compress(&cinfo);
        fclose(outfile);
        jpeg_destroy_compress(&cinfo);
        fclose(in);
        free(buffer);
        free(image_buffer);
        return 0;
}

/* Expand one RGB565 scanline (2 bytes per pixel, little-endian) to RGB888. */
void convert_line(const char *src, char *dst, int line, int width)
{
        const char *p = src + line * width * 2;
        int i = 0;
        while (i < width * 3) {
                dst[i]     = p[1] & 0xf8;                                 /* R: top 5 bits */
                dst[i + 1] = ((p[1] & 0x07) << 5) | ((p[0] & 0xe0) >> 3); /* G: 6 bits */
                dst[i + 2] = (p[0] & 0x1f) << 3;                          /* B: low 5 bits */
                i += 3;
                p += 2;
        }
}

Compile it against libjpeg:

gcc jpeg.c -o grab -ljpeg

After running it, the tmp file is converted into grab.jpg.
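
The converter can also be invoked with explicit options (shown here with the same file names the code defaults to):

./grab -i tmp -o grab.jpg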

The fbset tool can also be used to query framebuffer information.
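
Running fbset with no arguments prints the current video mode; the -i switch additionally shows fixed device information, much like the FBIOGET_FSCREENINFO ioctl used above:

fbset -i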

Next, let's play video on the framebuffer.

First, a test case running in the graphical environment, using xvimagesink for X Window output:

gst-launch-1.0 -v videotestsrc pattern=snow ! xvimagesink

If we want OpenGL rendering instead, use glimagesink:

gst-launch-1.0 filesrc location=./test.mp4 ! decodebin ! glimagesink

Next, test on the text console.

A default Ubuntu installation does not include the fbdevsink plugin; it has to be installed manually with the following command:

sudo apt-get install libgstreamer1.0-0 gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav gstreamer1.0-doc gstreamer1.0-tools gstreamer1.0-x gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-qt5 gstreamer1.0-pulseaudio
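
After installation, confirm that the element is actually available (fbdevsink ships in the GStreamer "bad" plugin set):

gst-inspect-1.0 fbdevsink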

Switch to the text console and run:

gst-launch-1.0 -v videotestsrc pattern=snow ! fbdevsink &

Grabbing the framebuffer with the method described above shows the rendered result. In the converted capture, the snow pattern appears split into two parts, neither of which is a complete rectangle. This is an artifact of the conversion program, most likely because it hard-codes a 1920x1080, 2-bytes-per-pixel layout: if the actual framebuffer resolution or line pitch differs, every row wraps at the wrong offset. On the console itself the display is perfectly regular: a clean square of snow centered on the screen.

Next, let's see what playing a video on the console looks like.

A side note: mplayer can also output video to the framebuffer via mplayer xxx.mp4 -vo fbdev. Feel free to experiment with it; we won't cover it further here.

Run:

gst-launch-1.0 filesrc location=movie.mp4 ! decodebin name=decoder decoder. ! queue ! videoconvert ! fbdevsink &
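
If you would rather pin down the output format explicitly instead of leaving negotiation entirely to the elements, a capsfilter can be placed after videoconvert (a sketch: RGB16 matches a 16bpp framebuffer, and whether fbdevsink accepts it depends on your device):

gst-launch-1.0 filesrc location=movie.mp4 ! decodebin ! queue ! videoconvert ! video/x-raw,format=RGB16 ! fbdevsink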

Next, let's look at the topology of the GStreamer pipeline. A separate article describes in detail how to export a GStreamer pipeline graph; the exported graph is shown below:
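
In short, the dump is produced by setting GST_DEBUG_DUMP_DOT_DIR before running the pipeline, and then rendering one of the resulting .dot files with Graphviz (substitute the actual name of the dumped file):

GST_DEBUG_DUMP_DOT_DIR=/tmp gst-launch-1.0 filesrc location=movie.mp4 ! decodebin ! queue ! videoconvert ! fbdevsink
dot -Tpng /tmp/<dumped-file>.dot -o pipeline.png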

In the graph, the most noteworthy stage is the final pixel-format conversion. The decoder only outputs YUV images, while the framebuffer only accepts RGB images, so a conversion element is inserted between them.

As the graph shows, the frames start out as NV21 (one of the YUV memory layouts) and, after passing through the converter, become RGB, at which point the framebuffer can display them directly.
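
To make concrete what the converter has to do, here is a minimal sketch of an NV21-to-RGB888 conversion in C. This is illustrative only: the function name is mine, the coefficients are the common BT.601 limited-range fixed-point approximation, and the real videoconvert element is far more general and heavily optimized.

#include <stdint.h>

static uint8_t clamp_u8(int v)
{
    return v < 0 ? 0 : (v > 255 ? 255 : (uint8_t)v);
}

/* NV21 layout: a full-resolution Y plane followed by an interleaved
   V/U plane at quarter resolution (one V/U pair per 2x2 pixel block). */
void nv21_to_rgb888(const uint8_t *nv21, uint8_t *rgb, int width, int height)
{
    const uint8_t *y_plane  = nv21;
    const uint8_t *vu_plane = nv21 + width * height;

    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int Y = y_plane[y * width + x];
            int V = vu_plane[(y / 2) * width + (x / 2) * 2];
            int U = vu_plane[(y / 2) * width + (x / 2) * 2 + 1];

            /* BT.601 limited-range YUV -> RGB, integer fixed point */
            int C = Y - 16, D = U - 128, E = V - 128;
            uint8_t *px = rgb + (y * width + x) * 3;
            px[0] = clamp_u8((298 * C + 409 * E + 128) >> 8);           /* R */
            px[1] = clamp_u8((298 * C - 100 * D - 208 * E + 128) >> 8); /* G */
            px[2] = clamp_u8((298 * C + 516 * D + 128) >> 8);           /* B */
        }
    }
}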

If the videoconvert element is removed, pipeline construction fails:

gst-launch-1.0 filesrc location=movie.mp4 ! decodebin name=decoder decoder. ! queue ! fbdevsink &

Setting pipeline to PAUSED ...
Pipeline is PREROLLING ...
Got context from element 'vaapipostproc0': gst.gl.GLDisplay=context, gst.gl.GLDisplay=(GstGLDisplay)"\(GstGLDisplayGBM\)\ gldisplaygbm0";
Got context from element 'vaapipostproc0': gst.vaapi.Display=context, gst.vaapi.Display=(GstVaapiDisplay)"\(GstVaapiDisplayDRM\)\ vaapidisplaydrm1";
Redistribute latency...
WARNING: from element /GstPipeline:pipeline0/GstDecodeBin:decoder: Delayed linking failed.
Additional debug info:
./grammar.y(510): gst_parse_no_more_pads (): /GstPipeline:pipeline0/GstDecodeBin:decoder:
failed delayed linking some pad of GstDecodeBin named decoder to some pad of GstQueue named queue0
ERROR: from element /GstPipeline:pipeline0/GstDecodeBin:decoder/GstQTDemux:qtdemux0: Internal data stream error.
Additional debug info:
qtdemux.c(6073): gst_qtdemux_loop (): /GstPipeline:pipeline0/GstDecodeBin:decoder/GstQTDemux:qtdemux0:
streaming stopped, reason not-linked (-1)
ERROR: pipeline doesn't want to preroll.
Setting pipeline to NULL ...
Freeing pipeline ...

The failure confirms, from the opposite direction, that the video format conversion step is indispensable.


That's all!
