PS:鉴于 上一篇文章 https://blog.csdn.net/u013142545/article/details/131916262?spm=1001.2014.3001.5501
使用v4l2与QT实现的效果,不能满足设计标准,帧率与延时不能同时满足,因此,项目需改变框架,重新开发。
在网上浏览,偶然得知Gstreamer能快速处理,同时不占用资源,决定使用,在半知半解的情况下,一个坑一个坑
的爬,终于实现出来了。现将自己的实现过程记录,以便后续查看,同时希望能帮到有需要的朋友。
项目功能拆分:
1:显示 ====》利用Gstreamer来拉流并显示,命令行如下:gst-launch-1.0 -v v4l2src device=/dev/video0 \
! image/jpeg,width=640,height=480,framerate=30/1 \
! jpegdec ! videobalance contrast=-0.7 brightness=0.2 saturation=2.0 \
! autovideoconvert \
! textoverlay text="ABCDEFG" ! ximagesink sync=false
命令行特别说明:textoverlay 元素 是用来显示文字信息,可用于提示作用,显示图标的元素是gdkpixbufoverlay,可在官网查看使用说明
2:录像 ===》使用avilib 开源库,在管道v4l2处分流出源数据(视频)流,因mjpeg本身就是压缩过的数据,可直接保存成avi视频文件
分流的关键代码如下:
/* Tap the raw MJPEG stream for recording: attach a buffer probe to the
 * "src" pad of the v4l2src element so every frame is also delivered to
 * cb_save_video_data before flowing downstream. */
GstPad *srcpad = gst_element_get_static_pad (date.vsrc, "src");
gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER,
(GstPadProbeCallback) cb_save_video_data, &date, NULL);
gst_object_unref (srcpad); /* probe holds its own ref; drop ours */
cb_save_video_data:回调函数 date:自定义结构体,元素及变量的集合
/*
 * Buffer probe on the v4l2src "src" pad.
 *
 * Handles the record button (button_clicked_val == 2): toggles recording
 * on/off, opening/closing a time-stamped AVI file and updating the
 * textoverlay hint.  While recording_flag is set, every MJPEG buffer that
 * passes the pad is appended to the AVI file as-is (MJPEG frames are
 * already compressed, so no re-encoding is needed).
 *
 * Returns GST_PAD_PROBE_OK so the buffer continues downstream, or
 * GST_PAD_PROBE_DROP if the buffer could not be mapped for reading.
 */
static GstPadProbeReturn
cb_save_video_data (GstPad *pad,
                    GstPadProbeInfo *info,
                    CustomData *date)
{
    if (2 == button_clicked_val)
    {
        if (!recording_flag)
        {
            /* start record: show an on-screen hint via the textoverlay element */
            g_object_set(GST_OBJECT(date->textoverlay), "text", "Recording", NULL);
            recording_flag = 1;

            /* Create the output file, named after the current local time,
             * e.g. <RECORDFILEPATH>2024Y1M2D_3h4m5s.avi */
            char strFile[64] = {0};                       /* zero-initialized; no memset needed */
            time_t current_time = time(NULL);             /* BUG FIX: was "¤t_time" (mangled "&current_time") */
            struct tm *local_time = localtime(&current_time);
            /* BUG FIX: bounded snprintf instead of unchecked sprintf */
            snprintf(strFile, sizeof strFile, "%s%dY%dM%dD_%dh%dm%ds.avi",
                     RECORDFILEPATH,
                     local_time->tm_year + 1900, local_time->tm_mon + 1, local_time->tm_mday,
                     local_time->tm_hour, local_time->tm_min, local_time->tm_sec);
            out_fd = AVI_open_output_file(strFile);
            if (out_fd != NULL)
            {
                /* 640x480 @ 15 fps, MJPG fourcc — matches the camera caps */
                AVI_set_video(out_fd, 640, 480, 15, "MJPG");
            }
            else
            {
                perror("Fail to open AVI");               /* perror appends ": <errno>\n" itself */
                exit(EXIT_FAILURE);
            }
        }
        else
        {
            /* stop record: clear the overlay hint and finalize the AVI file */
            g_object_set(GST_OBJECT(date->textoverlay), "text", "", NULL);
            recording_flag = 0;
            AVI_close(out_fd);
        }
        button_clicked_val = 0;                           /* button event consumed */
    }

    if (recording_flag)
    {
        /* write the current frame to the AVI file */
        GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
        GstMapInfo map_info;
        /* The buffer's data cannot be touched directly; it must be mapped first. */
        if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
            g_print("gst_buffer_map() error!");
            return GST_PAD_PROBE_DROP;
        }
        AVI_write_frame(out_fd, map_info.data, map_info.size, 0);
        gst_buffer_unmap (buffer, &map_info);
    }
    return GST_PAD_PROBE_OK;
}
3:截图 ===》在管道ximagesink 处分流,将获取的数据流转换成图片并保存
关键代码如下:
/* Tap the decoded video at the display element: attach a buffer probe to
 * the "sink" pad of ximagesink so cb_have_data can grab a frame for the
 * snapshot feature and drive the textoverlay messages. */
GstPad *pad = gst_element_get_static_pad (date.vsink, "sink");
gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
(GstPadProbeCallback) cb_have_data, &date, NULL);
gst_object_unref (pad); /* probe holds its own ref; drop ours */
/*
 * Buffer probe on the ximagesink "sink" pad; services user button events:
 *  - button_clicked_val == 1: snapshot — convert the current frame to
 *    packed RGB and save it as a time-stamped JPEG via GdkPixbuf.
 *  - button_clicked_val == 3: show a "Battery low" warning in the overlay.
 *  - any other value except 2 (record, handled in cb_save_video_data):
 *    clear the overlay text.
 *
 * Returns GST_PAD_PROBE_OK normally, or GST_PAD_PROBE_DROP when the
 * snapshot path cannot read the frame (caps query or buffer map failed).
 */
static GstPadProbeReturn
cb_have_data (GstPad *pad,
              GstPadProbeInfo *info,
              CustomData *date)
{
    if (1 == button_clicked_val) {
        g_print("enter cb_have_data\n");

        /* Query the negotiated caps for the frame dimensions. */
        gint width = 0, height = 0;
        GstCaps *sink_caps = gst_pad_get_current_caps(pad);
        GstStructure *s = gst_caps_get_structure (sink_caps, 0);
        /* BUG FIX: was "res |= ...", which hid a failed width lookup —
         * both dimensions must be present for the conversion below. */
        gboolean res = gst_structure_get_int (s, "width", &width)
                    && gst_structure_get_int (s, "height", &height);
        gst_caps_unref(sink_caps);        /* BUG FIX: caps were leaked on every snapshot */
        if (!res) {
            g_print ("gst_structure_get_int fail\n");
            button_clicked_val = 0;
            return GST_PAD_PROBE_DROP;
        }

        /* Map the buffer; its data cannot be touched directly. */
        GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
        GstMapInfo map_info;
        if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
            g_print("gst_buffer_map() error!");
            button_clicked_val = 0;
            return GST_PAD_PROBE_DROP;
        }
        /* BUG FIX: %ld is wrong for gsize on 32-bit targets */
        g_print("frame size = %" G_GSIZE_FORMAT " \n", map_info.size);

        /* Build a time-stamped file name, e.g. <RECORDFILEPATH>2024Y1M2D_3h4m5s.jpg */
        char strFile[64] = {0};
        time_t current_time = time(NULL);                 /* BUG FIX: was "¤t_time" (mangled "&current_time") */
        struct tm *local_time = localtime(&current_time);
        snprintf(strFile, sizeof strFile, "%s%dY%dM%dD_%dh%dm%ds.jpg",
                 RECORDFILEPATH,
                 local_time->tm_year + 1900, local_time->tm_mon + 1, local_time->tm_mday,
                 local_time->tm_hour, local_time->tm_min, local_time->tm_sec);

        /* Scratch buffer for the packed-RGB conversion.  Made static so
         * ~900 KB no longer lives on the streaming thread's stack; the
         * probe runs on a single streaming thread, so no race here.
         * Sized for 640x480 — guard against larger negotiated caps. */
        static guint8 rgb[640 * 480 * 3];
        if ((gsize)width * height * 3 <= sizeof rgb) {
            cvtColorGBRx2RGB(rgb, map_info.data, width, height);

            /* Save via GdkPixbuf (a Qt QImage works just as well).
             * NOTE(review): rowstride GST_ROUND_UP_4(width*3) assumes
             * cvtColorGBRx2RGB pads rows the same way; for 640 px the
             * stride is already a multiple of 4, so they agree here. */
            GError *error = NULL;
            GdkPixbuf *pixbuf = gdk_pixbuf_new_from_data (rgb, GDK_COLORSPACE_RGB,
                                                          FALSE, 8, width, height,
                                                          GST_ROUND_UP_4 (width * 3),
                                                          NULL, NULL);
            /* BUG FIX: save errors were silently ignored */
            if (!gdk_pixbuf_save (pixbuf, strFile, "jpeg", &error, NULL)) {
                g_print("gdk_pixbuf_save fail: %s\n",
                        error ? error->message : "unknown error");
                g_clear_error(&error);
            } else {
                g_object_set(GST_OBJECT(date->textoverlay), "text", "Capture success", NULL);
            }
            g_object_unref(pixbuf);
        } else {
            g_print("frame %dx%d too large for capture buffer\n", width, height);
        }

        gst_buffer_unmap (buffer, &map_info);
        button_clicked_val = 0;
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(date->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "capture");
    } else if (3 == button_clicked_val) {
        g_object_set(GST_OBJECT(date->textoverlay), "text", "Battery low", NULL);
        button_clicked_val = 0;
    } else if (2 != button_clicked_val) {
        /* No pending button event: keep the overlay text cleared. */
        g_object_set(GST_OBJECT(date->textoverlay), "text", "", NULL);
    }
    /* button_clicked_val == 2 (record toggle) is handled in cb_save_video_data. */
    return GST_PAD_PROBE_OK;
}
4:用户交互 ===》使用串口通信,来接收用户按下的按键,协议解析,将用户操作的信息,通过textoverlay 将文字加入视频流,显示
对应的信息,起到提示作用
代码如下:
/* Create the textoverlay element and set its initial on-screen text;
 * later the probes update the "text" property to show user hints. */
date.textoverlay = gst_element_factory_make("textoverlay",NULL);
g_object_set(GST_OBJECT(date.textoverlay),"text","ABCDEFG",NULL);
5:热插拔 ===》实现两个进程,进程一为Gstreamer处理步骤1 2 3 4,进程二为用QT来显示一张图片,最后,用脚本来监听摄像头文件
/dev/video0是否存在,来切换进程一还是进程二
PS: Gstreamer知识点可查看官网文档与例程,也可网上浏览相关的技术博客
重点借鉴 https://blog.csdn.net/qq_41563600/article/details/121343927
https://blog.csdn.net/qq_41563600/article/details/121308586
等等,此博主Gstreamer系列的技术博客,跪谢大神
问题抛出:命令行 gst-launch-1.0 -v v4l2src device=/dev/video0 \
! image/jpeg,width=640,height=480,framerate=30/1 \
! tee name=srctee srctee. \
! queue ! filesink location=jpeg_640_480.avi srctee. \
! jpegdec ! videobalance contrast=-0.7 brightness=0.2 saturation=2.0 \
! autovideoconvert \
! ximagesink sync=false
上述命令行是起作用,也能正常录像与显示,若是不需要录像只需去掉 ! tee name=srctee srctee. \
! queue ! filesink location=jpeg_640_480.avi srctee. \ 即可
但是在代码中,动态加入与删除 ! tee name=srctee srctee. \
! queue ! filesink location=jpeg_640_480.avi srctee. \ 这段逻辑,无法实现链接,即管道无法串联起来,
问题排查出 是filesink 后面的 srctee. 导致的,本人不才,无法使用代码的方式解决此问题,若是有朋友正好知道,还请帮忙解惑!
整个工程源代码:使用积分下载