Notes on Saving Camera Data as MP4 with GStreamer

A recent project needed this: save a USB camera's frames to an MP4 file using GStreamer code. I had already done raw H.264 capture before, so at first everything seemed to go smoothly, but the saved file would not open in any player. Inspecting it with a stream-analysis tool showed the buffers had no PTS (presentation timestamps). Raw H.264 carries no timing information, so when muxing to MP4 you have to set the PTS yourself.

So I looked into how to add them. Nothing worked at first: writing GST_BUFFER_PTS by hand in the appsrc path made no difference, and the approaches I found online that pull the running time from the pipeline clock just threw errors. Very frustrating.

Running the equivalent pipeline on the command line, I noticed that adding -e made it work: -e tells gst-launch-1.0 to send EOS on shutdown, and qtmux only finalizes the MP4 (writing the moov index atom) when it receives EOS, which is why a file closed without it is unplayable. The question then became how to do the same from code. It turns out appsrc has an "end-of-stream" action signal; emit it once when recording ends and the file is finalized correctly.

"end-of-stream" :  GstFlowReturn user_function (GstElement* object);

This is just a quick note; I may fill in the details later.

Register for the EOS message on the bus, then stop a recording by emitting the signal:

g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, c_data);

This is the interface that ends a recording:


/**
 * @brief gstreamer_yuv2mp4_deinit
 *
 * @param CustomData *c_data  : customdata
 * @return 0 on success
 */
int gstreamer_yuv2mp4_deinit(CustomData *c_data)
{
  g_print ("gstreamer_h264enc_deinit.\n");

  GstFlowReturn ret;
  g_signal_emit_by_name (c_data->app_source, "end-of-stream", &ret);

  /* Wait for the pipeline thread to finish: EOS must reach the sink
   * before the MP4 is complete. */
  pthread_join(pid, NULL);
  return 0;
}

When pushing recording data, a PTS has to be set on each buffer.

/**
 * @brief gstreamer_yuv2mp4_send
 *
 * @param CustomData *c_data  : gstreamer customdata
 * @param void *src_data      : picture yuv data
 * @return TRUE on success, FALSE on error
 */
int gstreamer_yuv2mp4_send(CustomData *c_data, void *src_data)
{
  GstBuffer *buffer;
  GstFlowReturn ret;
  GstMapInfo map;
  /* Create a new empty buffer */
  buffer = gst_buffer_new_and_alloc (c_data->src_data_size);

  /* Set its timestamp and duration */
  struct timeval tv;
  long long t = 0;
  gettimeofday(&tv, NULL);
  t = ((long long)tv.tv_sec * 1000000LL + tv.tv_usec) * 1000LL - my_basetime;
  GST_BUFFER_PTS(buffer) = t; /* set the PTS; together with EOS this yields a playable MP4 */
  GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30); /* one frame at 30 fps */

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  memcpy(map.data, src_data, c_data->src_data_size);
  gst_buffer_unmap (buffer, &map);

  g_signal_emit_by_name (c_data->app_source, "push-buffer", buffer, &ret);

  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }

  return TRUE;
}
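
If frames are pushed at a steady rate, a simpler variant (an alternative sketch, not what the code above does) is to derive the PTS from a frame counter instead of wall-clock time:

/* Alternative: frame-counter based PTS, assuming a fixed 30 fps input.
 * Avoids gettimeofday() and a separate base time entirely. */
static guint64 frame_count = 0;

GST_BUFFER_PTS (buffer)      = gst_util_uint64_scale (frame_count, GST_SECOND, 30);
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (1, GST_SECOND, 30);
frame_count++;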

I'm not great at explaining, so let the code speak. Below is the complete module.


#include <stdio.h>
#include <string.h>
#include <pthread.h>
#include <sys/time.h>   /* gettimeofday() */

#include "gst_yuv2mp4.h"

/*
gst-launch-1.0 appsrc ! \
     'video/x-raw, width=(int)1280, height=(int)720, \
     format=(string)YUY2, framerate=(fraction)30/1' ! nvvidconv ! \
     'video/x-raw(memory:NVMM), format=(string)I420' ! nvv4l2h264enc ! h264parse ! qtmux \
      ! filesink \
     location=test.mp4 -e
*/

static void  _gstreamer_h264enc_init (CustomData *c_data);
static void *_gstreamer_h264enc_run (void *arg);

static pthread_t pid;
static long long my_basetime;

/**
 * @brief gstreamer_yuv2mp4_init
 *
 * @param CustomData *c_data  : customdata
 * @param int w, int h : picture width and height
 * @param long size : picture size in bytes
 * @param char *file : output path, must be *.mp4
 * @return 0 on success
 */
int gstreamer_yuv2mp4_init(CustomData *c_data, int w, int h, long size, char *file)
{
  /* Initialize custom data structure */
  memset(c_data, 0, sizeof (CustomData));

  c_data->src_data_size = size;
  c_data->width = w;
  c_data->height = h;
  c_data->file = file;

  /* Capture the recording start time so the first buffer's PTS is near zero */
  struct timeval tv;
  gettimeofday(&tv, NULL);
  my_basetime = ((long long)tv.tv_sec * 1000000LL + tv.tv_usec) * 1000LL;

  _gstreamer_h264enc_init(c_data);

  if (pthread_create(&pid, NULL, _gstreamer_h264enc_run, (void *)c_data) != 0)
  {
      printf("create thread fail\n");
      return -1;
  }
  return 0;
}

/**
 * @brief gstreamer_yuv2mp4_deinit
 *
 * @param CustomData *c_data  : customdata
 * @return 0 on success
 */
int gstreamer_yuv2mp4_deinit(CustomData *c_data)
{
  g_print ("gstreamer_h264enc_deinit.\n");

  GstFlowReturn ret;
  g_signal_emit_by_name (c_data->app_source, "end-of-stream", &ret);

  /* Wait for the pipeline thread to finish: EOS must reach the sink
   * before the MP4 is complete. */
  pthread_join(pid, NULL);
  return 0;
}

/**
 * @brief gstreamer_yuv2mp4_send
 *
 * @param CustomData *c_data  : gstreamer customdata
 * @param void *src_data      : picture yuv data
 * @return TRUE on success, FALSE on error
 */
int gstreamer_yuv2mp4_send(CustomData *c_data, void *src_data)
{
  GstBuffer *buffer;
  GstFlowReturn ret;
  GstMapInfo map;
  /* Create a new empty buffer */
  buffer = gst_buffer_new_and_alloc (c_data->src_data_size);

  /* Set its timestamp and duration */
  struct timeval tv;
  long long t = 0;
  gettimeofday(&tv, NULL);
  t = ((long long)tv.tv_sec * 1000000LL + tv.tv_usec) * 1000LL - my_basetime;
  GST_BUFFER_PTS(buffer) = t; /* set the PTS; together with EOS this yields a playable MP4 */
  GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30); /* one frame at 30 fps */

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  memcpy(map.data, src_data, c_data->src_data_size);
  gst_buffer_unmap (buffer, &map);

  g_signal_emit_by_name (c_data->app_source, "push-buffer", buffer, &ret);

  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }

  return TRUE;
}

/********************************************************************************************************************/

/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *c_data) {
  GError *err;
  gchar *debug_info;

  /* Print error details on the screen */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);

  g_main_loop_quit (c_data->main_loop);
}

/* This function is called when an eos message is posted on the bus */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *c_data) {
  g_print ("End-Of-Stream reached.\n");

  g_main_loop_quit (c_data->main_loop);
}

static void _gstreamer_h264enc_init (CustomData *c_data)
{
  GstCaps *caps;
  GstBus *bus;

  /* Initialize GStreamer */
  gst_init (NULL, NULL);

  /* Create the elements */
  c_data->app_source  = gst_element_factory_make ("appsrc", "source");
  c_data->capsfilter1 = gst_element_factory_make ("capsfilter", "caps1");
  c_data->convert     = gst_element_factory_make ("nvvidconv", "convert");
  c_data->capsfilter2 = gst_element_factory_make ("capsfilter", "caps2");
  // c_data->queue       = gst_element_factory_make ("queue", "_queue");
  c_data->h264enc     = gst_element_factory_make ("nvv4l2h264enc", "h264_enc");
  c_data->parse       = gst_element_factory_make ("h264parse", "_parse");
  c_data->qtmux       = gst_element_factory_make ("qtmux", "_qtmux"); /* MP4 muxer */
  c_data->sink        = gst_element_factory_make ("filesink", "sink");

  /* Create the empty pipeline */
  c_data->pipeline = gst_pipeline_new ("test-pipeline");

  if (!c_data->pipeline || !c_data->app_source || !c_data->capsfilter1 ||
   !c_data->convert || !c_data->capsfilter2 || /*!c_data->queue || */!c_data->h264enc || !c_data->parse || !c_data->qtmux || !c_data->sink) {
    g_printerr ("Not all elements could be created.\n");
    return;
  }

  /* Configure appsrc */
  // g_object_set (c_data->app_source, "do-timestamp", TRUE, NULL);
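  /* Note (an assumption, not in the original code): appsrc defaults to
   * GST_FORMAT_BYTES; when supplying your own PTS it is common to switch
   * the source to time format so downstream elements interpret the
   * timestamps correctly, e.g.:
   *   g_object_set (c_data->app_source, "format", GST_FORMAT_TIME, NULL);
   */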

  /* Configure capsfilter */
  caps = gst_caps_new_simple("video/x-raw",
                "width", G_TYPE_INT, c_data->width,
                "height", G_TYPE_INT, c_data->height,
                "framerate", GST_TYPE_FRACTION, 30, 1,
                "format", G_TYPE_STRING, "YUY2",NULL);
  g_object_set (c_data->capsfilter1, "caps", caps, NULL);
  gst_caps_unref(caps);

  /* GStreamer-CRITICAL */
  // caps = gst_caps_new_simple("video/x-raw(memory:NVMM)",
  //             "format", G_TYPE_STRING, "I420",NULL);
  // g_object_set (c_data->capsfilter2, "caps", caps, NULL);
  // gst_caps_unref(caps);
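  /* The CRITICAL above happens because gst_caps_new_simple() takes a plain
   * media type, while "(memory:NVMM)" is a caps *feature*. A sketch of the
   * usual fix (untested here) is to parse the full caps string instead:
   *   caps = gst_caps_from_string ("video/x-raw(memory:NVMM), format=(string)I420");
   *   g_object_set (c_data->capsfilter2, "caps", caps, NULL);
   *   gst_caps_unref (caps);
   */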

  /* Configure nvv4l2h264enc */
  /*
     preset-level:
       (0): DisablePreset    - Disable HW-Preset
       (1): UltraFastPreset  - UltraFastPreset for high perf
       (2): FastPreset       - FastPreset
       (3): MediumPreset     - MediumPreset
       (4): SlowPreset       - SlowPreset
     profile:
       (0): Baseline         - GST_V4L2_H264_VIDENC_BASELINE_PROFILE
       (2): Main             - GST_V4L2_H264_VIDENC_MAIN_PROFILE
       (4): High             - GST_V4L2_H264_VIDENC_HIGH_PROFILE
     (the full property list can be dumped with gst-inspect-1.0 nvv4l2h264enc)
  */
  g_object_set(G_OBJECT(c_data->h264enc), "preset-level", 4, NULL);           /* (4): SlowPreset */
  g_object_set(G_OBJECT(c_data->h264enc), "profile", 0, NULL);                /* (0): Baseline */

  g_object_set(G_OBJECT(c_data->h264enc), "maxperf-enable", 1, NULL);         /* enable maximum performance mode */
  g_object_set(G_OBJECT(c_data->h264enc), "capture-io-mode", 2, NULL);
  g_object_set(G_OBJECT(c_data->h264enc), "output-io-mode", 5, NULL);

  g_object_set(G_OBJECT(c_data->h264enc), "iframeinterval", 10, NULL);        /* I-frame interval */
  g_object_set(G_OBJECT(c_data->h264enc), "idrinterval", 10, NULL);           /* IDR interval */
  g_object_set(G_OBJECT(c_data->h264enc), "control-rate", 0, NULL);           /* rate control (0: variable, 1: constant bitrate) */
  g_object_set(G_OBJECT(c_data->h264enc), "bitrate", 8*1024*1024, NULL);      /* target bitrate */
  g_object_set(G_OBJECT(c_data->h264enc), "peak-bitrate", 8*1024*1024, NULL); /* peak bitrate */
  g_object_set(G_OBJECT(c_data->h264enc), "num-B-Frames", 0, NULL);           /* no B-frames */
  g_object_set(G_OBJECT(c_data->h264enc), "insert-sps-pps", 1, NULL);         /* insert SPS/PPS at each IDR */
  g_object_set(G_OBJECT(c_data->h264enc), "insert-aud", 0, NULL);             /* do not insert AUD */
  g_object_set(G_OBJECT(c_data->h264enc), "poc-type", 2, NULL);               /* decode order equals display order */
  g_object_set(G_OBJECT(c_data->h264enc), "disable-cabac", 1, NULL);          /* use CAVLC entropy coding instead of CABAC */

  g_object_set(G_OBJECT(c_data->sink), "location", c_data->file, NULL);
  g_object_set(G_OBJECT(c_data->sink), "sync", FALSE, NULL);

  /* Link all elements that can be automatically linked because they have "Always" pads */
  gst_bin_add_many (GST_BIN (c_data->pipeline), c_data->app_source, c_data->capsfilter1, c_data->convert, c_data->capsfilter2, 
   c_data->h264enc, c_data->parse, c_data->qtmux, c_data->sink, NULL);
  if (gst_element_link_many (c_data->app_source, c_data->capsfilter1, c_data->convert, c_data->capsfilter2,
   c_data->h264enc, c_data->parse, c_data->qtmux, c_data->sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (c_data->pipeline);
    return;
  }
  
  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (c_data->pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, c_data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, c_data);
  gst_object_unref (bus);
}

static void *_gstreamer_h264enc_run (void *arg)
{
  CustomData *c_data = (CustomData *)arg;
  /* Start playing the pipeline */
  gst_element_set_state (c_data->pipeline, GST_STATE_PLAYING);

  /* Create a GLib Main Loop and set it to run; it exits on EOS or error */
  c_data->main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (c_data->main_loop);

  /* Free resources */
  gst_element_set_state (c_data->pipeline, GST_STATE_NULL);
  gst_object_unref (c_data->pipeline);
  g_main_loop_unref (c_data->main_loop);

  return NULL;
}
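
For completeness, a caller might use the module like this (a hypothetical sketch: get_camera_frame() is a placeholder for your V4L2 capture code, and CustomData comes from gst_yuv2mp4.h as above):

#include "gst_yuv2mp4.h"

extern void *get_camera_frame(void);   /* placeholder: returns one YUY2 frame */

int main(void)
{
  CustomData c_data;
  int w = 1280, h = 720;
  long size = w * h * 2;               /* YUY2 is 2 bytes per pixel */

  gstreamer_yuv2mp4_init(&c_data, w, h, size, "test.mp4");

  for (int i = 0; i < 300; i++)        /* about 10 s at 30 fps */
    gstreamer_yuv2mp4_send(&c_data, get_camera_frame());

  /* Sends EOS and joins the pipeline thread, finalizing the MP4 */
  gstreamer_yuv2mp4_deinit(&c_data);
  return 0;
}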