二、Gstreamer splitmuxsink

该元素包装muxer和sink,并在mux内容即将跨越最大尺寸的最大时间阈值时启动一个新文件,在视频关键帧边界分割。只有一个输入视频流可以与任意多的音频和字幕流混接。

默认情况下,它使用mp4mux和filesink,但可以通过muxersink属性更改它们。

最小文件大小是1 GOP,但是,如果任意两个关键帧之间的距离大于限制,则限制可能会超过。

如果视频流可用,则分割过程由视频流内容驱动,视频流必须包含关闭的GOPs,以便输出文件的部分能够正确地单独播放。在没有视频流的情况下,第一个可用的流用作同步的参考。

在async-finalize模式下,当超过阈值时,旧的muxer和sink从管道断开连接,然后离开以异步方式完成文件,并创建一个新的muxer和sink来继续处理下一个片段。因此,使用muxer-factory和sink-factory属性以及muxer-properties和sink-properties来构造新对象,而不是使用muxer和sink对象。

Example pipelines

从测试源捕获的视频流,并将其融合到ISO mp4文件中,根据需要分割以限制大小/持续时间为10秒和1MB的最大大小。

gst-launch-1.0 -e videotestsrc ! video/x-raw,width=320,height=240 ! videoconvert ! queue ! timeoverlay ! x264enc key-int-max=10 ! h264parse ! splitmuxsink location=video%02d.mov max-size-time=10000000000 max-size-bytes=1000000

记录从v4l2设备捕获的视频流,并将其绑定到可流的Matroska文件中,根据需要分割以限制大小/持续时间为10秒。每个文件都将异步完成。

gst-launch-1.0 -e v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! videoconvert ! queue ! timeoverlay ! x264enc key-int-max=10 ! h264parse ! splitmuxsink location=video%02d.mkv max-size-time=10000000000 muxer-factory=matroskamux muxer-properties=“properties,streamable=true”

记录10帧到mp4文件,使用muxer-pad-map在splitmuxsink sink pad和它将交付到的相应muxer pad之间进行显式映射。

gst-launch-1.0 videotestsrc num-buffers=10 ! jpegenc ! .video splitmuxsink muxer=qtmux muxer-pad-map=x-pad-map,video=video_1 location=test%05d.mp4 -v

信号

format-location

/**
 * fragment_id 是片段ID,比如设定了最大文件数,这个ID就会这[0,max_num_files]循环
*/
gchararray
format_location_callback (GstElement * splitmux,
                          guint fragment_id,
                          gpointer udata)

返回值:下一个输出文件使用的位置。这必须是一个新分配的字符串,当splitmuxsink元素不再需要它时,它将用g_free释放,所以使用g_strdup或g_strdup_printf或类似的函数来分配它。

format_location_full

/* 多了GstSample,这里面包含了第一个Buffer */
gchararray
format_location_full_callback (GstElement * splitmux,
                               guint fragment_id,
                               GstSample * first_sample,
                               gpointer udata)
#include<gst/gst.h>

const int FILES_NUMBER = 12;

typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *h264_depay;
  GstElement *queue;
  GstElement *h264_parse;
  GstElement *splitmuxsink;
  GMainLoop *loop;
} CustomData;

static const char *rtsp_url = "rtsp://admin:yangquan123@192.168.10.7:554/Streaming/Channels/101";
static const char *filename = "cam02";
static guint64 max_size_time = 60 *GST_SECOND;

// Handler for the pad-added signal 
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

//handler for formatting properly the file saved
static gchar* formatted_file_saving_handler(GstChildProxy *splitmux, guint fragment_id);
					    
// gpointer is a void
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data);

int main(int argc, char **argv){
  GOptionContext *context;
  GError *error = NULL;
  CustomData data;
  GstBus *bus;
  GstStateChangeReturn ret;

  char **filenames = g_new0 (char *, FILES_NUMBER);
  for (int i = 0; i < FILES_NUMBER; i++) {
    filenames[i] = g_new0 (gchar, 30);
  }

  // Initialize GStreamer
  gst_init (&argc, &argv);

  // Create the elements
  data.source = gst_element_factory_make ("rtspsrc", "source");
  data.h264_depay = gst_element_factory_make("rtph264depay", "depay");
  data.queue = gst_element_factory_make("queue", "queue");
  data.h264_parse = gst_element_factory_make("h264parse", "parse");
  data.splitmuxsink = gst_element_factory_make("splitmuxsink", "splitmuxsink");
  
  // Create the empty pipeline
  data.pipeline = gst_pipeline_new ("multifiles_saving-pipeline");


  if (!data.pipeline || !data.source || !data.queue ||\
      !data.h264_depay || !data.h264_parse || !data.splitmuxsink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  //Build the pipeline. Note that we are NOT linking the source at this
  //point. We will do it later
  gst_bin_add_many (GST_BIN (data.pipeline),
		    data.source, data.h264_depay, data.queue, data.h264_parse,
		    data.splitmuxsink,
		    NULL);

  if( !gst_element_link_many(data.h264_depay,
			     data.h264_parse,
           data.queue,
			     data.splitmuxsink,
			     NULL) ){
    g_printerr ("Some elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  //Set the rtsp location and the latency in one step
  g_object_set (data.source, "location", rtsp_url, "latency", 0, NULL);

  //this call is necessary because of a bug: sometime the mp4mux can not multiplex
  //the stream when onsecutive buffers have the same timestamp.
  //This can occur if h264parse receives a frame with invalid timestamp so it then guesses a timestamp.
  //this way the pts is computed  
  // gst_base_parse_set_pts_interpolation( (GstBaseParse*)data.h264_parse, true );
  
  //GstElement *muxer = gst_element_factory_make("matroskamux", "matroskamux");
  //cyclically save 5 video seconds 12 times. values lower than 5 seconds cause a segmentation fault
  g_print("max size time: %" G_GUINT64_FORMAT "\n", max_size_time);
  g_object_set (data.splitmuxsink, "location", "video%02d.mp4",
		"max-size-time", max_size_time,
		//"max-size-bytes", 10 *500,
		"max-files", FILES_NUMBER,
		//"muxer", muxer,
		NULL);

  // Connect to the pad-added signal 
  g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
  //g_signal_connect( data.splitmuxsink, "format-location", G_CALLBACK (formatted_file_saving_handler), NULL);

  // Start playing 
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  // Listen to the bus 
  bus = gst_element_get_bus (data.pipeline);
  // GMainLoop *loop;
  guint bus_watch_id;
  //let's create a GMainLoop structure:
  //NULL as first parameter means that the default context will be used
  data.loop = g_main_loop_new (NULL, FALSE);
  bus_watch_id = gst_bus_add_watch (bus, bus_call, &data);
  g_print ("Running...\n");
  //the main thread stops it till g_main_loop_quit() is called
  //when this event occurs the g_main_loop_run returns
  g_main_loop_run (data.loop);

  //Free resources 
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  g_source_remove (bus_watch_id);
  //deallocating filenames
  g_strfreev (filenames);

  return 0;

  //when there is an error trying to reconnect
  // //wating for 10 seconds
  // g_print("[multifiles_saving::main]. let's wait 10 seconds...\n");
  // usleep(10 * 1000000);
  // g_print("[multifiles_saving::main]. setting state...\n");
  // ret = gst_element_set_state (data.pipeline, GST_STATE_NULL);
  // if (ret == GST_STATE_CHANGE_FAILURE) 
  //   g_printerr ("[multifiles_saving::main]. Unable to set the pipeline to the null state.\n");
  // else{
  //   g_printerr ("[multifiles_saving::main]. changing state in null succeeded.\n");
  //   g_printerr("[multifiles_saving::main]. restoring format-location signal\n");
  //   g_signal_connect( data.splitmuxsink, "format-location", G_CALLBACK (formatted_file_saving_handler), NULL);
  // }
  // g_print("trying to reset the pipeline in playing state\n");
  // ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  // if (ret == GST_STATE_CHANGE_FAILURE) 
  //   g_printerr ("[multifiles_saving::main]. Unable to set the pipeline to the playing state.\n");
  // else
  //   g_printerr ("[multifiles_saving::main]. change state in play succeeded.\n");
  // g_main_loop_run(data.loop);
  // // Free resources 
  // gst_object_unref (bus);
  // gst_element_set_state (data.pipeline, GST_STATE_NULL);
  // gst_object_unref (data.pipeline);
  // g_source_remove (bus_watch_id);
  // //deallocating filenames
  // for(int i = 0; i < FILES_NUMBER; ++i)
  //   delete[] filenames[i];
  // return 0;
}

static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *h264_depay_sink_pad = gst_element_get_static_pad (data->h264_depay, "sink");

  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  // If our audio and video converter are already linked, we have nothing to do here
  if(gst_pad_is_linked(h264_depay_sink_pad)){
    g_print ("We are already linked. Ignoring.\n");
    goto exit;
  }

  // Check the new pad's type 
  new_pad_caps = gst_pad_get_current_caps (new_pad);
  // Caps (capabilities) are composed of an array of GstStructure
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  // Each structure has a name and a set of property
  new_pad_type = gst_structure_get_name (new_pad_struct);
  g_print ("Pad received is of type: '%s'\n", new_pad_type);
  // the name is important to understand what sink cap we need to link the element
  if( g_str_has_prefix (new_pad_type, "application/x-rtp") ) {
    g_print("trying to link the rtpsrc' src and h264depay's sink\n");
    // Attempt the link 
    ret = gst_pad_link_full (new_pad, h264_depay_sink_pad, GST_PAD_LINK_CHECK_CAPS);
    if (GST_PAD_LINK_FAILED (ret)) {
      g_print ("Type is '%s' but link failed.\n", new_pad_type);
    } else {
      g_print ("Link succeeded (type '%s').\n", new_pad_type);
    }
  }

 exit:
  // Unreference the new pad's caps, if we got them 
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  // Unreference the sink pad 
  gst_object_unref(h264_depay_sink_pad);
}

gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data){
  CustomData *CustomData_ptr = (CustomData *)data;
  GMainLoop *loop = CustomData_ptr->loop;
  //g_print( "got message %s %s \n", GST_MESSAGE_TYPE_NAME(msg), gst_structure_get_name ( gst_message_get_structure(msg) ) );

  switch (GST_MESSAGE_TYPE (msg)) {

  case GST_MESSAGE_EOS:
    g_print ("[multifiles_saving::bus_call]. End of stream\n");
    //g_main_loop_quit (loop);
    break;

  case GST_MESSAGE_ERROR: {
    gchar  *debug;
    GError *error;
    // static bool trying_to_reconnect = false;

    // if(!trying_to_reconnect){
    //   trying_to_reconnect = true;
    //   gst_message_parse_error (msg, &error, &debug);
    //   g_free (debug);

    //   g_printerr ("[multifiles_saving::bus_call]. Error: %s\n", error->message);
    //   g_error_free (error);

    //   //g_main_loop_quit (loop);
    //   gst_element_set_state (CustomData_ptr->pipeline, GST_STATE_READY);
    //   //wating for 30 seconds
    //   g_print("[multifiles_saving::bus_call]. let's wait 10 seconds to give time to reconnection...");
    //   usleep(10 * 1000000);
    //   g_print("trying to reset the pipeline in playing state\n");
    //   GstStateChangeReturn ret = gst_element_set_state (CustomData_ptr->pipeline, GST_STATE_PLAYING);
    //   if (ret == GST_STATE_CHANGE_FAILURE) 
    // 	g_printerr ("[multifiles_saving::bus_call]. Unable to set the pipeline to the playing state.\n");
    //   else{
    // 	g_printerr ("[multifiles_saving::bus_call]. change state succeeded.\n");
    // 	trying_to_reconnect = false;
    //   }
      
    // }else{
    //   gst_message_parse_error (msg, &error, &debug);
    //   g_free (debug);
    //   g_error_free (error);
    // }


    gst_message_parse_error (msg, &error, &debug);
    g_free (debug);
    
    g_printerr ("[multifiles_saving::bus_call]. Error: %s\n", error->message);
    g_error_free (error);
    
    //g_main_loop_quit (loop);
    
    //this code snippet work
    //wating for 10 seconds
    g_print("[multifiles_saving::bus_call]. let's wait 10 seconds...\n");
    g_usleep(10 * 1000000);
    //if put ready, after reconnection it starts from the last fragment-id
    //if put null,  after reconnection it starts from the last fragment-id
    gst_element_send_event(CustomData_ptr->pipeline, gst_event_new_eos());
    g_usleep(1 * 1000000);
    g_print("[multifiles_saving::bus_call]. setting state...\n");
    GstStateChangeReturn ret = gst_element_set_state (CustomData_ptr->pipeline, GST_STATE_NULL);
    if (ret == GST_STATE_CHANGE_FAILURE) 
      g_printerr ("[multifiles_saving::bus_call]. Unable to set the pipeline to the NULL state.\n");
    else{
      g_printerr ("[multifiles_saving::bus_call]. changing state in ready succeeded.\n");
      g_printerr("[multifiles_saving::bus_call]. restoring format-location signal\n");
      //GstPad *split_sink= gst_element_get_static_pad (CustomData_ptr->splitmuxsink, "sink");
      //GstPadTemplate *templ = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(CustomData_ptr->splitmuxsink), "video");
      //GstPad *split_pad = gst_element_request_pad(CustomData_ptr->splitmuxsink, templ, NULL, NULL);
      GstPad *split_pad = gst_element_get_static_pad(CustomData_ptr->splitmuxsink,"video");
      GstPad *h264_src = gst_element_get_static_pad (CustomData_ptr->h264_parse, "src");
      gst_pad_unlink (h264_src, split_pad);
      gst_object_unref(CustomData_ptr->splitmuxsink);
      gst_bin_remove(GST_BIN (CustomData_ptr->pipeline), CustomData_ptr->splitmuxsink);      
      CustomData_ptr->splitmuxsink = gst_element_factory_make("splitmuxsink", "splitmuxsink");
      gst_bin_add(GST_BIN(CustomData_ptr->pipeline), CustomData_ptr->splitmuxsink);
      if( !gst_element_link(CustomData_ptr->h264_parse, CustomData_ptr->splitmuxsink) )
	g_printerr ("parse and split nont linked");
      g_signal_connect( CustomData_ptr->splitmuxsink, "format-location", G_CALLBACK (formatted_file_saving_handler), NULL);
    }
    g_print("trying to reset the pipeline in playing state\n");
    ret = gst_element_set_state (CustomData_ptr->pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) 
      g_printerr ("[multifiles_saving::bus_call]. Unable to set the pipeline to the playing state.\n");
    else{
      g_printerr ("[multifiles_saving::bus_call]. change state in play succeeded.\n");
      g_main_loop_run (CustomData_ptr->loop);
    }
    break;
  }
  case GST_MESSAGE_ELEMENT: {
    //g_print("Received a gst_message_element\n");
    break;
  }
  default:
    break;
  }
  
  return TRUE;
}

gchar* formatted_file_saving_handler(GstChildProxy *splitmux, guint fragment_id){
  //  boost::ignore_unused(splitmux);
  // //D( g_print("[formatted_file_saving_handler]. fragment_id: %d\n", fragment_id) );
  // D( Time_spent<>() );  
  // time_t rawtime;
  // struct tm * timeinfo;
  // char filename_buffer[30];
  
  // time (&rawtime);
  // timeinfo = localtime (&rawtime);

  // //making time string in the format YYYY-mm-dd_hh:mm:ss
  // strftime (filename_buffer, 30, "%F_%T", timeinfo);

  // //making filename string in the forma YYYY-mm-dd_hh:mm:ss_filename_fragment_id
  // sprintf(filename_buffer, "%s_%s_%d.mp4", filename_buffer, filename, fragment_id);

  // //if int the filenames array at the fragment_id position we already had
  // //a filename we must delete it
  // if( !( (filenames[fragment_id]).empty() ) )
  //   if( remove( (filenames[fragment_id] ).c_str() ) ) 
  //     perror( "Error deleting file" );

  // filenames[fragment_id] = filename_buffer;

  // std::time_t now;
  // struct tm *timeinfo;
  
  // std::time (&now);
  // timeinfo = std::localtime (&now);

  // //if int the filenames array at the fragment_id position we already had
  // //a filename we must delete it
  // if( *filenames[fragment_id] )
  //   if( remove( (filenames[fragment_id] ) ) ) 
  //     perror( "Error deleting file" );

  // //making time string in the format YYYY-mm-dd_hh:mm:ss
  // strftime (filenames[fragment_id], 30, "%F_%T", timeinfo);

  // //making filename string in the forma YYYY-mm-dd_hh:mm:ss_filename_fragment_id
  // sprintf( filenames[fragment_id], "%s_%s_%d.mp4", filenames[fragment_id], filename, fragment_id );

  // D( std::cout << filenames[fragment_id] << std::endl );
  //this function allocates the memory to hold the result.
  //The returned string should be freed with g_free() when no longer needed
  g_print ("video%02d.mp4\n", fragment_id);
  //This should be done by the function caller, I guess
  return g_strdup_printf("video%02d.mp4", fragment_id );
}

参考:splitmuxsink
参考:multifiles_saving.cc

  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值