【Gstreamer 系列 5.2】ubuntu下的Gstreamer工程代码 2 - mp4与yuv视频的相互转换

1.mp4->yuv 视频

/**
  * @brief          Decode xx.mp4 to xx.yuv with the pipeline:
  *                 filesrc -> qtdemux -> h264parse -> omxh264dec -> filesink
  * @param *source  mp4 file address, example "./source.mp4"
  * @param *sink    yuv file address, example "./sink.yuv"
  * @return         true on success; false if an element could not be
  *                 created or the static links failed (resources are
  *                 released before returning)
  */
bool videoDecode(char *source, char *sink)
{
	GstBus *bus;
	GMainLoop *loop;

	GstElement *pipeline, *et_source, *et_sink, *et_qtdemux, *et_h264parse, *et_omxh264dec;

	/* create main loop; it runs until bus_call quits it on EOS/error */
	loop = g_main_loop_new(NULL, FALSE);

	/* create pipeline */
	pipeline = gst_pipeline_new("video-decode");

	/* create source/sink elements */
	et_source = gst_element_factory_make("filesrc", "file-source");
	et_sink = gst_element_factory_make("filesink", "file-sink");

	/* create demux / parse / decode elements */
	et_qtdemux = gst_element_factory_make("qtdemux", "work-qtdemux");
	et_h264parse = gst_element_factory_make("h264parse", "work-h264parse");
	et_omxh264dec = gst_element_factory_make("omxh264dec", "work-omxh264dec");

	if (!pipeline || !et_source || !et_sink ||
	    !et_qtdemux || !et_h264parse || !et_omxh264dec)
	{
		printf("One element could not be created\n");
		/* release whatever was created; the original leaked here */
		if (pipeline)
			gst_object_unref(GST_OBJECT(pipeline));
		g_main_loop_unref(loop);
		return false;
	}

	/* set input/output file locations */
	g_object_set(G_OBJECT(et_source), "location", source, NULL);
	g_object_set(G_OBJECT(et_sink), "location", sink, NULL);

	/* watch the bus so bus_call can stop the loop on EOS or error */
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus, bus_call, loop);
	gst_object_unref(bus);

	/* put all elements in the bin; the bin takes ownership of them */
	gst_bin_add_many(GST_BIN(pipeline), et_source, et_qtdemux, et_h264parse, et_omxh264dec, et_sink, NULL);

	/* link together - note that we cannot link the demuxer and
	 * parser yet, because the demuxer uses dynamic pads. For that,
	 * we set a pad-added signal handler. */
	if (!gst_element_link(et_source, et_qtdemux) ||
	    !gst_element_link_many(et_h264parse, et_omxh264dec, et_sink, NULL))
	{
		printf("Elements could not be linked\n");
		gst_object_unref(GST_OBJECT(pipeline));
		g_main_loop_unref(loop);
		return false;
	}
	g_signal_connect(et_qtdemux, "pad-added", G_CALLBACK(pad_link_next_pad), et_h264parse);

	printf("start videoDecode\n");

	/* Now set to playing and iterate. */
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run(loop);

	/* clean up nicely */
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));
	g_main_loop_unref(loop); /* the original leaked the GMainLoop */

	return true;
}

2.yuv->mp4 视频

/**
  * @brief  Encode xx.yuv to xx.mp4 with the pipeline:
  *         filesrc -> videoparse -> omxh264enc -> queue -> h264parse -> mp4mux -> filesink
  * @param *source:          yuv file address, example "./source.yuv"
  * @param *sink:            mp4 file address, example "./sink.mp4"
  * @param outPutWidth:      frame width of the raw input, in pixels
  * @param outPutHeight:     frame height of the raw input, in pixels
  * @param outPutFramerate:  framerate numerator (denominator fixed to 1)
  * @param outPutBitrate:    encoder target bitrate
  * @return bool             true on success; false if an element could not
  *                          be created or the links failed (resources are
  *                          released before returning)
  */
bool videoEncode(char *source, char *sink, int outPutWidth, int outPutHeight, int outPutFramerate, int outPutBitrate)
{
	GstBus *bus;
	GMainLoop *loop;

	GstElement *pipeline, *et_source, *et_sink, *et_videoparse, *et_omxh264enc, *et_queue, *et_h264parse, *et_mp4mux;

	/* create main loop; it runs until bus_call quits it on EOS/error */
	loop = g_main_loop_new(NULL, FALSE);

	/* create pipeline */
	pipeline = gst_pipeline_new("video-encode");

	/* create source/sink elements */
	et_source = gst_element_factory_make("filesrc", "file-source");
	et_sink = gst_element_factory_make("filesink", "file-sink");

	/* create videoparse element (describes the raw YUV stream) */
	et_videoparse = gst_element_factory_make("videoparse", "work-videoparse");

	/* create encode elements */
	et_omxh264enc = gst_element_factory_make("omxh264enc", "work-omxh264enc");
	et_queue = gst_element_factory_make("queue", "work-queue");
	et_h264parse = gst_element_factory_make("h264parse", "work-h264parse");
	et_mp4mux = gst_element_factory_make("mp4mux", "work-mp4mux");

	if (!pipeline || !et_source || !et_sink ||
	    !et_videoparse ||
	    !et_omxh264enc || !et_queue || !et_h264parse || !et_mp4mux)
	{
		printf("error One element could not be created\n");
		/* release whatever was created; the original leaked here */
		if (pipeline)
			gst_object_unref(GST_OBJECT(pipeline));
		g_main_loop_unref(loop);
		return false;
	}

	/* set input/output file locations */
	g_object_set(G_OBJECT(et_source), "location", source, NULL);
	g_object_set(G_OBJECT(et_sink), "location", sink, NULL);

	/* describe the raw input: format 23 is GST_VIDEO_FORMAT_NV12;
	 * "framerate" is a GstFraction, so g_object_set collects two ints
	 * (numerator, denominator) — here outPutFramerate/1 */
	g_object_set(et_videoparse, "format", 23, "width", outPutWidth, "height", outPutHeight, "framerate", outPutFramerate, 1, NULL);
	/* set encoder target bitrate */
	g_object_set(et_omxh264enc, "target-bitrate", outPutBitrate, NULL);

	/* watch the bus so bus_call can stop the loop on EOS or error */
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus, bus_call, loop);
	gst_object_unref(bus);

	/* put all elements in the bin; the bin takes ownership of them */
	gst_bin_add_many(GST_BIN(pipeline), et_source, et_videoparse, et_omxh264enc, et_queue, et_h264parse, et_mp4mux, et_sink, NULL);

	/* link the whole (static) chain; unlike decode, no dynamic pads here */
	if (!gst_element_link_many(et_source, et_videoparse, et_omxh264enc, et_queue, et_h264parse, et_mp4mux, et_sink, NULL))
	{
		printf("Elements could not be linked\n");
		gst_object_unref(GST_OBJECT(pipeline));
		g_main_loop_unref(loop);
		return false;
	}

	printf("start to videoEncode\n");

	/* Now set to playing and iterate. */
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run(loop);

	/* clean up nicely */
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));
	g_main_loop_unref(loop); /* the original leaked the GMainLoop */

	return true;
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
以下是Linux平台C语言利用GStreamer实现mp4格式视频播放的代码,包括管道配置,连接,以及动态连接element的pad。其中难点在于demux控件通过回调方式进行音视频的分流,连接解码器、sink输出。 ```c #include <gst/gst.h> typedef struct _CustomData { GstElement *pipeline; GstElement *source; GstElement *demuxer; GstElement *audio_decoder; GstElement *video_decoder; GstElement *audio_sink; GstElement *video_sink; GstElement *convert; GstElement *resample; GstElement *audio_queue; GstElement *video_queue; GstElement *video_convert; GstElement *video_scale; GstElement *video_filter; GstElement *audio_filter; GstElement *audio_convert; GstElement *audio_resample; GstElement *audio_sink2; GstElement *video_sink2; GstBus *bus; guint bus_watch_id; gboolean is_live; } CustomData; static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data); int main(int argc, char *argv[]) { CustomData data; GstStateChangeReturn ret; GstPad *audio_sink_pad = NULL; GstPad *video_sink_pad = NULL; GstPad *audio_src_pad = NULL; GstPad *video_src_pad = NULL; GstCaps *audio_caps = NULL; GstCaps *video_caps = NULL; GstCaps *filter_caps = NULL; GstCaps *convert_caps = NULL; GstCaps *scale_caps = NULL; GstCaps *resample_caps = NULL; GstCaps *audio_convert_caps = NULL; GstCaps *audio_resample_caps = NULL; GstCaps *audio_filter_caps = NULL; GstCaps *video_filter_caps = NULL; GstCaps *video_convert_caps = NULL; GstCaps *video_scale_caps = NULL; /* Initialize GStreamer */ gst_init (&amp;argc, &amp;argv); /* Initialize our data structure */ memset (&amp;data, 0, sizeof (data)); data.is_live = FALSE; /* Create the elements */ data.source = gst_element_factory_make ("filesrc", "source"); data.demuxer = gst_element_factory_make ("qtdemux", "demuxer"); data.audio_decoder = gst_element_factory_make ("decodebin", "audio_decoder"); data.video_decoder = gst_element_factory_make ("decodebin", "video_decoder"); data.convert = gst_element_factory_make ("videoconvert", "convert"); data.resample = gst_element_factory_make ("audioresample", "resample"); data.audio_queue = 
gst_element_factory_make ("queue", "audio_queue"); data.video_queue = gst_element_factory_make ("queue", "video_queue"); data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink"); data.video_sink = gst_element_factory_make ("autovideosink", "video_sink"); data.video_convert = gst_element_factory_make ("videoconvert", "video_convert"); data.video_scale = gst_element_factory_make ("videoscale", "video_scale"); data.video_filter = gst_element_factory_make ("capsfilter", "video_filter"); data.audio_filter = gst_element_factory_make ("capsfilter", "audio_filter"); data.audio_convert = gst_element_factory_make ("audioconvert", "audio_convert"); data.audio_resample = gst_element_factory_make ("audioresample", "audio_resample"); data.audio_sink2 = gst_element_factory_make ("alsasink", "audio_sink2"); data.video_sink2 = gst_element_factory_make ("xvimagesink", "video_sink2"); /* Create the empty pipeline */ data.pipeline = gst_pipeline_new ("test-pipeline"); if (!data.pipeline || !data.source || !data.demuxer || !data.audio_decoder || !data.video_decoder || !data.convert || !data.resample || !data.audio_queue || !data.video_queue || !data.audio_sink || !data.video_sink || !data.video_convert || !data.video_scale || !data.video_filter || !data.audio_filter || !data.audio_convert || !data.audio_resample || !data.audio_sink2 || !data.video_sink2) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Build the pipeline */ gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.demuxer, data.audio_decoder, data.video_decoder, data.convert, data.resample, data.audio_queue, data.video_queue, data.audio_sink, data.video_sink, data.video_convert, data.video_scale, data.video_filter, data.audio_filter, data.audio_convert, data.audio_resample, data.audio_sink2, data.video_sink2, NULL); if (gst_element_link (data.source, data.demuxer) != TRUE || gst_element_link_many (data.audio_queue, data.audio_convert, data.audio_resample, data.audio_filter, 
data.audio_sink2, NULL) != TRUE || gst_element_link_many (data.video_queue, data.video_convert, data.video_scale, data.video_filter, data.video_sink2, NULL) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (data.pipeline); return -1; } /* Set the URI to play */ g_object_set (data.source, "location", argv[1], NULL); /* Connect to the pad-added signal */ g_signal_connect (data.demuxer, "pad-added", G_CALLBACK (pad_added_handler), &amp;data); /* Start playing */ ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (data.pipeline); return -1; } /* Wait until error or EOS */ data.bus = gst_element_get_bus (data.pipeline); data.bus_watch_id = gst_bus_add_watch (data.bus, bus_call, loop); gst_object_unref (data.bus); g_main_loop_run (loop); /* Free resources */ gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_object_unref (data.pipeline); return 0; } static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) { GstPad *sink_pad = NULL; GstPadLinkReturn ret; GstCaps *new_pad_caps = NULL; GstStructure *new_pad_struct = NULL; const gchar *new_pad_type = NULL; g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src)); /* Check the new pad's type */ new_pad_caps = gst_pad_get_current_caps (new_pad); new_pad_struct = gst_caps_get_structure (new_pad_caps, 0); new_pad_type = gst_structure_get_name (new_pad_struct); if (g_str_has_prefix (new_pad_type, "audio/x-raw")) { sink_pad = gst_element_get_static_pad (data->audio_queue, "sink"); g_print ("Linking audio demuxer to audio queue.\n"); } else if (g_str_has_prefix (new_pad_type, "video/x-raw")) { sink_pad = gst_element_get_static_pad (data->video_queue, "sink"); g_print ("Linking video demuxer to video queue.\n"); } else { g_print ("It has type '%s' which is not raw audio. 
Ignoring.\n", new_pad_type); goto exit; } /* If we have an unlinked sink pad, link it now */ if (gst_pad_is_linked (sink_pad)) { g_print ("We are already linked. Ignoring.\n"); goto exit; } /* Attempt the link */ ret = gst_pad_link (new_pad, sink_pad); if (GST_PAD_LINK_FAILED (ret)) { g_print ("Type is '%s' but link failed.\n", new_pad_type); } else { g_print ("Link succeeded (type '%s').\n", new_pad_type); } exit: /* Unreference the new pad's caps, if we got them */ if (new_pad_caps != NULL) gst_caps_unref (new_pad_caps); /* Unreference the sink pad */ if (sink_pad != NULL) gst_object_unref (sink_pad); } ```

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值