gstreamer通过filesink实现录像

基于gstreamer与GTK的函数封装,可直接与GtkWidget事件绑定调用。实现了触发事件开始视频录像,再次触发结束录制并保存。注意gstreamer节点tee的绑定与设置。

/* Recording-branch bookkeeping: the owning pipeline, the tee the record
 * branch attaches to, the dynamically created branch elements
 * (queue ! matroskamux ! filesink), and the pads used to (un)link it. */
typedef struct _MediaData
{
    GstElement *pipeline;                  /* pipeline that owns all elements below */
    GstElement *tee_video_stream ;         /* tee whose request pad feeds the record branch */
    GstElement *videomux, *record_queue,*file_sink;  /* record branch: queue ! matroskamux ! filesink */
    GstPad *queue_record_pad, *tee_record_pad;       /* pads linking tee src -> record queue sink */
    GstPad *queue_capture_pad, *tee_capture_pad;     /* capture-branch pads (not used in this snippet) */
} MediaData;

/* Application-wide state shared with the GTK callbacks. */
typedef struct _AppData
{
    GstElement *pipeline_app;  /* main application pipeline — presumably the same object as mediaData.pipeline; confirm against setup code */
    GstElement *video_sink;    /* display sink of the live preview */
    MediaData  mediaData;      /* dynamic recording-branch state (see above) */
} AppData;

/* Single global application context read by video_recording_cb(). */
static AppData appdata;
/* Toggle flag: true while a recording branch is attached to the pipeline. */
static bool video_record_enable  = false;

/*
 * GTK button callback that toggles video recording on a running pipeline.
 *
 * First press:  requests a src pad from the tee, builds a
 * queue ! matroskamux ! filesink branch, adds it to the pipeline, syncs
 * its state with the parent and links it to the tee.
 * Second press: unlinks the branch, pushes EOS through it so the muxer
 * can finalize the file, tears the elements down and releases the tee
 * request pad.
 *
 * @param button  the GTK button that fired (unused)
 * @param data    application data passed at signal connect time (unused)
 * @return 1 always (the "clicked" signal ignores the return value)
 */
static int video_recording_cb (GtkButton * button, CustomData * data)
{
    static int record_time = 1;            /* monotonically numbers output files */
    MediaData *recordingData = &appdata.mediaData;

    if (video_record_enable == false)
    {
        printf("\nVideo Press!\n");
        button_operations((gpointer)DISABLE_ALLBUTTONS);

        /* BUGFIX: request the tee src pad only when STARTING a recording.
         * The original requested a fresh pad on every invocation, so the
         * stop path released a pad that was never linked and leaked the
         * linked request pad on each start/stop cycle. */
        GstPadTemplate *templ =
            gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (recordingData->tee_video_stream), "src_%u");
        recordingData->tee_record_pad =
            gst_element_request_pad (recordingData->tee_video_stream, templ, NULL, NULL);

        recordingData->record_queue = gst_element_factory_make ("queue", "record_queue");
        recordingData->videomux     = gst_element_factory_make ("matroskamux", "video_mux");
        recordingData->file_sink    = gst_element_factory_make ("filesink", "file_sink");
        if (!recordingData->record_queue || !recordingData->videomux || !recordingData->file_sink)
        {
            g_printerr ("Could not create recording elements\n");
            return 1;
        }

        g_object_set (recordingData->videomux, "offset-to-zero", TRUE, NULL);
        g_object_set (recordingData->file_sink, "buffer-size", 67108864, NULL);  /* 64 MiB write buffer */
        g_object_set (recordingData->file_sink, "async", FALSE, NULL);

        /* BUGFIX: matroskamux emits Matroska, so name the file .mkv —
         * the original wrote .mp4, producing a mislabeled container.
         * snprintf replaces the unbounded sprintf. */
        char filename[1000];
        snprintf (filename, sizeof filename, "/home/123/test%d.mkv", record_time);
        g_object_set (G_OBJECT (recordingData->file_sink), "location", filename, NULL);
        record_time++;

        gst_bin_add_many (GST_BIN (recordingData->pipeline),
                          recordingData->record_queue,
                          recordingData->videomux,
                          recordingData->file_sink, NULL);
        if (gst_element_link_many (recordingData->record_queue,
                                   recordingData->videomux,
                                   recordingData->file_sink, NULL) != TRUE)
        {
            g_printerr ("Elements could not be linked in record branch\n");
            return 1;
        }

        /* The pipeline is already running; bring the new branch up to
         * the same state before data starts flowing into it. */
        gst_element_sync_state_with_parent (recordingData->record_queue);
        gst_element_sync_state_with_parent (recordingData->videomux);
        gst_element_sync_state_with_parent (recordingData->file_sink);

        /* Keep the (referenced) sink pad so the stop path can unlink and
         * unref the exact same pad — the original fetched it a second
         * time on stop and leaked one reference per recording. */
        recordingData->queue_record_pad = gst_element_get_static_pad (recordingData->record_queue, "sink");
        if (gst_pad_link (recordingData->tee_record_pad, recordingData->queue_record_pad) != GST_PAD_LINK_OK)
        {
            g_printerr ("Tee record pad could not be linked.\n");
        }

        video_record_enable = true;
    }
    else
    {
        printf("\nVideo Release!\n");

        /* BUGFIX: unlink first so no more data enters the branch, then
         * push EOS into it so matroskamux writes its index/duration and
         * finalizes the file.  The original set the elements to NULL
         * while still linked and without EOS, truncating the recording.
         * NOTE(review): for a fully robust shutdown the EOS should be
         * injected via a blocking pad probe and the teardown deferred
         * until the EOS message reaches the bus — confirm whether the
         * synchronous form below is adequate for this pipeline. */
        gst_pad_unlink (recordingData->tee_record_pad, recordingData->queue_record_pad);
        gst_pad_send_event (recordingData->queue_record_pad, gst_event_new_eos ());

        gst_element_set_state (recordingData->file_sink, GST_STATE_NULL);
        gst_element_set_state (recordingData->videomux, GST_STATE_NULL);
        gst_element_set_state (recordingData->record_queue, GST_STATE_NULL);

        /* gst_bin_remove() drops the bin's reference to each element. */
        gst_bin_remove (GST_BIN (recordingData->pipeline), recordingData->record_queue);
        gst_bin_remove (GST_BIN (recordingData->pipeline), recordingData->videomux);
        gst_bin_remove (GST_BIN (recordingData->pipeline), recordingData->file_sink);

        gst_element_release_request_pad (recordingData->tee_video_stream, recordingData->tee_record_pad);
        gst_object_unref (recordingData->queue_record_pad);
        gst_object_unref (recordingData->tee_record_pad);
        recordingData->queue_record_pad = NULL;   /* guard against dangling use */
        recordingData->tee_record_pad = NULL;

        video_record_enable = false;
    }
    return 1;
}
### 使用 GStreamer 的 `filesink` 元素保存数据

为了将媒体流保存到文件中,可以使用 GStreamer 中的 `filesink` 元素。此元素允许指定目标文件的位置并处理写入操作。

下面是一个简单的命令行例子,展示如何通过管道将视频流捕获并存储至本地文件:

```bash
gst-launch-1.0 videotestsrc ! x264enc ! mp4mux ! filesink location=test_video.mp4
```

上述命令创建了一个测试视频源 (`videotestsrc`) 并将其编码为 H.264 格式 (`x264enc`),之后利用 MP4 复用器 (`mp4mux`) 来准备最终输出格式,最后通过 `filesink` 将结果保存到了名为 `test_video.mp4` 的文件里[^1]。

对于更复杂的场景,比如当涉及到多个输入源或特定解码需求时,则可能需要调整参数来适应具体的应用环境。例如,在 NVIDIA DeepStream SDK 配置中提到的情况,如果要记录来自网络摄像机或其他实时源的数据,应该考虑设置适当的缓冲策略以及确保 GPU 加速组件正确初始化[^3]。

另外值得注意的是,在构建基于 C 或其他编程语言的应用程序时,可以通过调用 pkg-config 工具自动获取必要的编译标志和链接选项,从而简化开发过程。这有助于管理不同版本间的兼容性和依赖关系变化带来的影响[^2]。

#### Python 示例代码片段

这里提供一段 Python 脚本作为示范,它展示了怎样动态地建立一个包含 `filesink` 组件在内的 GStreamer 管线:

```python
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject

def main():
    # Initialize GStreamer
    Gst.init(None)

    pipeline = Gst.parse_launch(
        'uridecodebin uri=http://example.com/video.mp4 name=decode '
        '! queue ! x264enc ! mp4mux ! filesink location=output_file.mp4'
    )

    decode = pipeline.get_by_name('decode')

    bus = pipeline.get_bus()
    loop = GObject.MainLoop()

    def on_message(bus, message):
        t = message.type
        if t == Gst.MessageType.EOS:
            loop.quit()
        elif t == Gst.MessageType.ERROR:
            err, debug = message.parse_error()
            print(f"Error: {err}, Debug info: {debug}")
            loop.quit()

    bus.connect("message", on_message)

    # Start playing
    pipeline.set_state(Gst.State.PLAYING)

    try:
        loop.run()
    except KeyboardInterrupt:
        pass

    # Clean up
    pipeline.set_state(Gst.State.NULL)

if __name__ == '__main__':
    main()
```
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值