Stream Multiplexer: Stream Mux

1. Theory

The Stream Mux block multiplexes multiple input streams into a single output stream in a specified pattern.

Muxing N streams together produces an output stream that contains N0 items from the first stream, N1 items from the second, and so on, repeating:

 [N0, N1, N2, ..., Nm, N0, N1, ...]
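
To make the pattern concrete, below is a minimal plain-Python sketch of the interleaving rule (my own illustration, not the GNU Radio implementation): each cycle takes lengths[i] items from stream i, until any input runs out.

```python
# Minimal sketch of the Stream Mux interleaving rule
# (illustration only, not the actual GNU Radio implementation).
def mux_pattern(streams, lengths):
    iters = [iter(s) for s in streams]
    out = []
    try:
        while True:
            for it, n in zip(iters, lengths):
                for _ in range(n):
                    out.append(next(it))  # take n items from this stream
    except StopIteration:
        return out  # stop once an input is exhausted

# Two streams, lengths [1, 1]: one item from each per cycle.
print(mux_pattern([[0, 0, 0], [1, 1, 1]], [1, 1]))  # [0, 1, 0, 1, 0, 1]
```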

2. Demo

[Figure: the Stream Mux block in GRC]

2.2. Parameters

[Figure: Stream Mux parameter dialog]
Type: the item data type
Lengths: the number of items N taken from each input stream per cycle; e.g., 1,1 means two input streams with one item taken from each
Num Inputs: the number of input streams; this must match the number of entries in Lengths
Vec Length: the vector length of each item; defaults to 1 (see the Python sketch after this list)
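
The same setup can be written directly in Python. The following is a minimal sketch, assuming GNU Radio 3.8+ with the standard gr-blocks component; the constant source data and the [1, 1] lengths are chosen here just to make the interleaving visible and do not come from the original GRC file.

```python
# Minimal Python flowgraph using blocks.stream_mux
# (assumes GNU Radio 3.8+; sources and lengths are illustrative).
from gnuradio import gr, blocks

class StreamMuxDemo(gr.top_block):
    def __init__(self):
        gr.top_block.__init__(self, "Stream Mux Demo")
        src0 = blocks.vector_source_f([0.0] * 4, repeat=False)
        src1 = blocks.vector_source_f([1.0] * 4, repeat=False)
        # Lengths [1, 1]: two inputs, one item from each per cycle,
        # matching Num Inputs = 2 in the parameter dialog.
        mux = blocks.stream_mux(gr.sizeof_float, [1, 1])
        self.snk = blocks.vector_sink_f()
        self.connect(src0, (mux, 0))
        self.connect(src1, (mux, 1))
        self.connect(mux, self.snk)

if __name__ == "__main__":
    tb = StreamMuxDemo()
    tb.run()
    print(tb.snk.data())  # expect (0.0, 1.0, 0.0, 1.0, ...)
```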

2.3. GRC

[Figure: GRC flowgraph for the Stream Mux demo]
Flowgraph download: Stream-Mux.grc

2.4. Results

1. Time-domain plot: QT GUI Time Sink
   [Figure: QT GUI Time Sink output]

3. Related Blocks

1: Random Source
2: Time-domain plot: QT GUI Time Sink
3: Type conversion: Char To Float

Reference Links

1: Stream_Mux
