GStreamer: remote camera capture, UDP transmission, local display, and saving to an AVI file (the receive/display/save side)

After two weeks of effort I have finally finished this C program, which uses GStreamer to capture video from a remote camera, transmit it over UDP, display it locally, and save it to an AVI file. I am sharing it here; comments and corrections are welcome.

One known problem: the recording time is short, but the saved file plays back for much longer than that. If anyone knows the cause, please point me to a fix; thanks in advance! (A guess at a possible fix is sketched after the program listing below.)


recv-display-avifile:

gst-launch udpsrc \
    caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)YCbCr-4:2:0, depth=(string)8, width=(string)320, height=(string)240, colorimetry=(string)SMPTE240M, ssrc=(guint)4294234526, payload=(int)96, clock-base=(guint)520513122, seqnum-base=(guint)28177" \
    port=9996 \
  ! queue ! rtpvrawdepay ! queue ! tee name="splitter" \
  splitter. ! queue ! ffmpegcolorspace ! autovideosink \
  splitter. ! queue ! ffmpegcolorspace ! jpegenc ! avimux ! filesink location=osug-udp-2.avi


C code:
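To build it (assuming, on my part, a GStreamer 0.10 installation with the base and good plugin sets and their development headers), something like gcc recv.c -o recv $(pkg-config --cflags --libs gstreamer-0.10) should work.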

#include <string.h>
#include <math.h>

#include <gst/gst.h>

/* the caps of the sender RTP stream. This is usually negotiated out of band with
 * SDP or RTSP. */
#define VIDEO_CAPS "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)YCbCr-4:2:0, depth=(string)8, width=(string)320, height=(string)240, colorimetry=(string)SMPTE240M"
//#define VIDEO_CAPS "application/x-rtp,media=video,clock-rate=9000,encoding-name=H264"


#define AVINAME "camera.avi" 
#define PORT 9996
#define VIDEO_SINK  "autovideosink"
/* the destination machine to send RTCP to. This is the address of the sender and
 * is used to send back the RTCP reports of this receiver. If the data is sent
 * from another machine, change this address. */
#define DEST_HOST "127.0.0.1"

/* print the stats of a source */
static void print_source_stats (GObject * source) {
  GstStructure *stats;
  gchar *str;

  g_return_if_fail (source != NULL);

  /* get the source stats */
  g_object_get (source, "stats", &stats, NULL);

  /* simply dump the stats structure */
  str = gst_structure_to_string (stats);
  g_print ("source stats: %s\n", str);

  gst_structure_free (stats);
  g_free (str);
}

/* will be called when gstrtpbin signals on-ssrc-active. It means that an RTCP
 * packet was received from another source. */
static void on_ssrc_active_cb (GstElement * rtpbin, guint sessid, guint ssrc, GstElement * depay) {

  GObject *session, *isrc, *osrc;

  g_print ("got RTCP from session %u, SSRC %u\n", sessid, ssrc);

  /* get the right session */
  g_signal_emit_by_name (rtpbin, "get-internal-session", sessid, &session);

  /* get the internal source (the SSRC allocated to us, the receiver) */
  g_object_get (session, "internal-source", &isrc, NULL);
  print_source_stats (isrc);

  /* get the remote source that sent us RTCP */
  g_signal_emit_by_name (session, "get-source-by-ssrc", ssrc, &osrc);
  print_source_stats (osrc);
}

/* will be called when rtpbin has validated a payload that we can depayload */
static void
pad_added_cb (GstElement * rtpbin, GstPad * new_pad, GstElement * depay)
{
  GstPad *sinkpad;
  GstPadLinkReturn lres;

  g_print ("new payload on pad: %s\n", GST_PAD_NAME (new_pad));

  sinkpad = gst_element_get_static_pad (depay, "sink");
  g_assert (sinkpad);

  lres = gst_pad_link (new_pad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);

}


int main (int argc, char *argv[])
{
  GstElement *rtpbin, *rtpsrc, *rtcpsrc, *rtcpsink;
  GstElement *videodepay,
             *videoqueue,
             *videoconv,
             *videosink,
             *tee,
             *aviqueue,
             *aviconv,
             *avidenc,
             *avifmux,
             *avifilesink;


  GstElement *pipeline;
  GMainLoop *loop;
  GstCaps *caps;
  gboolean res1, res2;
  GstPadLinkReturn lres;
  GstPad *srcpad, *sinkpad;

  /* always init first */
  gst_init (&argc, &argv);

  /* the pipeline to hold everything */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);

  /* the udp sources and the sink we will use for RTP and RTCP */
  rtpsrc = gst_element_factory_make ("udpsrc", "rtpsrc");
  g_assert (rtpsrc);
  g_object_set (rtpsrc, "port", PORT, NULL);
  /* we need to set caps on the udpsrc for the RTP data */
  caps = gst_caps_from_string (VIDEO_CAPS);
  g_object_set (rtpsrc, "caps", caps, NULL);
  gst_caps_unref (caps);

  rtcpsrc = gst_element_factory_make ("udpsrc", "rtcpsrc");
  g_assert (rtcpsrc);
  g_object_set (rtcpsrc, "port", 9997, NULL);

  rtcpsink = gst_element_factory_make ("udpsink", "rtcpsink");
  g_assert (rtcpsink);
  g_object_set (rtcpsink, "port", 9999, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (rtcpsink, "async", FALSE, "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), rtpsrc, rtcpsrc, rtcpsink, NULL);

  /* the depayloader, the queues, and the tee that splits the stream into a
   * display branch and a recording branch */
  videodepay = gst_element_factory_make ("rtpvrawdepay", "videodepay");
  g_assert (videodepay);
  videoqueue = gst_element_factory_make ("queue", "videoqueue");
  g_assert (videoqueue);

  tee = gst_element_factory_make ("tee", "tee");
  g_assert (tee);

  aviqueue = gst_element_factory_make ("queue", "aviqueue");
  g_assert (aviqueue);

  /* the video format conversion and display */
  videoconv = gst_element_factory_make ("ffmpegcolorspace", "videoconv");
  g_assert (videoconv);

  videosink = gst_element_factory_make (VIDEO_SINK, "videosink");
  g_assert (videosink);

  /* the recording branch: convert, JPEG-encode, mux to AVI, write to disk */
  aviconv = gst_element_factory_make ("ffmpegcolorspace", "aviconv");
  g_assert (aviconv);
  avidenc = gst_element_factory_make ("jpegenc", "avidenc");
  g_assert (avidenc);
  avifmux = gst_element_factory_make ("avimux", "avifmux");
  g_assert (avifmux);
  avifilesink = gst_element_factory_make ("filesink", "avifilesink");
  g_assert (avifilesink);
  

  g_object_set (avifilesink, "location", AVINAME, NULL);

  /* add the depayloader, tee, display branch and recording branch to the
   * pipeline and link them */
  gst_bin_add_many (GST_BIN (pipeline), videodepay, tee, videoqueue, videoconv,
      videosink, aviqueue, aviconv, avidenc, avifmux, avifilesink, NULL);

  res1 = gst_element_link_many (videodepay, tee, videoqueue, videoconv, videosink, NULL);
  g_assert (res1 == TRUE);
  res2 = gst_element_link_many (tee, aviqueue, aviconv, avidenc, avifmux, avifilesink, NULL);
  g_assert (res2 == TRUE);

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
  g_assert (rtpbin);

  g_object_set (G_OBJECT (rtpbin), "latency", 200, NULL);

  gst_bin_add (GST_BIN (pipeline), rtpbin);

  /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */
  srcpad = gst_element_get_static_pad (rtpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);

  /* get an RTCP sinkpad in session 0 */
  srcpad = gst_element_get_static_pad (rtcpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  /* get an RTCP srcpad for sending RTCP back to the sender */
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");
  sinkpad = gst_element_get_static_pad (rtcpsink, "sink");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);

  /* the RTP pad that we have to connect to the depayloader will be created
   * dynamically so we connect to the pad-added signal, pass the depayloader as
   * user_data so that we can link to it. */
  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), videodepay);

  /* give some stats when we receive RTCP */
  //g_signal_connect (rtpbin, "on-ssrc-active", G_CALLBACK (on_ssrc_active_cb),videodepay);

  /* set the pipeline to playing */
  g_print ("starting receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_print ("stopping receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (pipeline);

  return 0;
}
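
About the playback-length problem mentioned above: my guess (unverified) is that the recording branch carries no fixed framerate, so avimux writes a wrong frame rate into the AVI header and players stretch the file. A minimal sketch of one candidate fix, forcing a known rate with videorate and a capsfilter in front of the encoder; the 15/1 value is an assumption, use the sender's actual rate:

  /* hypothetical fix (untested): force a fixed framerate on the recording
   * branch so avimux writes a correct frame rate into the AVI header */
  GstElement *avirate, *avicaps;
  GstCaps *ratecaps;

  avirate = gst_element_factory_make ("videorate", "avirate");
  g_assert (avirate);
  avicaps = gst_element_factory_make ("capsfilter", "avicaps");
  g_assert (avicaps);
  /* 15/1 is an assumed rate; set this to the sender's real framerate */
  ratecaps = gst_caps_from_string ("video/x-raw-yuv, framerate=(fraction)15/1");
  g_object_set (avicaps, "caps", ratecaps, NULL);
  gst_caps_unref (ratecaps);

  gst_bin_add_many (GST_BIN (pipeline), avirate, avicaps, NULL);
  /* then link the recording branch as:
   *   tee ! aviqueue ! aviconv ! avirate ! avicaps ! avidenc ! avifmux ! avifilesink */

If this guess is right, the equivalent change on the gst-launch pipeline would be inserting "videorate ! video/x-raw-yuv,framerate=15/1" before jpegenc.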
