GStreamer: remote camera capture, UDP transmission, local display, and saving to an AVI file — receiver (display and save) side

After two weeks of work I have finally finished the C program that captures a remote camera with GStreamer, transmits the video over UDP, displays it locally, and saves it to an AVI file. I am sharing it here; comments and corrections are welcome.

The program still has one problem: the recording only lasts a short time, yet the saved file reports a much longer playback duration. If anyone knows how to fix this, please let me know — thanks in advance! (A possible fix is sketched after the code listing below.)


recv-display-avifile:

gst-launch  udpsrc caps=" application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)YCbCr-4:2:0, depth=(string)8, width=(string)320, height=(string)240, colorimetry=(string)SMPTE240M, ssrc=(guint)4294234526, payload=(int)96, clock-base=(guint)520513122, seqnum-base=(guint)28177" port=9996 ! queue ! rtpvrawdepay  ! queue ! tee name="splitter" ! queue ! ffmpegcolorspace ! autovideosink splitter. ! queue ! ffmpegcolorspace ! jpegenc ! avimux ! filesink location=osug-udp-2.avi


C code:

#include <string.h>
#include <math.h>

#include <gst/gst.h>

/* the caps of the sender RTP stream. This is usually negotiated out of band with
 * SDP or RTSP. */
#define VIDEO_CAPS "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)YCbCr-4:2:0, depth=(string)8, width=(string)320, height=(string)240, colorimetry=(string)SMPTE240M"
//#define VIDEO_CAPS "application/x-rtp,media=video,clock-rate=9000,encoding-name=H264"


#define AVINAME "camera.avi" 
#define PORT 9996
#define VIDEO_SINK  "autovideosink"
/* the destination machine to send RTCP to. This is the address of the sender and
 * is used to send back the RTCP reports of this receiver. If the data is sent
 * from another machine, change this address. */
#define DEST_HOST "127.0.0.1"

/* print the stats of a source */
static void print_source_stats (GObject * source) {
  GstStructure *stats;
  gchar *str;

  g_return_if_fail (source != NULL);

  /* get the source stats */
  g_object_get (source, "stats", &stats, NULL);

  /* simply dump the stats structure */
  str = gst_structure_to_string (stats);
  g_print ("source stats: %s\n", str);

  gst_structure_free (stats);
  g_free (str);
}

/* will be called when gstrtpbin signals on-ssrc-active. It means that an RTCP
 * packet was received from another source. */
static void
on_ssrc_active_cb (GstElement * rtpbin, guint sessid, guint ssrc, GstElement * depay)
{
  GObject *session, *isrc, *osrc;

  g_print ("got RTCP from session %u, SSRC %u\n", sessid, ssrc);

  /* get the right session */
  g_signal_emit_by_name (rtpbin, "get-internal-session", sessid, &session);

  /* get the internal source (the SSRC allocated to us, the receiver) */
  g_object_get (session, "internal-source", &isrc, NULL);
  print_source_stats (isrc);

  /* get the remote source that sent us RTCP */
  g_signal_emit_by_name (session, "get-source-by-ssrc", ssrc, &osrc);
  print_source_stats (osrc);
}

/* will be called when rtpbin has validated a payload that we can depayload */
static void
pad_added_cb (GstElement * rtpbin, GstPad * new_pad, GstElement * depay)
{
  GstPad *sinkpad;
  GstPadLinkReturn lres;

  g_print ("new payload on pad: %s\n", GST_PAD_NAME (new_pad));

  sinkpad = gst_element_get_static_pad (depay, "sink");
  g_assert (sinkpad);

  lres = gst_pad_link (new_pad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);

}
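
One refinement worth considering: in GStreamer 0.10, gstrtpbin names its dynamic source pads recv_rtp_src_<session>_<ssrc>_<payload-type>, so the callback can filter on that prefix and avoid relinking an already-connected depayloader. A minimal sketch (the name pad_added_checked_cb is mine, not part of the original program):

/* A more defensive variant (sketch only): link only pads that belong to RTP
 * session 0 and skip the link if the depayloader sink is already connected. */
static void
pad_added_checked_cb (GstElement * rtpbin, GstPad * new_pad, GstElement * depay)
{
  GstPad *sinkpad;

  if (!g_str_has_prefix (GST_PAD_NAME (new_pad), "recv_rtp_src_0"))
    return;

  sinkpad = gst_element_get_static_pad (depay, "sink");
  if (!gst_pad_is_linked (sinkpad)) {
    if (gst_pad_link (new_pad, sinkpad) != GST_PAD_LINK_OK)
      g_printerr ("failed to link %s to the depayloader\n", GST_PAD_NAME (new_pad));
  }
  gst_object_unref (sinkpad);
}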


int main (int argc, char *argv[])
{
  GstElement *rtpbin, *rtpsrc, *rtcpsrc, *rtcpsink;
  GstElement *videodepay, *videoqueue, *videoconv, *videosink;
  GstElement *tee, *aviqueue, *aviconv, *avidenc, *avifmux, *avifilesink;
  GstElement *pipeline;
  GMainLoop *loop;
  GstCaps *caps;
  gboolean res1, res2;
  GstPadLinkReturn lres;
  GstPad *srcpad, *sinkpad;

  /* always init first */
  gst_init (&argc, &argv);

  /* the pipeline to hold everything */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);

  /* the udp src and source we will use for RTP and RTCP */
  rtpsrc = gst_element_factory_make ("udpsrc", "rtpsrc");
  g_assert (rtpsrc);
  g_object_set (rtpsrc, "port", PORT, NULL);
  /* we need to set caps on the udpsrc for the RTP data */
  caps = gst_caps_from_string (VIDEO_CAPS);
  g_object_set (rtpsrc, "caps", caps, NULL);
  gst_caps_unref (caps);

  rtcpsrc = gst_element_factory_make ("udpsrc", "rtcpsrc");
  g_assert (rtcpsrc);
  g_object_set (rtcpsrc, "port", 9997, NULL);

  rtcpsink = gst_element_factory_make ("udpsink", "rtcpsink");
  g_assert (rtcpsink);
  g_object_set (rtcpsink, "port", 9999, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (rtcpsink, "async", FALSE, "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), rtpsrc, rtcpsrc, rtcpsink, NULL);

  /* the depayloading and decoding */
  videodepay = gst_element_factory_make ("rtpvrawdepay", "videodepay");
  g_assert (videodepay);
  videoqueue = gst_element_factory_make ("queue", "videoqueue");
  g_assert (videoqueue);

  tee = gst_element_factory_make ("tee", "tee");
  g_assert (tee);

  aviqueue = gst_element_factory_make ("queue", "aviqueue");
  g_assert (aviqueue);

  /* colorspace conversion and video display for the live branch */
  videoconv = gst_element_factory_make ("ffmpegcolorspace", "videoconv");
  g_assert (videoconv);

  videosink = gst_element_factory_make (VIDEO_SINK, "videosink");
  g_assert (videosink);

  /* colorspace conversion, JPEG encoding and AVI muxing for the file branch */
  aviconv = gst_element_factory_make ("ffmpegcolorspace", "aviconv");
  g_assert (aviconv);
  avidenc = gst_element_factory_make ("jpegenc", "avidenc");
  g_assert (avidenc);
  avifmux = gst_element_factory_make ("avimux", "avifmux");
  g_assert (avifmux);
  avifilesink = gst_element_factory_make ("filesink", "avifilesink");
  g_assert (avifilesink);

  g_object_set (avifilesink, "location", AVINAME, NULL);

  /* add the depayloader, the display branch and the AVI file branch to the
   * pipeline and link them; the tee feeds both branches */
  gst_bin_add_many (GST_BIN (pipeline), videodepay, tee, videoqueue, videoconv,
      videosink, aviqueue, aviconv, avidenc, avifmux, avifilesink, NULL);

  res1 = gst_element_link_many (videodepay, tee, videoqueue, videoconv, videosink, NULL);
  g_assert (res1 == TRUE);
  res2 = gst_element_link_many (tee, aviqueue, aviconv, avidenc, avifmux, avifilesink, NULL);
  g_assert (res2 == TRUE);

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
  g_assert (rtpbin);

  g_object_set (G_OBJECT (rtpbin),"latency",200,NULL);

  gst_bin_add (GST_BIN (pipeline), rtpbin);

  /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */
  srcpad = gst_element_get_static_pad (rtpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);

  /* get an RTCP sinkpad in session 0 */
  srcpad = gst_element_get_static_pad (rtcpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  /* get an RTCP srcpad for sending RTCP back to the sender */
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");
  sinkpad = gst_element_get_static_pad (rtcpsink, "sink");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);

  /* the RTP pad that we have to connect to the depayloader will be created
   * dynamically so we connect to the pad-added signal, pass the depayloader as
   * user_data so that we can link to it. */
  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), videodepay);

  /* give some stats when we receive RTCP */
  //g_signal_connect (rtpbin, "on-ssrc-active", G_CALLBACK (on_ssrc_active_cb),videodepay);

  /* set the pipeline to playing */
  g_print ("starting receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_print ("stopping receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (pipeline);

  return 0;
}
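
About the duration problem: one likely cause is that the depayloaded RTP caps carry no frame rate, so avimux has nothing reliable to write into the AVI header and the file's nominal length no longer matches the number of recorded frames. One thing worth trying (a sketch only, untested here; the 25/1 rate is an assumption and should be replaced with the camera's real rate) is to force a constant frame rate in the file branch with videorate and a capsfilter before jpegenc:

  /* possible fix (sketch): normalise the file branch to a constant frame rate
   * so that the rate avimux records in the AVI header matches the stream.
   * Declare avirate/avicaps next to the other GstElement pointers in main().
   * The 25/1 value is an assumption -- use the camera's actual rate. */
  GstElement *avirate, *avicaps;
  GstCaps *ratecaps;

  avirate = gst_element_factory_make ("videorate", "avirate");
  avicaps = gst_element_factory_make ("capsfilter", "avicaps");
  g_assert (avirate && avicaps);

  ratecaps = gst_caps_from_string ("video/x-raw-yuv, framerate=(fraction)25/1");
  g_object_set (avicaps, "caps", ratecaps, NULL);
  gst_caps_unref (ratecaps);

  gst_bin_add_many (GST_BIN (pipeline), avirate, avicaps, NULL);

  /* replace the original file-branch link so the branch becomes:
   * tee ! queue ! ffmpegcolorspace ! videorate ! capsfilter ! jpegenc ! avimux ! filesink */
  res2 = gst_element_link_many (tee, aviqueue, aviconv, avirate, avicaps,
      avidenc, avifmux, avifilesink, NULL);
  g_assert (res2 == TRUE);

It may also help to send EOS into the pipeline (gst_element_send_event (pipeline, gst_event_new_eos ());) and wait for the EOS message on the bus before setting the state to NULL, so that avimux gets a chance to finish writing the AVI index; killing the process mid-stream can likewise leave the file with a bogus length.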



The sender side of this program is in my previous blog post — feel free to take a look: http://blog.csdn.net/zhujinghao09/article/details/8528802
