Here's a summary of some relevant code related to streaming.

This post shows how to create an RTP stream with the GStreamer command-line tools and libraries and play it back with VLC. It also provides a C++ example that starts an RTSP server and streams video from a webcam, and discusses how to combine OpenCV with GStreamer for video processing and encoding, using hardware acceleration to save CPU resources.

The command for RTP (Real-time Transport Protocol) streaming

gst-launch-1.0 -e v4l2src device=/dev/video1 ! videoconvert ! video/x-raw,format=NV12,width=640,height=480 ! mpph264enc ! h264parse ! rtph264pay config-interval=1 pt=96 ! udpsink host=192.168.9.100 port=5000

(replace 192.168.9.100 with the target IP address)
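To verify the stream on the receiving machine, a minimal GStreamer playback pipeline such as the following should work (a sketch using standard upstream elements; it assumes GStreamer and the libav plugins are installed on the receiver):

gst-launch-1.0 udpsrc port=5000 caps="application/x-rtp,media=video,encoding-name=H264,payload=96" ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink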

To use VLC media player for playback, describe the stream in SDP form:

v=0
m=video 5000 RTP/AVP 96
a=rtpmap:96 H264/90000
c=IN IP4 192.168.9.100
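Save the description above as an SDP file (the name stream.sdp here is just an example) and open it with VLC; the c= line should carry the address of the machine receiving the stream:

vlc stream.sdp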

Using GStreamer to open a webcam and serve the video over RTSP

#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <termios.h>
#include <unistd.h>
#include <fcntl.h>
#include <pthread.h>  // pthread_setaffinity_np (GNU extension)
#include <sched.h>    // cpu_set_t, CPU_ZERO, CPU_SET
#include <iostream>

// Author: <LuoYiXuan>
// Created: <2023.4.17>
// Description: <webcam_rtsp code>

GstRTSPMediaFactory *create_rtsp_media_factory(const gchar *device_node) 
{
    GstRTSPMediaFactory *factory;
    gchar *pipeline_desc;

    pipeline_desc = g_strdup_printf("v4l2src device=%s ! videoconvert ! videoscale ! video/x-raw,width=640,height=480 ! mpph264enc ! rtph264pay name=pay0 pt=96", device_node);
    factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(factory, pipeline_desc);
    g_free(pipeline_desc);

    return factory;
}

gboolean keyboard_input_cb(GIOChannel *source, GIOCondition condition, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    gchar input_char;

    g_io_channel_read_chars(source, &input_char, 1, NULL, NULL);

    if (input_char == 27) // 27 is the ASCII code for the Esc key
    {
        g_main_loop_quit(loop);
    }

    return TRUE;
}


void start_rtsp_server(const gchar *device_node, const gchar *server_address, guint16 server_port) 
{
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;
    gchar *port_str;

    // Set up the terminal to get one character at a time 
    struct termios old_termios, new_termios; 
    tcgetattr(STDIN_FILENO, &old_termios); 
    new_termios = old_termios; 
    new_termios.c_lflag &= ~(ICANON | ECHO); 
    tcsetattr(STDIN_FILENO, TCSANOW, &new_termios); 
    // Set the standard input (stdin) to non-blocking mode 
    fcntl(STDIN_FILENO, F_SETFL, O_NONBLOCK);

    gst_init(NULL, NULL);
    server = gst_rtsp_server_new();
    g_object_set(G_OBJECT(server), "address", server_address, NULL);
    port_str = g_strdup_printf("%d", server_port);
    g_object_set(G_OBJECT(server), "service", port_str, NULL);
    g_free(port_str);

    mounts = gst_rtsp_server_get_mount_points(server);
    factory = create_rtsp_media_factory(device_node);
    gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
    g_object_unref(mounts);

    gst_rtsp_server_attach(server, NULL);

    loop = g_main_loop_new(NULL, FALSE);
    // Set up the keyboard input callback 
    GIOChannel *stdin_channel = g_io_channel_unix_new(STDIN_FILENO); 
    g_io_add_watch(stdin_channel, G_IO_IN, keyboard_input_cb, loop); 
    g_io_channel_unref(stdin_channel);
    std::cout << "rtsping ----"<< std::endl;
    g_main_loop_run(loop);

    // Restore the original terminal settings 
    tcsetattr(STDIN_FILENO, TCSANOW, &old_termios);

    g_main_loop_unref(loop);
    g_object_unref(server);
    std::cout << "end rtsping ----"<< std::endl;
}

int main(int argc, char *argv[]) 
{
    gchar *device_node;

    // Pin this process to CPU core 7 (optional; adjust for your platform)
    cpu_set_t mask;
    CPU_ZERO(&mask);
    CPU_SET(7, &mask);
    pthread_setaffinity_np(pthread_self(), sizeof(mask), &mask);

    if (argc > 1) {
        device_node = argv[1];
    } else {
        g_printerr("USAGE: %s /dev/videoX\n", argv[0]);
        return -1;
    }

    start_rtsp_server(device_node, "127.0.0.1", 8554);
    
    return 0;
}
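A build command along these lines should work, assuming the GStreamer and RTSP-server development packages are installed (the source file name webcam_rtsp.cpp is illustrative):

g++ webcam_rtsp.cpp -o webcam_rtsp $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-rtsp-server-1.0) -lpthread

The stream is then served at rtsp://127.0.0.1:8554/test, e.g. vlc rtsp://127.0.0.1:8554/test. Note the server binds to 127.0.0.1, so only local clients can connect unless the address is changed.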

OpenCV's GStreamer backend makes it easy to build a pipeline and write frames into it: cv::VideoWriter accepts a GStreamer pipeline description in place of a file name (this requires OpenCV built with GStreamer support).

#include <stdio.h>
#include <iostream>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>

// Author: <LuoYiXuan>
// Created: <2023.4.17>
// Description: <opencv_plugin_gstreamer code>

int main(int argc, char** argv) {

    cv::VideoCapture cap(11);  // camera index 11; adjust for your system
    if (!cap.isOpened()) {
        std::cerr << "Error: Could not open camera" << std::endl;
        return -1;
    }
    cap.set(cv::CAP_PROP_FRAME_WIDTH, 640);
    cap.set(cv::CAP_PROP_FRAME_HEIGHT, 480);

    // Second part of the sender pipeline: VideoWriter feeds the appsrc
    cv::VideoWriter writer;
    writer.open("appsrc ! videoconvert ! x264enc tune=zerolatency bitrate=1000 speed-preset=superfast ! rtph264pay ! udpsink host=192.168.9.100 port=5000",
                cv::CAP_GSTREAMER, 0, 30.0, cv::Size(640, 480), true);
    if (!writer.isOpened()) {
        printf("=ERR= can't create video writer\n");
        return -1;
    }

    cv::Mat frame;
    int key;

    while (true) {
        cap >> frame;
        if (frame.empty())
            break;

        /* Process the frame here */
        // cv::cvtColor(frame, frame, cv::COLOR_BGR2YUV_I420);
        cv::cvtColor(frame, frame, cv::COLOR_BGR2GRAY);  // example processing:
        cv::cvtColor(frame, frame, cv::COLOR_GRAY2BGR);  // grayscale, then back to BGR
        writer << frame;
        key = cv::waitKey(30);  // note: waitKey needs an active HighGUI window to capture keys
    }
    return 0;
}
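A plausible build command, assuming an OpenCV 4 installation with GStreamer support and pkg-config files (the source file name is illustrative; cv::getBuildInformation() reports whether GStreamer support was compiled in):

g++ opencv_gstreamer_writer.cpp -o opencv_gstreamer_writer $(pkg-config --cflags --libs opencv4)

The receiving side can reuse the SDP file shown earlier, since this sender targets the same host and port.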

mat2pipeline, current release (x264enc version)

#include <gst/gst.h>
#include <stdlib.h>
#include <string.h>
#include <iostream>
#include <chrono>
#include <opencv2/opencv.hpp>

// Author: <LuoYiXuan>
// Created: <2023.4.17>
// Description: <mat2pipeline code>

int Width;
int Height;
int Framerate;
cv::VideoCapture cap;

gboolean LinkElementsManually(GstElement *stream, GstElement *muxer)
{
	gchar *req_pad_name;
	GstPad *req_pad;
	GstPad *static_pad;

	/* Get the source pad from the queue element */
	static_pad = gst_element_get_static_pad(stream, "src");
	/* Request a sink pad from the muxer */
	req_pad = gst_element_request_pad(muxer, gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(muxer), "sink_%d"), NULL, NULL);

	req_pad_name = gst_pad_get_name(req_pad);
	g_print("stream pad name for muxer: %s\n", req_pad_name);
	g_free(req_pad_name);

	/* Link src -> sink */
	if (GST_IS_PAD(static_pad) && GST_IS_PAD(req_pad))
	{
		GstPadLinkReturn ret = gst_pad_link(static_pad, req_pad);
		if (ret == GST_PAD_LINK_OK)
			return 1; /* success */
		g_print("Error %d\n", ret);
		return 0; /* failure */
	}
	return 0; /* failure */
}

static void need_data_cv_image_data(GstElement *appsrc, guint unused_size, gpointer user_data)
{
	static GstClockTime timestamp = 0;
	GstBuffer *buffer;
	guint size, height, width, channels;
	GstFlowReturn ret;
	cv::Mat img;
	GstMapInfo map;

	cap.read(img);

	//cv::resize(img, img, cv::Size(320, 240));
	height = img.rows;
	width = img.cols;
	channels = (guint)img.channels();
	size = height * width * channels;

	/* Allocate a buffer the size of the OpenCV image and copy the pixels in */
	buffer = gst_buffer_new_allocate(NULL, size, NULL);
	gst_buffer_map(buffer, &map, GST_MAP_WRITE);
	memcpy(map.data, img.data, size);
	gst_buffer_unmap(buffer, &map);

	/* Timestamp the buffer */
	GST_BUFFER_PTS(buffer) = timestamp;
	GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, Framerate);
	timestamp += GST_BUFFER_DURATION(buffer);

	/* Push the buffer into appsrc; the signal does not take ownership, so unref afterwards */
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);

	if (ret != GST_FLOW_OK) {
		/* something went wrong; stop pushing */
		g_print("Failed to push video buffer\n");
	}
}

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
	GMainLoop *loop = (GMainLoop *)data;
	gchar *debug;
	GError *error;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_EOS:
		g_print("End of stream\n");
		g_main_loop_quit(loop);
		break;

	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &error, &debug);
		g_printerr("Error: %s\n", error->message);
		g_printerr("Debug information: %s\n", debug ? debug : "none");
		g_free(debug);
		g_error_free(error);
		g_main_loop_quit(loop);
		break;

	default:
		break;
	}

	return TRUE;
}

void Open_Cv_Capture()
{
	cv::Mat frame;
	cap.open(11);  // camera index 11; adjust for your system
	cap.set(cv::CAP_PROP_FRAME_WIDTH, 640);
	cap.set(cv::CAP_PROP_FRAME_HEIGHT, 480);

	if (!cap.isOpened())
	{
		printf("Unable to open webcam!\n");
		return;
	}

	/* Read one frame to get the actual width and height for the pipeline caps */
	cap.read(frame);
	Width = frame.cols;
	Height = frame.rows;
	Framerate = 25;
}
int main(int argc, char *argv[])
{
    // Initialization
    gst_init(&argc, &argv);

    GMainLoop *loop;
    GstBus *bus;
    guint bus_watch_id;

    loop = g_main_loop_new(NULL, FALSE);

    // Initialize Video capture from Opencv
    Open_Cv_Capture();

    // Create gstreamer elements
    GstElement *pipeline = gst_pipeline_new("OpenCv2GstPipeline");
    GstElement *cv_source = gst_element_factory_make("appsrc", "cv_image_source");
    GstElement *v_videorate = gst_element_factory_make("videorate", "v_videorate");
    GstElement *v_convert = gst_element_factory_make("videoconvert", "videoconvert");
    GstElement *v_rawCapsfilter = gst_element_factory_make("capsfilter", "raw_video_filer");
    GstElement *v_encCapsfilter = gst_element_factory_make("capsfilter", "encoder_filter");
    GstElement *encoder = gst_element_factory_make("x264enc", "encoder");
    GstElement *rtph264pay = gst_element_factory_make("rtph264pay", "rtph264pay");
    GstElement *v_queue = gst_element_factory_make("queue", "video-queue");
    GstElement *udpsink = gst_element_factory_make("udpsink", "udpsink");

    if (!pipeline || !cv_source || !v_videorate || !v_queue || !encoder || !rtph264pay || !v_convert || !udpsink || !v_encCapsfilter || !v_rawCapsfilter) {
        g_printerr("One element could not be created. Exiting.\n");
        return -1;
    }

    // Set the destination host and port on the udpsink
    g_object_set(G_OBJECT(udpsink), "host", "192.168.9.100", "port", 5000, NULL);

    // we add a message handler
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    // Add all elements into the pipeline
    gst_bin_add_many(GST_BIN(pipeline), cv_source, v_videorate, v_convert, v_rawCapsfilter, encoder, v_encCapsfilter, rtph264pay, v_queue, udpsink, NULL);

    // Timestamp buffers in time format
    gst_util_set_object_arg(G_OBJECT(cv_source), "format", "time");

    // Configure the appsrc caps to match the OpenCV frames
    g_object_set(G_OBJECT(cv_source), "caps",
                 gst_caps_new_simple("video/x-raw",
                                     "format", G_TYPE_STRING, "BGR", // OpenCV images are BGR
                                     "width", G_TYPE_INT, Width,
                                     "height", G_TYPE_INT, Height,
                                     "framerate", GST_TYPE_FRACTION, Framerate, 1,
                                     "parsed", G_TYPE_BOOLEAN, TRUE,
                                     "sparse", G_TYPE_BOOLEAN, TRUE,
                                     NULL), NULL);

    // Set up appsrc as a live stream source
    g_object_set(G_OBJECT(cv_source),
                 "stream-type", 0,
                 "format", GST_FORMAT_TIME,
                 "is-live", TRUE, NULL);

    // Caps filter on the raw video: convert the BGR image to I420 for the encoder
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                        "format", G_TYPE_STRING, "I420",
                                        "width", G_TYPE_INT, Width,
                                        "height", G_TYPE_INT, Height,
                                        "framerate", GST_TYPE_FRACTION, Framerate, 1,
                                        NULL);
    g_object_set(G_OBJECT(v_rawCapsfilter), "caps", caps, NULL);
    gst_caps_unref(caps);

    // Caps filter for the encoder output
    GstCaps *vidEncCaps = gst_caps_new_simple("video/x-h264",
                                              "stream-format", G_TYPE_STRING, "byte-stream",
                                              "profile", G_TYPE_STRING, "main",
                                              NULL);
    g_object_set(G_OBJECT(v_encCapsfilter), "caps", vidEncCaps, NULL);
    gst_caps_unref(vidEncCaps);

    // Note: x264enc's bitrate property is in kbit/s
    g_object_set(G_OBJECT(encoder), "bitrate", 500000, "ref", 4, "pass", 4, "key-int-max", 0, "byte-stream", TRUE, "tune", 0x00000004 /* zerolatency */, "noise-reduction", 1000, NULL);
    //g_object_set(G_OBJECT(encoder), "bitrate", 500000, NULL);

    // Link all elements up to the queue
    if (gst_element_link_many(cv_source, v_convert, v_videorate, v_rawCapsfilter, encoder, v_encCapsfilter, rtph264pay, v_queue, NULL) != TRUE) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // Final link: queue to udpsink
    if (gst_element_link_many(v_queue, udpsink, NULL) != TRUE) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // Callback that feeds video data on demand
    g_signal_connect(cv_source, "need-data", G_CALLBACK(need_data_cv_image_data), NULL);

    // Set the pipeline to the "playing" state
    if (gst_element_set_state(pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
    {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(pipeline);
        getchar();
        return -1;
    }

    g_print("Running...\n");
    g_main_loop_run(loop);

    // Free resources and change state to NULL
    g_source_remove(bus_watch_id); // the bus itself was already unreffed above
    g_print("Returned, stopping playback...\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Freeing pipeline...\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_print("Completed. Goodbye!\n");
    getchar();
    return 0;
}
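This version can presumably be built with both the GStreamer and OpenCV development packages present (the source file name is illustrative):

g++ mat2pipeline_x264.cpp -o mat2pipeline_x264 $(pkg-config --cflags --libs gstreamer-1.0 opencv4)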

Mat2pipeline with hardware acceleration (saves CPU usage)

#include <gst/gst.h>
#include <stdlib.h>
#include <string.h>
#include <iostream>
#include <chrono>
#include <opencv2/opencv.hpp>

// Author: <LuoYiXuan>
// Created: <2023.4.17>
// Description: <Mat2pipeline Hardware acceleration code>

int Width;
int Height;
int Framerate;
cv::VideoCapture cap;

gboolean LinkElementsManually(GstElement *stream, GstElement *muxer)
{
	gchar *req_pad_name;
	GstPad *req_pad;
	GstPad *static_pad;

	/* Get the source pad from the queue element */
	static_pad = gst_element_get_static_pad(stream, "src");
	/* Request a sink pad from the muxer */
	req_pad = gst_element_request_pad(muxer, gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(muxer), "sink_%d"), NULL, NULL);

	req_pad_name = gst_pad_get_name(req_pad);
	g_print("stream pad name for muxer: %s\n", req_pad_name);
	g_free(req_pad_name);

	/* Link src -> sink */
	if (GST_IS_PAD(static_pad) && GST_IS_PAD(req_pad))
	{
		GstPadLinkReturn ret = gst_pad_link(static_pad, req_pad);
		if (ret == GST_PAD_LINK_OK)
			return 1; /* success */
		g_print("Error %d\n", ret);
		return 0; /* failure */
	}
	return 0; /* failure */
}

static void need_data_cv_image_data(GstElement *appsrc, guint unused_size, gpointer user_data)
{
	static GstClockTime timestamp = 0;
	GstBuffer *buffer;
	guint size, height, width, channels;
	GstFlowReturn ret;
	cv::Mat img;
	GstMapInfo map;

	cap.read(img);

	//cv::resize(img, img, cv::Size(320, 240));
	height = img.rows;
	width = img.cols;
	channels = (guint)img.channels();
	size = height * width * channels;

	/* Allocate a buffer the size of the OpenCV image and copy the pixels in */
	buffer = gst_buffer_new_allocate(NULL, size, NULL);
	gst_buffer_map(buffer, &map, GST_MAP_WRITE);
	memcpy(map.data, img.data, size);
	gst_buffer_unmap(buffer, &map);

	/* Timestamp the buffer */
	GST_BUFFER_PTS(buffer) = timestamp;
	GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, Framerate);
	timestamp += GST_BUFFER_DURATION(buffer);

	/* Push the buffer into appsrc; the signal does not take ownership, so unref afterwards */
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);

	if (ret != GST_FLOW_OK) {
		/* something went wrong; stop pushing */
		g_print("Failed to push video buffer\n");
	}
}

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
	GMainLoop *loop = (GMainLoop *)data;
	gchar *debug;
	GError *error;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_EOS:
		g_print("End of stream\n");
		g_main_loop_quit(loop);
		break;

	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &error, &debug);
		g_printerr("Error: %s\n", error->message);
		g_printerr("Debug information: %s\n", debug ? debug : "none");
		g_free(debug);
		g_error_free(error);
		g_main_loop_quit(loop);
		break;

	default:
		break;
	}

	return TRUE;
}

void Open_Cv_Capture()
{
	cv::Mat frame;
	cap.open(11);  // camera index 11; adjust for your system
	cap.set(cv::CAP_PROP_FRAME_WIDTH, 640);
	cap.set(cv::CAP_PROP_FRAME_HEIGHT, 480);

	if (!cap.isOpened())
	{
		printf("Unable to open webcam!\n");
		return;
	}

	/* Read one frame to get the actual width and height for the pipeline caps */
	cap.read(frame);
	Width = frame.cols;
	Height = frame.rows;
	Framerate = 30;
}
int main(int argc, char *argv[]) {
    gst_init(&argc, &argv);

    GMainLoop *loop;
    GstBus *bus;
    guint bus_watch_id;

    loop = g_main_loop_new(NULL, FALSE);

    // Initialize Video capture from Opencv
    Open_Cv_Capture();

    GstElement *pipeline;
    GError *error = NULL;
    gchar *pipeline_str = g_strdup_printf("appsrc name=cv_image_source ! videoconvert ! videorate ! "
                                          "capsfilter caps=\"video/x-raw, format=(string)NV12, width=(int)%d, height=(int)%d, framerate=(fraction)%d/1\" ! "
                                          "mpph264enc ! "
                                          "h264parse ! "
                                          "rtph264pay config-interval=1 pt=96 ! udpsink host=192.168.9.100 port=5000", Width, Height, Framerate);

    pipeline = gst_parse_launch(pipeline_str, &error);
    g_free(pipeline_str);

    if (!pipeline) {
        g_printerr("Pipeline could not be created: %s\n", error->message);
        g_error_free(error);
        return -1;
    }

    // Add the bus watch
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    // Get the appsrc element from the pipeline
    GstElement *cv_source = gst_bin_get_by_name(GST_BIN(pipeline), "cv_image_source");

    // Set appsrc properties
    gst_util_set_object_arg(G_OBJECT(cv_source), "format", "time");
    g_object_set(G_OBJECT(cv_source), "caps",
                 gst_caps_new_simple("video/x-raw",
                                     "format", G_TYPE_STRING, "BGR",
                                     "width", G_TYPE_INT, Width,
                                     "height", G_TYPE_INT, Height,
                                     "framerate", GST_TYPE_FRACTION, Framerate, 1,
                                     "parsed", G_TYPE_BOOLEAN, TRUE,
                                     "sparse", G_TYPE_BOOLEAN, TRUE,
                                     NULL), NULL);
    g_object_set(G_OBJECT(cv_source),
                 "stream-type", 0,
                 "format", GST_FORMAT_TIME, 
                 "is-live", TRUE, NULL);

    // Connect the need_data signal
    g_signal_connect(cv_source, "need-data", G_CALLBACK(need_data_cv_image_data), NULL);

    // Set the pipeline to "playing" state
    if (gst_element_set_state(pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(pipeline);
        getchar();
        return -1;
    }

    g_print("Running...\n");
    g_main_loop_run(loop);

    // Free resources and change state to NULL
    g_source_remove(bus_watch_id); // the bus itself was already unreffed above
    gst_object_unref(cv_source);   // gst_bin_get_by_name returned a new reference
    g_print("Returned, stopping playback...\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Freeing pipeline...\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_print("Completed. Goodbye!\n");
    getchar();
    return 0;
}
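Since mpph264enc comes from the Rockchip MPP plugin set rather than the standard GStreamer plugins, it's worth confirming it is available on the target board before running this version:

gst-inspect-1.0 mpph264enc

If the element is listed, the pipeline offloads H.264 encoding to the hardware encoder, which is what saves CPU compared with the x264enc version.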