gstreamer将H264码流转为avi视频文件示例

是时候记录怎么使用gstreamer库将h264码流转为avi、mp4、flv等视频文件了。
下图是本篇示例视频合成的流程图,其中H264采集与队列实现部分代码未贴出。
在这里插入图片描述
总体思想是,“视频合成主线程”根据视频数据通道创建gstreamer视频合成pipeline线程,然后剩余的视频合成以及文件存储操作均由pipeline的appsrc与appsink的回调函数完成。
视频合成主线程与gstreamer的pipeline线程的之间的信号同步由以下标志位以及信号量完成
guint record_flag;
sem_t frame_put;
sem_t frame_get;
sem_t record_on;

关于gstreamer-1.0库一些元件的使用注意事项:

  • appsrc元件,因为每一帧H264的长度不一样,所以需要每次向其注入数据时,需要重新指定数据长度
  • 视频文件合成结束时,一定要通过appsrc发送end of stream信号,否则非正常结束的文件,因为文件信息不全导致无法播放
  • 采用splitmuxsink元件时,不能将其muxer属性对应的元件设置为flvmux,因为splitmuxsink的video的pad动态申请时只支持video_%u格式,flvmux的为video形式,因此不支持
  • 合成视频时,mux类的元件支持的输入H264帧格式不一样,有的是byte-stream,有的是avc格式。两者的区别是:avc格式需将H264帧的分隔符00 00 00 01用帧长度替换,注意为大端格式,长度存放在第1、2、3字节(从0计数),高字节在前。尤其是视频“I”帧,包含sps、pps、iframe,每个分隔符都要用长度替换,长度不包含分隔符本身。
  • h264parse元件存在问题,50K长度附近的视频"I"帧,在由byte-stream转为avc格式时,会出现数据丢失
#define __USE_GNU
#include <sched.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <assert.h>
#include <sys/types.h>
#include <sys/un.h>
#include <fcntl.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <linux/fb.h>
#include <signal.h>
#include <pthread.h>
#include <semaphore.h>
#include <errno.h>
#include "media.h"
#include "queue.h"
/* Per-channel state for one H264 -> AVI muxing pipeline.
 * One instance exists for each video channel (ch0..ch3); the main record
 * thread and the GStreamer pipeline thread synchronize through the
 * record_flag and the three semaphores below. */
typedef struct _GstDataStruct{
   
	GstElement *pipeline;		// top-level GStreamer pipeline for this channel
	GstElement *appsrc;		// H264 frames are pushed into the pipeline here
	GstElement *appsink;		// muxed AVI data is pulled out here and written to vfile
	//GstElement *h264parse;
	//GstElement *muxfile;
	GstElement *avimux;		// avimux element between appsrc and appsink
	
	guint sourceid;
	guint appsrc_index;		// count of buffers pushed into appsrc
	guint appsink_index;		// count of samples pulled from appsink
	guint bus_watch_id;		// id returned by gst_bus_add_watch (for removal)
	GstBus *bus;
	GMainLoop *loop;  		// GLib's Main Loop
	REC_MSG *rec_msg;		// current record message (frame pointer + size)
	guint record_flag;		// 0 => stop recording: appsrc sends end-of-stream
	guint ch;			// channel number (0..3), used in log messages
	sem_t frame_put;		// posted by producer when a new frame is ready
	sem_t frame_get;		// posted by pipeline after the frame was consumed
	sem_t record_on;		// signals that recording is active
	unsigned int width;		// video width in pixels
	unsigned int height;		// video height in pixels
	unsigned int fps;		// frames per second
	char* filename;			// output AVI file path
	FILE *vfile;			// output file handle written by the appsink callback
} MuxGstDataStruct;
// Directory where finished AVI recordings are stored.
#define RECORD_DIR "/mnt/httpsrv/av_record/"
// Maximum duration of a single recording file before rotating (seconds).
#define RECORD_TIME_SEC (3 * 60)

// State shared with the capture/queue module (defined elsewhere).
extern unsigned long q_record;
extern int start_wait_iframe0;
extern int start_wait_iframe1;
extern int start_wait_iframe2;
extern int start_wait_iframe3;
extern int had_camera_3;
extern char startup_ID[64];
extern unsigned int bytes_per_frame;
extern int video_base_ts_uninit;

// Frame message currently being fed to a pipeline (see start_feed()).
REC_MSG video_rec_msg;
// One mux-pipeline context per video channel.
MuxGstDataStruct ch0_AviMuxGst;
MuxGstDataStruct ch1_AviMuxGst;
MuxGstDataStruct ch2_AviMuxGst;
MuxGstDataStruct ch3_AviMuxGst;

// Per-channel stream parameters (set when the channel comes online).
unsigned int ch0_online;
unsigned int ch0_width;
unsigned int ch0_height;
unsigned int ch0_fps;

unsigned int ch1_online;
unsigned int ch1_width;
unsigned int ch1_height;
unsigned int ch1_fps;

unsigned int ch2_online;
unsigned int ch2_width;
unsigned int ch2_height;
unsigned int ch2_fps;

unsigned int ch3_online;
unsigned int ch3_width;
unsigned int ch3_height;
unsigned int ch3_fps;

// Output file name buffers, one per channel.
#define MAX_FILE_NAME	(96)
char filename0[MAX_FILE_NAME] = {
   0};
char filename1[MAX_FILE_NAME] = {
   0};
char filename2[MAX_FILE_NAME] = {
   0};
char filename3[MAX_FILE_NAME] = {
   0};		// add by luke zhao 2018.6.14, used for 360 video

/*
 * Bus watch callback for one channel's mux pipeline.
 *
 * Handles EOS (flush + close the output file, quit the channel's main loop)
 * and ERROR (log and quit).  Returning TRUE keeps the watch installed.
 *
 * @param bus        bus the message came from (unused directly)
 * @param msg        the bus message to dispatch on
 * @param pAviMuxGst per-channel mux context (owns vfile and loop)
 * @return TRUE so the bus watch stays attached.
 */
static gboolean avi_mux_bus_msg_call(GstBus *bus, GstMessage *msg, MuxGstDataStruct *pAviMuxGst)
{
	gchar *debug = NULL;
	GError *error = NULL;
	GMainLoop *loop = pAviMuxGst->loop;

	GST_DEBUG ("ch:%d, got message %s", pAviMuxGst->ch, gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
	switch (GST_MESSAGE_TYPE(msg))
	{
		case GST_MESSAGE_EOS:
			printf("ch:%d, End of stream\n", pAviMuxGst->ch);
			/* Guard against a NULL handle and reset it after closing so a
			 * second EOS cannot cause a crash or a double fclose(). */
			if (pAviMuxGst->vfile != NULL)
			{
				fflush(pAviMuxGst->vfile);
				fclose(pAviMuxGst->vfile);
				pAviMuxGst->vfile = NULL;
			}
			g_main_loop_quit(loop);
			break;
		case GST_MESSAGE_ERROR:
			gst_message_parse_error(msg, &error, &debug);
			g_free(debug);		// debug detail not needed, only the message
			g_printerr("ch:%d, Error: %s\n", pAviMuxGst->ch, error->message);
			g_error_free(error);
			g_main_loop_quit(loop);
			break;
		default:
			break;
	}
	return TRUE;
}



/*
 * appsrc "need-data" callback: feed exactly one H264 frame per invocation.
 *
 * Blocks on frame_put until the producer has published a frame in
 * video_rec_msg, then either pushes the frame into appsrc or, when
 * record_flag is 0, emits end-of-stream so the output file is finalized
 * correctly (an AVI without EOS is unplayable).  Posts frame_get so the
 * producer may overwrite video_rec_msg with the next frame.
 *
 * @param pipeline   the appsrc element that requested data (unused)
 * @param size       amount of data suggested by appsrc (ignored; we always
 *                   push one whole frame)
 * @param pAviMuxGst per-channel mux context
 */
static void start_feed(GstElement * pipeline, guint size, MuxGstDataStruct *pAviMuxGst)
{
	GstFlowReturn ret;
	GstBuffer *buffer;
	GstMemory *memory;
	gpointer data;
	gsize len;

	sem_wait(&pAviMuxGst->frame_put);

	if(pAviMuxGst->record_flag == 0)
	{
		printf("ch:%d, end of stream change to new file!\n", pAviMuxGst->ch);
		g_signal_emit_by_name (pAviMuxGst->appsrc, "end-of-stream", &ret);
	}
	else
	{
		data = (gpointer)video_rec_msg.frame;
		len = (gsize)video_rec_msg.used_size;
		/* BUG FIX: "blocksize" is a guint property, but the original code
		 * passed a string (char[64]) through g_object_set's varargs, which
		 * is undefined behavior.  Pass the numeric frame length directly.
		 * H264 frames vary in size, so this is updated for every frame. */
		g_object_set(G_OBJECT(pAviMuxGst->appsrc), "blocksize", (guint)len, NULL);
		/* BUG FIX: gst_app_src_set_size() takes a GstAppSrc*, not a
		 * GstElement*; add the GST_APP_SRC cast. */
		gst_app_src_set_size (GST_APP_SRC(pAviMuxGst->appsrc), len);
		//printf("ch:%d, get frame:%p, len:%d!!!!\n", pAviMuxGst->ch, data, len);

		pAviMuxGst->appsrc_index++;
		/* Wrap the producer's frame memory without copying; the producer
		 * must not reuse it until frame_get is posted below. */
		buffer = gst_buffer_new();
		memory = gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY, data, len, 0, len, NULL, NULL);
		gst_buffer_append_memory (buffer, memory);
		g_signal_emit_by_name (pAviMuxGst->appsrc, "push-buffer", buffer, &ret);
		gst_buffer_unref(buffer);
	}

	sem_post(&pAviMuxGst->frame_get);
}

/*
 * appsrc "enough-data" callback: appsrc's internal queue is full.
 * Feeding is paced by the frame_put/frame_get semaphores in start_feed(),
 * so nothing needs to be throttled here beyond logging the event.
 */
static void stop_feed(GstElement * pipeline, MuxGstDataStruct *pAviMuxGst)
{
	g_print("ch:%d, stop feed ...................\n", pAviMuxGst->ch);
}


static void new_sample_on_appsink (GstElement *sink, MuxGstDataStruct *pAviMuxGst)
{
   
	int ret = 0;
	GstSample *sample = NULL;
	struct timeval tvl;

	gettimeofday(&tvl, NULL);
	g_signal_emit_by_name (sink, "pull-sample", &sample);
	if(sample)
	{
   
		pAviMuxGst->appsink_index++;
		GstBuffer *buffer = gst_sample_get_buffer(sample);
		GstMapInfo info;
		if(gst_buffer_map((buffer), &info, GST_MAP_READ))
		{
   
			//printf("ch:%d, mux appsink rcv data len:%d time: %d, index:%d!\n", pAviMuxGst->ch,
			//	  (unsigned int)info.size, (unsigned int)tvl.tv_sec, pAviMuxGst->appsink_index);
			fwrite(info.data, i
评论 5
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值