前言
接上一篇,测试采用POSIX共享内存与信号量测试进程间通信,POSIX共享内存没有进程间同步操作,因此需要信号量实现同步。POSIX信号量作为进程间同步操作时,使用匿名信号量,且匿名信号量必须预先位于共享内存段中。编译POSIX函数时,需要链接 librt.so,因此在链接库文件时需要指定 “-lrt” 参数
需求说明
- 进程A:Gstreamer进程,使用appsrc、vpuenc_h264、appsink三个元件对视频进行H264编码,appsrc通过元件的信号获取NV12格式的图像数据,vpuenc_h264元件进行编码,appsink获取编码后的H264帧、appsink的信号回调函数负责通过共享内存与信号量,将数据传输到进程B
- 进程B:视频推送进程,通过共享内存获取H264视频帧,使用rtmp方式推送到服务端
进程A
共享内存以及信号量均由进程B创建并初始化
- 注意共享内存的数据结构ShmVideo360Struct,数据结构内预定义用于进程同步的信号量,由创建共享内存的进程B初始化
- 共享内存文件名标识 #define POSIX_SHM "POSIX.shm",文件名可随意,但是不能包含多级路径,例如:#define POSIX_SHM "/home/root/POSIX.shm" 会导致 shm_open 函数失败,错误原因是非法参数(EINVAL)
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <pthread.h>
#include <semaphore.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/un.h>
#include <linux/fb.h>
#include <ipu.h>
/*
 * All GStreamer state for the encode pipeline
 * (appsrc -> vpuenc_h264 -> appsink) plus the encoder tuning knobs
 * taken from argv.
 */
typedef struct _GstDataStruct{
GstElement *pipeline;
GstElement *app_source;
GstElement *video_convert; // declared but never created in this file
GstElement *h264_encoder;
GstElement *app_sink;
guint sourceid; // To control the GSource
guint app_src_index; // count of raw frames pushed into appsrc
guint app_sink_index; // count of encoded frames pulled from appsink
guint bus_watch_id;
GstBus *bus;
GMainLoop *loop; // GLib's Main Loop
int frame_rate; // fps (argv[1], default 30)
int gop_size; // encoder GOP length (argv[2], default 30)
int bit_rate; // encoder bitrate (argv[3], default 1800)
int quant; // encoder quantisation (argv[4], default 1)
} GstDataStruct;
/* Snapshot of /dev/fb0 screen geometry, filled in by fb_init(). */
typedef struct
{
struct fb_var_screeninfo vinfo; // variable screen info (resolution, bpp, pan offset)
struct fb_fix_screeninfo finfo; // fixed screen info (physical framebuffer address)
int width; // visible width in pixels
int height; // visible height in pixels
int bpp; // bits per pixel
int rowsize; // bytes per screen row
int real_len; // bytes in the visible display area
int total_len; // bytes in the whole virtual framebuffer
int offset; // byte offset of the visible area inside the virtual buffer
}FbInfoStruct;
/* Payload sized so the whole struct lands near 128 MiB. NOTE(review): the
 * subtraction assumes no padding between members; actual sizeof may differ
 * slightly, which is harmless since mmap uses sizeof(ShmVideo360Struct). */
#define VIDEO_BUF_SIZE ((128 * 1024 * 1024) - (sizeof(unsigned int)*3) - (sizeof(sem_t)*2))
/*
 * Layout of the POSIX shared-memory segment. Must match the copy compiled
 * into process B byte for byte. The two anonymous semaphores live inside
 * the segment and are initialised by process B (the owner) with pshared=1.
 */
typedef struct
{
sem_t sem_h264_put; // posted by A when a frame is in data[]; waited on by B
sem_t sem_h264_get; // posted by B when the frame is consumed; waited on by A
unsigned int head; // write offset into data[]; advanced by B, wrapped to 0 by A
unsigned int len; // length of the current frame in bytes
unsigned int index; // running frame counter
unsigned char data[VIDEO_BUF_SIZE]; // H.264 byte-stream frames
}ShmVideo360Struct;
/* Forward declarations. */
static int fb_init(void);
static int gst_pipeline_init(void);
static void new_h264_sample_on_appsink (GstElement *sink, GstDataStruct *pGstData);
static void start_feed (GstElement * pipeline, guint size, GstDataStruct *pGstData);
static void stop_feed (GstElement * pipeline, GstDataStruct *pGstData);
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData);
static void *ipu_thread(void *arg);
/* One physically-contiguous IPU buffer: CPU mapping plus physical address. */
struct IPU_PHY_MEM {
char *vaddr; // mmap()ed CPU view of the buffer
unsigned int paddr; // physical address returned by IPU_ALLOC
unsigned int size; // buffer size in bytes (one NV12 frame)
};
struct ipu_task gtask; // reused for every colour-space conversion
struct IPU_PHY_MEM ipu_pmem;
static GstDataStruct GstData;
static FbInfoStruct FbInfo;
ShmVideo360Struct *shared360 = NULL; // points at the mapped shared segment
int posix_shm_fd;
void* posix_shm;
int fd_fb; // /dev/fb0
int fd_ipu; // /dev/mxc_ipu
sem_t sem_frame_put; // ipu_thread -> start_feed: a converted frame is ready
sem_t sem_frame_get; // start_feed -> ipu_thread: the frame has been pushed
/* shm_open() name: must be a plain identifier, no extra '/' components. */
#define POSIX_SHM "POSIX.shm"
#define OBJ_PERMS (S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP)
int main(int argc, char *argv[])
{
int ret = 0;
pthread_t tid;
printf("================ imx60 360 main start =============\n");
memset (&GstData, 0, sizeof (GstDataStruct));
sem_init(&sem_frame_put, 0, 0);
sem_init(&sem_frame_get, 0, 0);
if(argc == 5)
{
GstData.frame_rate = atoi(argv[1]);
GstData.gop_size = atoi(argv[2]);
GstData.bit_rate = atoi(argv[3]);
GstData.quant = atoi(argv[4]);
}
else
{
GstData.frame_rate = 30;
GstData.gop_size = 30;
GstData.bit_rate = 1800;
GstData.quant = 1;
}
printf("frame_rate:%d, gop_size:%d, bit_rate:%d ,quant:%d!\n",
GstData.frame_rate, GstData.gop_size, GstData.bit_rate, GstData.quant);
while(1)
{
posix_shm_fd = shm_open(POSIX_SHM, O_RDWR, 0); //打开已存在的共享内存
if(posix_shm_fd != -1) break;
printf("posix shm open failed, try again, 2 second later!\n");
sleep(2);
}
errno = 0;
posix_shm = mmap(NULL, sizeof(ShmVideo360Struct), PROT_READ | PROT_WRITE, MAP_SHARED, posix_shm_fd, 0);
if (posix_shm == MAP_FAILED)
{
printf("posix mmap failed\n");
printf("errno info: %s\n", strerror(errno));
close(posix_shm_fd);
shm_unlink(POSIX_SHM);
return 0;
}
shared360 = (ShmVideo360Struct*)posix_shm;
fb_init();
ret = pthread_create(&tid, NULL, ipu_thread, NULL);
if(ret)
{
printf("create gst_thread failed, error = %d \n", ret);
close(fd_fb);
close(fd_ipu);
return -1;
}
printf("create ipu_thread (thr ID = 0x%x) OK.\n", (unsigned int)tid);
if(gst_pipeline_init() != 0)
{
gst_object_unref (GstData.pipeline);
close(fd_fb);
close(fd_ipu);
return -1;
}
printf("pipeline start to playing!\n");
gst_element_set_state (GstData.pipeline, GST_STATE_PLAYING);
GstData.loop = g_main_loop_new(NULL, FALSE); // Create gstreamer loop
g_main_loop_run(GstData.loop); // Loop will run until receiving EOS (end-of-stream), will block here
printf("g_main_loop_run returned, stopping record\n");
gst_element_set_state (GstData.pipeline, GST_STATE_NULL); // Stop pipeline to be released
printf("Deleting pipeline\n");
gst_object_unref (GstData.pipeline); // THis will also delete all pipeline elements
g_source_remove(GstData.bus_watch_id);
g_main_loop_unref(GstData.loop);
close(fd_fb);
close(fd_ipu);
sem_destroy(&sem_frame_put);
sem_destroy(&sem_frame_get);
munmap(posix_shm, sizeof(ShmVideo360Struct));
shm_unlink(POSIX_SHM);
printf("================ imx60 360 main end ==============\n");
return 0;
}
/*
 * Frame-grab thread: endlessly converts the visible framebuffer contents
 * (RGB565 or BGR32) to NV12 via the i.MX IPU, then hands each converted
 * frame to start_feed() through the sem_frame_put/sem_frame_get pair.
 */
static void *ipu_thread(void *arg)
{
    int res;
    unsigned int yuv_size = 0;
    unsigned int ipu_index = 0;

    (void)arg;
    sleep(2); /* let fb_init() populate FbInfo before sizing the buffer */
    yuv_size = FbInfo.vinfo.xres * FbInfo.vinfo.yres * 3 / 2; // NV12 size
    printf("yuv_size:%d\n", yuv_size);
    /* IPU_ALLOC is in/out: pass the wanted size, get the phys addr back. */
    ipu_pmem.paddr = yuv_size;
    ipu_pmem.size = yuv_size;
    printf("before IPU_ALLOC pmem.paddr:%d\n", ipu_pmem.paddr);
    res = ioctl(fd_ipu, IPU_ALLOC, &ipu_pmem.paddr);
    if (res < 0)
    {
        printf("IPU_ALLOC failed\n");
        return NULL;
    }
    printf("after IPU_ALLOC pmem.paddr:%d\n", ipu_pmem.paddr);
    ipu_pmem.vaddr = mmap(NULL, yuv_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd_ipu, ipu_pmem.paddr);
    /* Fix: check the mapping; the original used the pointer unconditionally. */
    if (ipu_pmem.vaddr == MAP_FAILED)
    {
        printf("ipu mmap failed\n");
        ioctl(fd_ipu, IPU_FREE, &ipu_pmem.paddr);
        return NULL;
    }
    /* Fix: print the pointer with %p instead of truncating it via (int). */
    printf("pmem.vaddr:%p\n", (void *)ipu_pmem.vaddr);
    /* Fix: the original listing read "bzero(>ask, ...)" -- a garbled
     * "&gtask" -- which does not compile. */
    memset(&gtask, 0, sizeof(gtask));
    gtask.input.width = FbInfo.vinfo.xres;
    gtask.input.height = FbInfo.vinfo.yres;
    gtask.input.crop.w = FbInfo.vinfo.xres;
    gtask.input.crop.h = FbInfo.vinfo.yres;
    if(FbInfo.vinfo.bits_per_pixel == 16)
        gtask.input.format = IPU_PIX_FMT_RGB565;
    else
        gtask.input.format = IPU_PIX_FMT_BGR32;
    /* Start converting from the currently panned-to region of the fb. */
    FbInfo.offset = FbInfo.vinfo.xres * (FbInfo.vinfo.bits_per_pixel >> 3) * FbInfo.vinfo.yoffset;
    gtask.input.paddr = FbInfo.finfo.smem_start + FbInfo.offset;
    gtask.output.width = FbInfo.vinfo.xres;
    gtask.output.height = FbInfo.vinfo.yres;
    gtask.output.crop.w = FbInfo.vinfo.xres;
    gtask.output.crop.h = FbInfo.vinfo.yres;
    gtask.output.format = IPU_PIX_FMT_NV12;
    gtask.output.paddr = ipu_pmem.paddr;
    gtask.priority = IPU_TASK_PRIORITY_HIGH;
    gtask.task_id = IPU_TASK_ID_PP;
    /* Fix: use do/while so the check always runs at least once. The
     * original reused `res` from the IPU_ALLOC ioctl (0 on success), so
     * the while-loop body -- the actual IPU_CHECK_TASK -- was skipped. */
    do
    {
        printf("IPU check task!\n");
        res = ioctl(fd_ipu, IPU_CHECK_TASK, &gtask);
    } while (res != IPU_CHECK_OK);
    while(1)
    {
        ipu_index++;
        printf("ipu_index:%u\n", ipu_index);
        res = ioctl(fd_ipu, IPU_QUEUE_TASK, &gtask);
        if(res < 0)
        {
            printf("IPU queue task failed\n");
        }
        else
        {
            /* Frame ready: wake start_feed(), then wait until appsrc has
             * pushed it before overwriting the buffer with the next one. */
            sem_post(&sem_frame_put);
            sem_wait(&sem_frame_get);
            /* Re-read pan offset in case the display double-buffer flipped. */
            ioctl(fd_fb, FBIOGET_VSCREENINFO, &FbInfo.vinfo);
            FbInfo.offset = FbInfo.vinfo.xres * (FbInfo.vinfo.bits_per_pixel >> 3) * FbInfo.vinfo.yoffset;
            gtask.input.paddr = FbInfo.finfo.smem_start + FbInfo.offset;
        }
    }
    /* NOTE: unreachable while the loop above has no exit condition; kept
     * so cleanup is in place should a stop flag be added later. */
    munmap(ipu_pmem.vaddr, ipu_pmem.size);
    ioctl(fd_ipu, IPU_FREE, &ipu_pmem.paddr);
    return NULL;
}
/*
 * Opens /dev/mxc_ipu and /dev/fb0, reads the framebuffer geometry into
 * FbInfo and logs it. Returns 0 on success, -1 on failure.
 */
static int fb_init(void)
{
    printf("=========== imx60 360 get fb0 info start ==========\n");
    memset (&FbInfo, 0, sizeof (FbInfoStruct));
    /* Fix: the original never checked either open() or the ioctls; a
     * missing device node would leave -1 fds in use everywhere. */
    fd_ipu = open("/dev/mxc_ipu", O_RDWR);
    if (fd_ipu < 0)
    {
        printf("open /dev/mxc_ipu failed: %s\n", strerror(errno));
        return -1;
    }
    fd_fb = open("/dev/fb0", O_RDWR);
    if (fd_fb < 0)
    {
        printf("open /dev/fb0 failed: %s\n", strerror(errno));
        close(fd_ipu);
        return -1;
    }
    if (ioctl(fd_fb, FBIOGET_VSCREENINFO, &FbInfo.vinfo) < 0 ||
        ioctl(fd_fb, FBIOGET_FSCREENINFO, &FbInfo.finfo) < 0)
    {
        printf("framebuffer ioctl failed: %s\n", strerror(errno));
        close(fd_fb);
        close(fd_ipu);
        return -1;
    }
    FbInfo.width = FbInfo.vinfo.xres;
    FbInfo.height = FbInfo.vinfo.yres;
    FbInfo.bpp = FbInfo.vinfo.bits_per_pixel;
    FbInfo.rowsize = FbInfo.width * (FbInfo.bpp >> 3);
    FbInfo.offset = FbInfo.rowsize * FbInfo.vinfo.yoffset; /* pan offset in bytes */
    FbInfo.total_len = FbInfo.vinfo.xres_virtual * FbInfo.vinfo.yres_virtual * (FbInfo.bpp >> 3);
    FbInfo.real_len = FbInfo.width * FbInfo.height * (FbInfo.bpp >> 3);
    printf("================= var screen info =================\n");
    printf(" sz [%d x %d] %d\n", FbInfo.width, FbInfo.height, FbInfo.bpp);
    printf(" vsz [%d x %d]\n", FbInfo.vinfo.xres_virtual, FbInfo.vinfo.yres_virtual);
    printf(" pan : (%d, %d)\n", FbInfo.vinfo.xoffset, FbInfo.vinfo.yoffset);
    printf(" off : %d\n", FbInfo.offset);
    printf("============ imx60 360 get fb0 info end ===========\n");
    return 0;
}
static int gst_pipeline_init(void)
{
printf("============= imx60 360 gst init start ============\n");
gst_init (NULL, NULL);
printf("=========== create imx60 360 pipeline =============\n");
GstData.pipeline = gst_pipeline_new ("imx60_360");
GstData.app_source = gst_element_factory_make ("appsrc", "src");
GstData.h264_encoder = gst_element_factory_make ("vpuenc_h264", "video-encoder");
GstData.app_sink = gst_element_factory_make ("appsink", "fake");
if (!GstData.pipeline || !GstData.app_source || !GstData.h264_encoder || !GstData.app_sink)
{
g_printerr ("One element could not be created... Exit\n");
return -1;
}
printf("============ link imx60 360 pipeline ==============\n");
char szTemp[64];
sprintf(szTemp, "%d", FbInfo.vinfo.xres * FbInfo.vinfo.yres * 3 / 2); // NV12 size
g_object_set(G_OBJECT(GstData.app_source), "blocksize", szTemp, NULL);
g_object_set(G_OBJECT(GstData.app_source), "size", szTemp, NULL);
g_object_set(G_OBJECT(GstData.app_source), "do-timestamp", TRUE, NULL);
g_object_set(G_OBJECT(GstData.app_source), "stream-type", 0, "format", GST_FORMAT_TIME, NULL);
g_object_set(G_OBJECT(GstData.app_source), "min-percent", 0, "typefind", TRUE, NULL);
GstCaps *caps_appsrc;
caps_appsrc = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING,"NV12",
"width", G_TYPE_INT, FbInfo.width,
"height", G_TYPE_INT, FbInfo.height,
"framerate",GST_TYPE_FRACTION, GstData.frame_rate, 1, NULL);
GstCaps *caps_app_sink;
caps_app_sink = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream",
"alignment", G_TYPE_STRING, "au", NULL);
g_object_set(G_OBJECT(GstData.app_source), "caps", caps_appsrc, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "bitrate", GstData.bit_rate, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "quant", GstData.quant, NULL);
g_object_set(G_OBJECT(GstData.h264_encoder), "gop-size", GstData.gop_size, NULL);
g_object_set(G_OBJECT(GstData.app_sink), "emit-signals", TRUE, "caps", caps_app_sink, "sync", TRUE, NULL);
GstData.bus = gst_pipeline_get_bus(GST_PIPELINE(GstData.pipeline));
GstData.bus_watch_id = gst_bus_add_watch(GstData.bus, (GstBusFunc)bus_msg_call, (gpointer)&GstData);
gst_object_unref(GstData.bus);
g_signal_connect(GstData.app_source, "need-data", G_CALLBACK(start_feed), &GstData);
g_signal_connect(GstData.app_source, "enough-data", G_CALLBACK(stop_feed), &GstData);
g_signal_connect(GstData.app_sink, "new-sample", G_CALLBACK(new_h264_sample_on_appsink), &GstData);
gst_bin_add_many(GST_BIN(GstData.pipeline), GstData.app_source, GstData.h264_encoder, GstData.app_sink, NULL);
if(gst_element_link(GstData.app_source, GstData.h264_encoder) != TRUE)
{
g_printerr ("GstData.app_source could not link GstData.h264_encoder\n");
gst_object_unref (GstData.pipeline);
return -1;
}
if(gst_element_link_filtered(GstData.h264_encoder, GstData.app_sink, caps_app_sink) != TRUE)
{
g_printerr ("GstData.h264_encoder could not link GstData.app_sink\n");
gst_object_unref (GstData.pipeline);
return -1;
}
gst_caps_unref (caps_app_sink);
gst_caps_unref (caps_appsrc);
return 0;
}
/*
 * appsink "new-sample" callback: copies each encoded H.264 access unit
 * into the shared-memory ring read by process B.
 *
 * Handshake (anonymous process-shared semaphores inside the segment):
 *   wait(sem_h264_get)  -- block until B has consumed the previous frame
 *   write data/len/index at shared360->head
 *   post(sem_h264_put)  -- tell B a new frame is ready
 * B advances `head`; this side only resets it to 0 when the next frame
 * would overflow the buffer.
 *
 * NOTE(review): GStreamer documents the "new-sample" handler as returning
 * GstFlowReturn, not void -- confirm against the installed appsink;
 * fixing it would also require changing the forward declaration above.
 */
static void new_h264_sample_on_appsink (GstElement *sink, GstDataStruct *pGstData)
{
int ret = 0;
GstSample *sample = NULL;
g_signal_emit_by_name (sink, "pull-sample", &sample);
if(sample)
{
pGstData->app_sink_index++;
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo info;
if(gst_buffer_map((buffer), &info, GST_MAP_READ))
{
/* NOTE(review): the sem_wait() return value is not checked before the
 * write below; an EINTR failure would race the consumer. */
ret = sem_wait(&shared360->sem_h264_get);
/* Ring wrap: restart at offset 0 when this frame would overflow. */
if ((shared360->head + info.size) > VIDEO_BUF_SIZE)
{
printf("video run a loop %d++++++++++++++++++++++++++++++++++\n", shared360->head);
shared360->head = 0;
}
memcpy(&shared360->data[shared360->head], info.data, info.size);
shared360->len = info.size;
shared360->index++;
g_print ("h264 frame is put to shm buffer, len:%d, index:%d\n", (int)info.size, pGstData->app_sink_index);
/* Signal process B that a new frame is available. */
ret = sem_post(&shared360->sem_h264_put);
if(ret == -1)
{
printf("SEM error, end of stream!\n");
g_signal_emit_by_name (pGstData->app_source, "end-of-stream", &ret);
}
gst_buffer_unmap(buffer, &info);
}
gst_sample_unref(sample);
}
}
/*
 * appsrc "need-data" callback: hands the most recent IPU-converted NV12
 * frame to the pipeline.
 *
 * Blocks on sem_frame_put until ipu_thread has finished a conversion,
 * wraps the IPU buffer zero-copy into a GstBuffer, pushes it, then posts
 * sem_frame_get so ipu_thread may start converting the next frame.
 * NOTE(review): the wrapped memory is the live IPU buffer with no free
 * callback -- assumes the encoder copies it before the next conversion.
 */
static void start_feed (GstElement * pipeline, guint size, GstDataStruct *pGstData)
{
    GstFlowReturn flow;
    GstBuffer *frame_buf;
    GstMemory *wrapped;

    pGstData->app_src_index++;
    frame_buf = gst_buffer_new();
    /* Wait for ipu_thread to finish the colour-space conversion. */
    sem_wait(&sem_frame_put);
    wrapped = gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY,
                                     ipu_pmem.vaddr, ipu_pmem.size,
                                     0, ipu_pmem.size, NULL, NULL);
    gst_buffer_append_memory (frame_buf, wrapped);
    g_signal_emit_by_name (pGstData->app_source, "push-buffer", frame_buf, &flow);
    gst_buffer_unref(frame_buf);
    printf("pGstData->app_src_index:%d\n",pGstData->app_src_index);
    /* Allow ipu_thread to overwrite the buffer with the next frame. */
    sem_post(&sem_frame_get);
}
/*
 * appsrc "enough-data" callback: the source queue is full. Only logs;
 * throttling is handled by the sem_frame_put/sem_frame_get handshake
 * rather than by removing a GSource here.
 */
static void stop_feed(GstElement * pipeline, GstDataStruct *pGstData)
{
    g_print("stop feed...................\n");
}
/*
 * GStreamer bus watch: quits the main loop on end-of-stream or on the
 * first error message; all other messages are ignored.
 */
gboolean bus_msg_call(GstBus *bus, GstMessage *msg, GstDataStruct *pGstData)
{
    GMainLoop *loop = pGstData->loop;

    GST_DEBUG ("got message %s",gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
    switch (GST_MESSAGE_TYPE(msg))
    {
    case GST_MESSAGE_EOS:
        printf("End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR:
    {
        gchar *debug = NULL;
        GError *error = NULL;

        gst_message_parse_error(msg, &error, &debug);
        g_free(debug); /* detailed debug string is not used */
        g_printerr("Error: %s\n", error->message);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE; /* keep the watch installed */
}
进程B
进程B创建POSIX共享内存,并对共享内存中预定义的匿名posix信号量进行初始化,四步操作如下:
- posix_shm_fd = shm_open(POSIX_SHM, O_CREAT | O_RDWR | O_TRUNC, OBJ_PERMS)
- ret = ftruncate(posix_shm_fd, sizeof(ShmVideo360Struct))
- posix_shm = mmap(NULL, sizeof(ShmVideo360Struct), PROT_READ | PROT_WRITE, MAP_SHARED, posix_shm_fd, 0)
- sem_init(&shared360->sem_h264_put, 1, 0);
- sem_init(&shared360->sem_h264_get, 1, 1);
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <assert.h>
#include <sys/types.h>
#include <sys/un.h>
#include <fcntl.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <signal.h>
#include <pthread.h>
#include <semaphore.h>
#include <errno.h>
#include "media.h"
#include "srs_librtmp.h"
#include "queue.h"
/* Payload sized so the whole struct lands near 128 MiB. NOTE(review): the
 * subtraction assumes no inter-member padding; harmless since mmap uses
 * sizeof(ShmVideo360Struct). */
#define VIDEO_BUF_SIZE ((128 * 1024 * 1024) - (sizeof(unsigned int)*3) - (sizeof(sem_t)*2))
/*
 * Layout of the POSIX shared-memory segment. Must match the copy compiled
 * into process A byte for byte. This process (B) owns the segment and
 * initialises the two anonymous semaphores with pshared=1.
 */
typedef struct
{
sem_t sem_h264_put; // posted by A when a frame is in data[]; waited on here
sem_t sem_h264_get; // posted here when the frame is consumed; waited on by A
unsigned int head; // read offset into data[]; advanced here, wrapped to 0 by A
unsigned int len; // length of the current frame in bytes
unsigned int index; // running frame counter
unsigned char data[VIDEO_BUF_SIZE]; // H.264 byte-stream frames
}ShmVideo360Struct;
/* Defined elsewhere in process B. */
extern unsigned long q_record; // recorder message queue
extern int is_iframe(char p); // tests an H.264 NAL header byte for an I-frame
unsigned long q_srs_rtmp360; // rtmp push queue; add by luke zhao 2018.6.13, used for 360 video
srs_rtmp_t rtmp360 = NULL; // add by luke zhao 2018.6.13, used for 360 video
pthread_mutex_t rtmp_lock360; // add by luke zhao 2018.6.13, used for 360 video
int start_wait_iframe360 = 1; // add by luke zhao 2018.6.13, used for 360 video
int rtmp_stop360 = 1; // 1 = rtmp streaming stopped; add by luke zhao 2018.6.13
int rtmp360_had_start = 0; // add by luke zhao 2018.6.13, used for 360 video
int rtmp360_wait_I = 1; // 1 = still waiting for the first I-frame
char rtmp_url360[128] = {0}; // add by luke zhao 2018.6.13, used for 360 video
struct timeval tvl_360; // 360 frames carry no timestamp; reuse ch0's clock (2018.6.14)
/* shm_open() name: must be a plain identifier, no extra '/' components. */
#define POSIX_SHM "POSIX.shm"
#define OBJ_PERMS (S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP)
ShmVideo360Struct *shared360 = NULL; // points at the mapped shared segment
int posix_shm_fd;
void* posix_shm;
/*
 * Process B receiver thread.
 *
 * Creates and owns the POSIX shared-memory segment, initialises the two
 * anonymous process-shared semaphores embedded in it (pshared=1; they must
 * live inside the segment), then loops forever consuming H.264 frames
 * produced by process A:
 *   wait(sem_h264_put)  -- a frame is ready at data[head]
 *   forward it to the recorder queue (and the rtmp queue when streaming)
 *   advance head, post(sem_h264_get)
 */
void *thr_rcv_360(void *arg)
{
    int ret = 0;
    REC_MSG rec_msg;
    unsigned char *vbuf;

    errno = 0;
    /* Owner side: create (and truncate) the segment. The name must be a
     * simple identifier -- extra '/' components make shm_open fail. */
    posix_shm_fd = shm_open(POSIX_SHM, O_CREAT | O_RDWR | O_TRUNC, OBJ_PERMS);
    if(posix_shm_fd == -1)
    {
        printf("posix shm open failed!\n");
        printf("errno info: %s\n", strerror(errno));
        return 0;
    }
    ret = ftruncate(posix_shm_fd, sizeof(ShmVideo360Struct));
    if(ret == -1)
    {
        printf("posix shm ftruncate failed!\n");
        close(posix_shm_fd);
        shm_unlink(POSIX_SHM);
        return 0;
    }
    posix_shm = mmap(NULL, sizeof(ShmVideo360Struct), PROT_READ | PROT_WRITE, MAP_SHARED, posix_shm_fd, 0);
    if (posix_shm == MAP_FAILED)
    {
        printf("posix mmap failed\n");
        /* Fix: release the fd and the name on this path too (leaked before). */
        close(posix_shm_fd);
        shm_unlink(POSIX_SHM);
        return 0;
    }
    /* Fix: print the mapping with %p instead of truncating via (int). */
    printf("posix mmap success:%p\n", posix_shm);
    shared360 = (ShmVideo360Struct*)posix_shm;
    shared360->head = 0;
    shared360->len = 0;
    shared360->index = 0;
    /* pshared=1: the semaphores synchronise two distinct processes. */
    sem_init(&shared360->sem_h264_put, 1, 0); /* frames available: none yet */
    sem_init(&shared360->sem_h264_get, 1, 1); /* the writer may produce one */
    while(1)
    {
        if(sem_wait(&shared360->sem_h264_put) == -1) break;
        vbuf = &shared360->data[shared360->head];
        //gettimeofday(&tvl_360, NULL);
        rec_msg.msg_type = MEDIA_VIDEO;
        rec_msg.frame = vbuf;
        rec_msg.used_size = shared360->len;
        rec_msg.channel = MAX_CHANNEL-1;
        rec_msg.ts_sec = tvl_360.tv_sec;
        rec_msg.ts_usec = tvl_360.tv_usec;
        rec_msg.index = shared360->index;
        /* Fix: len/index are unsigned -- print them with %u. */
        printf("shm has rcv data len:%u time: %d, index:%u!\n", shared360->len, (unsigned int)tvl_360.tv_sec, shared360->index);
        if(rtmp360_wait_I)
        {
            /* assumes a 4-byte 00 00 00 01 start code so byte 4 is the NAL
             * header -- TODO confirm the encoder's output alignment */
            if(is_iframe(vbuf[4]))
            {
                rtmp360_wait_I = 0;
                printf("rtmp360 wait I frame ok!!!\n");
            }
            /* (redundant else that re-set the flag to 1 removed) */
        }
        shared360->head += shared360->len;
        /* NOTE(review): rec_msg.frame still points into the ring when we
         * post sem_h264_get; the writer only resets head on wrap, but a
         * wrap before the queues consume the frame would clobber it. */
        sem_post(&shared360->sem_h264_get);
        ret = q_send(q_record, (unsigned char *)&rec_msg, sizeof(REC_MSG));
        if((rtmp360 != NULL) && (rtmp_stop360 == 0))
        {
            ret = q_send(q_srs_rtmp360, (unsigned char *)&rec_msg, sizeof(REC_MSG));
        }
    }
    /* Owner tear-down: destroy the embedded semaphores, unmap, unlink. */
    sem_destroy(&shared360->sem_h264_put);
    sem_destroy(&shared360->sem_h264_get);
    munmap(posix_shm, sizeof(ShmVideo360Struct));
    shm_unlink(POSIX_SHM);
    return 0;
}