一、概述
因为gstreamer没有提供qnx系统的支持, 因此这里要实现音频和视频的播放,就必须自己实现最终的音视频输出的元件,即sink元件。一开始,我的想法是,可否移植开源的音视频输出库,比如sdl,alsa等等, 但是发现有些麻烦, 反而把事情弄得更复杂了。 最终还是踏踏实实地编写两个gstreamer的sink元件,用来输出音频和视频。
要编写gstreamer的插件有很多方式, 比如直接“继承”实现GstElementClass和GstElement,使用gst-template工具创建一个插件模板,然后去实现。 我这里使用的另一种方式, 既然是实现sink类型的元件。 那么就直接找gstreamer已有的sink元件作为模板,将他们复制过来,删除原有的接口实现代码,换上自己的实现。
注意, 输出的plugin动态库名称以 libpluginname.so 的形式命名, 而这个pluginname就是编写插件时,定义插件的宏GST_PLUGIN_DEFINE 的参数中的name, 如果不匹配,插件就会无法使用,并被gstreamer加入黑名单
二、实现QNX audio sink插件
其实gstreamer有sink插件的“基类”:GstAudioSinkClass, 我们要做的就是继承和override一些接口。GstAudioSinkClass的全部接口如下:
struct _GstAudioSinkClass {
/**基类,其完整的继承路径是 GstAudioSinkClass ——> GstAudioBaseSinkClass ——>
GstBaseSinkClass ——> GstElementClass ,再向上就是和gobject相关的东西了,这里不涉及 */
GstAudioBaseSinkClass parent_class;
/* vtable, 接口定义 */
/* open the device with given specs, 打开设备*/
gboolean (*open) (GstAudioSink *sink);
/* prepare resources and state to operate with the given specs, 进行prepare操作 */
gboolean (*prepare) (GstAudioSink *sink, GstAudioRingBufferSpec *spec);
/* undo anything that was done in prepare(), 回退到prepare之前 */
gboolean (*unprepare) (GstAudioSink *sink);
/* close the device, 关闭设备 */
gboolean (*close) (GstAudioSink *sink);
/* write samples to the device, 向音频设备写数据 */
gint (*write) (GstAudioSink *sink, gpointer data, guint length);
/* get number of frames queued in the device,获取设备队列里面有多少帧没有输出(即缓存) */
guint (*delay) (GstAudioSink *sink);
/* reset the audio device, unblock from a write,重置设备 */
void (*reset) (GstAudioSink *sink);
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
我这里就是以oss音频 sink插件作为模板,将其源码复制出来,删除基于oss接口实现的 gstreamer audio sink接口的代码,然后再填充自己的代码, 其中 头文件 gstQnxAudioSink.h, 其内容如下:
/* GStreamer
 * Copyright (C) gu.wen <454727014@qq.com>
 *
 * gstqnxaudiosink.h:
 *
 */
#ifndef __GST_QNXAUDIO_H__
#define __GST_QNXAUDIO_H__
#include <gst/gst.h>
#include <gst/audio/gstaudiosink.h>
/* NOTE: <glib/gtypes.h> must NOT be included directly -- glib enforces
 * "#error Only <glib.h> can be included directly", and <gst/gst.h> already
 * pulls in the whole of glib, so the extra include was removed. */
#include <sys/asoundlib.h> /* QNX io-audio (ALSA-like) API header */
G_BEGIN_DECLS
/* Convenience type-check / cast macros for the element */
#define GST_TYPE_QNXAUDIOSINK (gst_qnxaudio_sink_get_type())
#define GST_QNXAUDIOSINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), \
    GST_TYPE_QNXAUDIOSINK, \
    GstQnxAudioSink))
#define GST_QNXAUDIOSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), \
    GST_TYPE_QNXAUDIOSINK, \
    GstQnxAudioSinkClass))
#define GST_IS_QNXAUDIOSINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
    GST_TYPE_QNXAUDIOSINK))
#define GST_IS_QNXAUDIOSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), \
    GST_TYPE_QNXAUDIOSINK))
typedef struct _GstQnxAudioSink GstQnxAudioSink;
typedef struct _GstQnxAudioSinkClass GstQnxAudioSinkClass;
/**
 * GstQnxAudioSink:
 *
 * Instance structure.  Inherits from GstAudioSink; the parent member must
 * stay first so the GObject cast macros work.
 */
struct _GstQnxAudioSink
{
    GstAudioSink sink;       /* parent instance (must be the first member) */
    gchar *device;           /* path of the QNX PCM playback device node */
    gint audioCard;          /* sound card id reported by snd_pcm_info() */
    gint bytes_per_sample;   /* bytes per frame, filled in by prepare() */
    GstCaps *probed_caps;    /* cached result of the device capability probe */
    /* QNX io-audio state */
    snd_pcm_info_t info;
    snd_pcm_t *pcm_handle;
    snd_pcm_channel_info_t channelInfo;
    snd_mixer_t *mixer_handle;
    snd_mixer_group_t mixerGroup;
    snd_pcm_channel_params_t channelParam;
    snd_pcm_channel_setup_t channelSetup;
};
/**
 * GstQnxAudioSinkClass:
 *
 * Class structure.  Inheritance chain:
 * GstQnxAudioSinkClass -> GstAudioSinkClass -> GstAudioBaseSinkClass
 * -> GstBaseSinkClass -> GstElementClass (GObject machinery above that).
 */
struct _GstQnxAudioSinkClass
{
    GstAudioSinkClass parent_class;
};
/* Returns the GType of the element (see gobject/glib documentation). */
GType gst_qnxaudio_sink_get_type(void);
G_END_DECLS
#endif /* __GST_QNXAUDIO_H__ */
接下来才是重点, qnx audio sink 的实现源文件, 如下所示:
/* GStreamer
* Copyright (C) gu.wen <454727014@qq.com>
*
* gstqnxaudiosink.c:
*
*/
/**
* SECTION:element-qnxaudio
*
* This element lets you output sound using the qnx audio system (QNXAUDIO).
*
* Note that you should almost always use generic audio conversion elements
* like audioconvert and audioresample in front of an audiosink to make sure
* your pipeline works under all circumstances (those conversion elements will
* act in passthrough-mode if no conversion is necessary).
*
* <refsect2>
* <title>Example pipelines</title>
 * |[
 * gst-launch-1.0 -v audiotestsrc ! audioconvert ! volume volume=0.1 ! qnxaudiosink
 * ]| will output a sine wave (continuous beep sound) to your sound card (with
 * a very low volume as precaution).
 * |[
 * gst-launch-1.0 -v filesrc location=music.ogg ! decodebin ! audioconvert !
 * audioresample ! qnxaudiosink ]|
 * will play an Ogg/Vorbis audio file and output it using the QNX audio system.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstQnxAudioSink.h"
#define PLUGIN_VERSION "00.01.00"
#define PACKAGE "gst-plugins-qnx"
#define GST_LICENSE "LGPL"
#define GST_PACKAGE_NAME "GStreamer qnx Plug-ins source release"
#define GST_PACKAGE_ORIGIN "Unknown package origin"
GST_DEBUG_CATEGORY_EXTERN(qnxaudio_debug);
#define GST_CAT_DEFAULT qnxaudio_debug
static void gst_qnxaudio_sink_dispose(GObject *object);
static void gst_qnxaudio_sink_finalise(GObject *object);
static void gst_qnxaudio_sink_get_property(GObject *object, guint prop_id,
GValue *value, GParamSpec *pspec);
static void gst_qnxaudio_sink_set_property(GObject *object, guint prop_id,
const GValue *value, GParamSpec *pspec);
static GstCaps *gst_qnxaudio_sink_getcaps(GstBaseSink *bsink, GstCaps *filter);
static gboolean gst_qnxaudio_sink_open(GstAudioSink *asink);
static gboolean gst_qnxaudio_sink_close(GstAudioSink *asink);
static gboolean gst_qnxaudio_sink_prepare(GstAudioSink *asink,
GstAudioRingBufferSpec *spec);
static gboolean gst_qnxaudio_sink_unprepare(GstAudioSink *asink);
static gint gst_qnxaudio_sink_write(GstAudioSink *asink, gpointer data,
guint length);
static guint gst_qnxaudio_sink_delay(GstAudioSink *asink);
static void gst_qnxaudio_sink_reset(GstAudioSink *asink);
/* QnxAudioSink signals and args */
enum
{
    /* no signals are emitted by this element */
    LAST_SIGNAL
};
/* Default PCM playback node on QNX (the "preferred" playback device). */
#define DEFAULT_DEVICE "/dev/snd/pcmPreferredp"
enum
{
    PROP_0,
    PROP_DEVICE,
};
/* Sample formats advertised in the pad template: native-endian 16-bit
 * signed/unsigned plus 8-bit signed/unsigned. */
#define FORMATS "{" GST_AUDIO_NE(S16) "," GST_AUDIO_NE(U16) ", S8, U8 }"
/* Static always-present sink pad template: interleaved raw audio,
 * mono or stereo (stereo with the standard front-left/front-right mask). */
static GstStaticPadTemplate qnxaudio_sink_factory =
GST_STATIC_PAD_TEMPLATE("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS("audio/x-raw, "
        "format = (string) " FORMATS ", "
        "layout = (string) interleaved, "
        "rate = (int) [ 1, MAX ], "
        "channels = (int) 1; "
        "audio/x-raw, "
        "format = (string) " FORMATS ", "
        "layout = (string) interleaved, "
        "rate = (int) [ 1, MAX ], "
        "channels = (int) 2, "
        "channel-mask = (bitmask) 0x3"));
/* static guint gst_qnxaudio_sink_signals[LAST_SIGNAL] = { 0 }; */
#define gst_qnxaudio_sink_parent_class parent_class
G_DEFINE_TYPE(GstQnxAudioSink, gst_qnxaudio_sink, GST_TYPE_AUDIO_SINK);
static void gst_qnxaudio_sink_dispose(GObject *object)
{
    /* GObjectClass::dispose override: release the cached probed caps so
     * dispose stays idempotent, then chain up to the parent class. */
    GstQnxAudioSink *self = GST_QNXAUDIOSINK(object);
    if (self->probed_caps != NULL)
    {
        gst_caps_unref(self->probed_caps);
        self->probed_caps = NULL;
    }
    G_OBJECT_CLASS(parent_class)->dispose(object);
}
static GstStructure *
gst_qnxaudio_helper_get_format_structure(unsigned int format_bit)
{
    /* Map a QNX sample-format code (SND_PCM_SFMT_*) to a GstStructure for
     * the matching interleaved "audio/x-raw" caps.  Asserts (and returns
     * NULL in non-debug builds) for formats this element never probes. */
    GstStructure *structure;
    const gchar *format;
    switch (format_bit)
    {
        case SND_PCM_SFMT_U8:
            format = "U8";
            break;
        case SND_PCM_SFMT_S16_LE:
            format = "S16LE";
            break;
        case SND_PCM_SFMT_S16_BE:
            format = "S16BE";
            break;
        case SND_PCM_SFMT_S8:
            format = "S8";
            break;
        case SND_PCM_SFMT_U16_LE:
            format = "U16LE";
            break;
        case SND_PCM_SFMT_U16_BE:
            format = "U16BE";
            break;
        default:
            g_assert_not_reached();
            return NULL;
    }
    /* Use the GStreamer debug system instead of the leftover printf so
     * output is controlled by GST_DEBUG like the rest of the plugin. */
    GST_DEBUG("format: %s", format);
    structure = gst_structure_new("audio/x-raw",
        "format", G_TYPE_STRING, format,
        "layout", G_TYPE_STRING, "interleaved", NULL);
    return structure;
}
static GstCaps *
gst_qnxaudio_helper_probe_caps(snd_pcm_channel_info_t *channelInfo)
{
    /* Build the device caps: one structure per candidate sample format,
     * constrained by the rate range the driver reported in channelInfo.
     * Returns a new (caller-owned) GstCaps, possibly empty on failure. */
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    const guint probe_formats[] =
        {SND_PCM_SFMT_S16_LE, SND_PCM_SFMT_U16_LE, SND_PCM_SFMT_U8, SND_PCM_SFMT_S8};
#else
    const guint probe_formats[] =
        {SND_PCM_SFMT_S16_BE, SND_PCM_SFMT_U16_BE, SND_PCM_SFMT_U8, SND_PCM_SFMT_S8};
#endif
    GstStructure *structure;
    GstCaps *caps;
    int f;
    /* FIXME test make sure we're not currently playing */
    /* FIXME test both mono and stereo */
    caps = gst_caps_new_empty();
    for (f = 0; f < G_N_ELEMENTS(probe_formats); ++f)
    {
        /* G_VALUE_INIT is the documented way to zero-init a stack GValue. */
        GValue rate_value = G_VALUE_INIT;
        g_value_init(&rate_value, GST_TYPE_INT_RANGE);
        /* Some drivers report max_rate == -1 (unknown); collapse the range
         * to min_rate instead of producing an invalid int range. */
        gst_value_set_int_range(&rate_value, channelInfo->min_rate,
            (channelInfo->max_rate == -1) ?
                channelInfo->min_rate :
                channelInfo->max_rate);
        structure = gst_qnxaudio_helper_get_format_structure(probe_formats[f]);
        gst_structure_set(structure, "channels",
            GST_TYPE_INT_RANGE, 1, 2, NULL);
        gst_structure_set_value(structure, "rate", &rate_value);
        g_value_unset(&rate_value);
        gst_caps_append_structure(caps, structure);
    }
    if (gst_caps_is_empty(caps))
    {
        /* fixme: make user-visible */
        GST_WARNING("Your qnx audio device could not be probed correctly");
    }
    else
    {
        caps = gst_caps_simplify(caps);
    }
    GST_DEBUG("probed caps: %" GST_PTR_FORMAT, caps);
    return caps;
}
static void gst_qnxaudio_sink_class_init(GstQnxAudioSinkClass *klass)
{
    /* Class initialisation: wire up the GObject property machinery,
     * override the GstBaseSink/GstAudioSink vfuncs, and register the pad
     * template and element metadata. */
    GObjectClass *gobject_class = (GObjectClass *)klass;
    GstElementClass *gstelement_class = (GstElementClass *)klass;
    GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *)klass;
    GstAudioSinkClass *gstaudiosink_class = (GstAudioSinkClass *)klass;
    /* NOTE: the explicit g_type_class_peek_parent() call was removed --
     * G_DEFINE_TYPE already initialises parent_class
     * (via the gst_qnxaudio_sink_parent_class alias) before class_init. */
    gobject_class->dispose = gst_qnxaudio_sink_dispose;
    gobject_class->finalize = gst_qnxaudio_sink_finalise;
    gobject_class->get_property = gst_qnxaudio_sink_get_property;
    gobject_class->set_property = gst_qnxaudio_sink_set_property;
    /* "device" property: path of the QNX PCM playback node.  The old blurb
     * still described the OSS device (/dev/dspN) the template came from. */
    g_object_class_install_property(gobject_class, PROP_DEVICE,
        g_param_spec_string("device", "Device",
            "QNX audio device (usually /dev/snd/pcmPreferredp)",
            DEFAULT_DEVICE,
            G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
    /* Override parent vfuncs with our QNX implementations. */
    gstbasesink_class->get_caps =
        GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_getcaps);
    gstaudiosink_class->open = GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_open);
    gstaudiosink_class->close = GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_close);
    gstaudiosink_class->prepare = GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_prepare);
    gstaudiosink_class->unprepare =
        GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_unprepare);
    gstaudiosink_class->write = GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_write);
    gstaudiosink_class->delay = GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_delay);
    gstaudiosink_class->reset = GST_DEBUG_FUNCPTR(gst_qnxaudio_sink_reset);
    /* Element metadata shown by gst-inspect. */
    gst_element_class_set_static_metadata(gstelement_class,
        "Audio Sink (QNXAUDIO)",
        "Sink/Audio",
        "Output to a sound card via QNXAUDIO",
        "guwen <454727014@qq.com>");
    gst_element_class_add_static_pad_template(gstelement_class,
        &qnxaudio_sink_factory);
}
static void gst_qnxaudio_sink_init(GstQnxAudioSink *qnxaudio)
{
    /* Instance init: establish sane defaults before the device is opened. */
    GST_DEBUG_OBJECT(qnxaudio, "initializing qnxaudio");
    qnxaudio->pcm_handle = NULL;
    qnxaudio->mixer_handle = NULL;
    qnxaudio->audioCard = -1;
    qnxaudio->probed_caps = NULL;
    /* Use g_strdup: the string is released with g_free() in finalise() and
     * set_property() -- mixing libc strdup with g_free is an allocator
     * mismatch. */
    qnxaudio->device = g_strdup(DEFAULT_DEVICE);
    memset(&qnxaudio->channelInfo, 0, sizeof(qnxaudio->channelInfo));
    memset(&qnxaudio->channelParam, 0, sizeof(qnxaudio->channelParam));
    memset(&qnxaudio->channelSetup, 0, sizeof(qnxaudio->channelSetup));
    memset(&qnxaudio->mixerGroup, 0, sizeof(qnxaudio->mixerGroup));
}
static void gst_qnxaudio_sink_finalise(GObject *object)
{
    /* GObjectClass::finalize override: free the device path string and
     * chain up to the parent class. */
    GstQnxAudioSink *self = GST_QNXAUDIOSINK(object);
    g_free(self->device);
    G_OBJECT_CLASS(parent_class)->finalize(object);
}
static void
gst_qnxaudio_sink_set_property(GObject *object, guint prop_id,
    const GValue *value, GParamSpec *pspec)
{
    /* GObject property setter; only "device" is writable. */
    GstQnxAudioSink *self = GST_QNXAUDIOSINK(object);
    switch (prop_id)
    {
        case PROP_DEVICE:
            /* Swap in the new device path and invalidate the cached probe
             * result, which described the previous device. */
            g_free(self->device);
            self->device = g_value_dup_string(value);
            if (self->probed_caps != NULL)
            {
                gst_caps_unref(self->probed_caps);
                self->probed_caps = NULL;
            }
            break;
        default:
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
            break;
    }
}
static void gst_qnxaudio_sink_get_property(GObject *object, guint prop_id,
    GValue *value, GParamSpec *pspec)
{
    /* GObject property getter; only "device" is readable. */
    GstQnxAudioSink *self = GST_QNXAUDIOSINK(object);
    switch (prop_id)
    {
        case PROP_DEVICE:
            g_value_set_string(value, self->device);
            break;
        default:
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
            break;
    }
}
static GstCaps *gst_qnxaudio_sink_getcaps(GstBaseSink *bsink,
    GstCaps *filter)
{
    /* GstBaseSinkClass::get_caps: report what the device can accept.
     * Before the device is opened we can only offer the template caps;
     * once open, the device is probed once and the result cached in
     * probed_caps.  (Leftover printf debugging removed.) */
    GstQnxAudioSink *qnxaudio = GST_QNXAUDIOSINK(bsink);
    GstCaps *caps;
    if (qnxaudio->pcm_handle == NULL)
    {
        caps = gst_pad_get_pad_template_caps(GST_BASE_SINK_PAD(bsink));
    }
    else if (qnxaudio->probed_caps)
    {
        caps = gst_caps_ref(qnxaudio->probed_caps);
    }
    else
    {
        caps = gst_qnxaudio_helper_probe_caps(&qnxaudio->channelInfo);
        if (caps && !gst_caps_is_empty(caps))
        {
            qnxaudio->probed_caps = gst_caps_ref(caps);
        }
    }
    if (filter && caps)
    {
        /* Honour the caller's filter: return the intersection and drop
         * our own reference. */
        GstCaps *intersection =
            gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref(caps);
        return intersection;
    }
    return caps;
}
static gint ilog2(gint x)
{
    /* Branch-free floor(log2(x)) for x > 0: smear the highest set bit
     * into every lower position, then count the set bits (SWAR popcount)
     * and subtract one.  See Hacker's Delight, ch. 5. */
    x |= x >> 1;
    x |= x >> 2;
    x |= x >> 4;
    x |= x >> 8;
    x |= x >> 16;
    /* SWAR population count */
    x -= (x >> 1) & 0x55555555;
    x = (x & 0x33333333) + ((x >> 2) & 0x33333333);
    x = (x + (x >> 4)) & 0x0f0f0f0f;
    x += x >> 8;
    x += x >> 16;
    return (x & 0x0000003f) - 1;
}
static gint
gst_qnxaudio_sink_get_format(GstAudioRingBufferFormatType fmt,
    GstAudioFormat rfmt)
{
    /* Translate a GStreamer ring-buffer format (and, for raw audio, the
     * concrete sample format) into the matching SND_PCM_SFMT_* code.
     * Returns -1 when there is no QNX equivalent. */
    switch (fmt)
    {
        case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW:
            return SND_PCM_SFMT_MU_LAW;
        case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW:
            return SND_PCM_SFMT_A_LAW;
        case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IMA_ADPCM:
            return SND_PCM_SFMT_IMA_ADPCM;
        case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG:
            return SND_PCM_SFMT_MPEG;
        case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW:
            switch (rfmt)
            {
                case GST_AUDIO_FORMAT_U8:
                    return SND_PCM_SFMT_U8;
                case GST_AUDIO_FORMAT_S16LE:
                    return SND_PCM_SFMT_S16_LE;
                case GST_AUDIO_FORMAT_S16BE:
                    return SND_PCM_SFMT_S16_BE;
                case GST_AUDIO_FORMAT_S8:
                    return SND_PCM_SFMT_S8;
                case GST_AUDIO_FORMAT_U16LE:
                    return SND_PCM_SFMT_U16_LE;
                case GST_AUDIO_FORMAT_U16BE:
                    return SND_PCM_SFMT_U16_BE;
                default:
                    return -1;
            }
        default:
            return -1;
    }
}
static gboolean gst_qnxaudio_sink_open(GstAudioSink *asink)
{
    /* GstAudioSinkClass::open: open the PCM device for playback and cache
     * the card id and playback-channel capabilities for later use by
     * get_caps() and prepare().  On any failure an element error is posted,
     * both handles are left NULL, and FALSE is returned.
     * (Fixes: "failedd" typo and missing spaces where adjacent string
     * literals were concatenated in the user-visible error messages.) */
    GstQnxAudioSink *qnxaudio = GST_QNXAUDIOSINK(asink);
    int rtn = snd_pcm_open_name(&qnxaudio->pcm_handle,
        qnxaudio->device, SND_PCM_OPEN_PLAYBACK);
    if (rtn == EOK)
    {
        rtn = snd_pcm_info(qnxaudio->pcm_handle, &qnxaudio->info);
        if (rtn == 0)
        {
            qnxaudio->channelInfo.channel = SND_PCM_CHANNEL_PLAYBACK;
            rtn = snd_pcm_plugin_info(qnxaudio->pcm_handle,
                &qnxaudio->channelInfo);
            if (rtn == 0)
            {
                /* Fully opened: remember which card we are driving. */
                qnxaudio->audioCard = qnxaudio->info.card;
                return TRUE;
            }
            GST_ELEMENT_ERROR(qnxaudio, RESOURCE, OPEN_WRITE, (NULL),
                ("Could not open audio device for playback. "
                    "snd_pcm_plugin_info failed: %s\n",
                    snd_strerror(rtn)));
        }
        else
        {
            GST_ELEMENT_ERROR(qnxaudio, RESOURCE, OPEN_WRITE,
                (("Could not open audio device for playback. "
                    "get qnx audio pcm info failed")),
                (NULL));
        }
        /* Partially opened: release the handle before reporting failure. */
        snd_pcm_close(qnxaudio->pcm_handle);
    }
    else
    {
        GST_ERROR_OBJECT(qnxaudio,
            "open qnx audio output device: %s failed\n",
            qnxaudio->device);
        /* Map the documented snd_pcm_open_name() error codes onto
         * element errors. */
        if (rtn == -EINVAL)
        {
            GST_ELEMENT_ERROR(qnxaudio, RESOURCE, OPEN_WRITE,
                (("Could not open audio device for playback. "
                    "The mode is invalid")),
                (NULL));
        }
        else if (rtn == -ENOENT)
        {
            GST_ELEMENT_ERROR(qnxaudio, RESOURCE, NOT_FOUND,
                (("Could not open audio device for playback. "
                    "The named device doesn't exist.")),
                (NULL));
        }
        else if (rtn == -ENOMEM)
        {
            GST_ELEMENT_ERROR(qnxaudio, RESOURCE, NO_SPACE_LEFT,
                (("Could not open audio device for playback. "
                    "Not enough memory is available to allocate the "
                    "control structures")),
                (NULL));
        }
        else if (rtn == -SND_ERROR_INCOMPATIBLE_VERSION)
        {
            GST_ELEMENT_ERROR(qnxaudio, RESOURCE, OPEN_WRITE,
                (("Could not open audio device for playback. "
                    "The audio driver version is incompatible "
                    "with the client library that the application "
                    "is using.")),
                (NULL));
        }
    }
    qnxaudio->pcm_handle = NULL;
    qnxaudio->mixer_handle = NULL;
    return FALSE;
}
static gboolean gst_qnxaudio_sink_close(GstAudioSink *asink)
{
    /* GstAudioSinkClass::close: flush whatever is still queued on the
     * playback channel, then tear down the mixer and PCM handles.
     * Returns FALSE if either close call fails. */
    GstQnxAudioSink *self = GST_QNXAUDIOSINK(asink);
    int err;
    if (self->pcm_handle != NULL)
    {
        /* Drop any samples still queued for playback. */
        err = snd_pcm_plugin_flush(self->pcm_handle,
            SND_PCM_CHANNEL_PLAYBACK);
    }
    if (self->mixer_handle != NULL)
    {
        err = snd_mixer_close(self->mixer_handle);
        if (err != 0)
        {
            GST_ERROR_OBJECT(self, "close mixer failed: %d\n", err);
            return FALSE;
        }
        self->mixer_handle = NULL;
    }
    if (self->pcm_handle != NULL)
    {
        err = snd_pcm_close(self->pcm_handle);
        if (err != 0)
        {
            GST_ERROR_OBJECT(self,
                "close qnx audio device failed: %d\n",
                err);
            return FALSE;
        }
        self->pcm_handle = NULL;
    }
    return TRUE;
}
static gboolean
gst_qnxaudio_sink_prepare(GstAudioSink *asink, GstAudioRingBufferSpec *spec)
{
    /* GstAudioSinkClass::prepare: translate the negotiated ring-buffer spec
     * into QNX channel parameters, program the device
     * (snd_pcm_plugin_params/prepare/setup), then open the mixer attached
     * to the playback channel.  Returns TRUE when the device accepted the
     * configuration; FALSE (with an element error posted) otherwise. */
    GstQnxAudioSink *qnxaudio;
    int tmp;
    guint mSampleBits, mSampleRate, mSampleChannels;
    gint fragSize;
    snd_pcm_channel_params_t* pChParam;
    qnxaudio = GST_QNXAUDIOSINK(asink);
    /* Map the GStreamer format pair to a SND_PCM_SFMT_* code. */
    tmp = gst_qnxaudio_sink_get_format(spec->type,
        GST_AUDIO_INFO_FORMAT(&spec->info));
    if (tmp == -1)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("Unable to get format (%d, %d)", spec->type,
                GST_AUDIO_INFO_FORMAT(&spec->info)));
        return FALSE;
    }
    mSampleBits = GST_AUDIO_INFO_WIDTH(&spec->info);
    mSampleRate = GST_AUDIO_INFO_RATE(&spec->info);
    mSampleChannels = GST_AUDIO_INFO_CHANNELS(&spec->info);
    /* Only 8- and 16-bit sample widths are handled below. */
    if (mSampleBits != 16 && mSampleBits != 8)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("unexpected width %d", mSampleBits));
        return FALSE;
    }
    /* Pack segsize/segtotal into an OSS-style fragment word -- used only
     * in the debug log below; it is never passed to the QNX API. */
    tmp = ilog2(spec->segsize);
    tmp = ((spec->segtotal & 0x7fff) << 16) | tmp;
    GST_DEBUG_OBJECT(qnxaudio, "set segsize: %d, segtotal: %d, value: %08x",
        spec->segsize, spec->segtotal, tmp);
    fragSize = 0;
    pChParam = &qnxaudio->channelParam;
    pChParam->mode = SND_PCM_MODE_BLOCK;
    pChParam->channel = SND_PCM_CHANNEL_PLAYBACK;
    pChParam->start_mode = SND_PCM_START_FULL;
    pChParam->stop_mode = SND_PCM_STOP_STOP;
    pChParam->buf.block.frag_size =
        qnxaudio->channelInfo.max_fragment_size;
    /* NOTE(review): fragSize was set to 0 just above, so this condition is
     * always true and frag_size is always overwritten with 0 -- this looks
     * like leftover template logic; confirm the intended fragment size. */
    if (fragSize != -1)
    {
        pChParam->buf.block.frag_size = fragSize;
    }
    pChParam->buf.block.frags_max = -1; //num_frags;
    pChParam->buf.block.frags_min = 1;
    pChParam->format.interleave = 1;
    pChParam->format.rate = mSampleRate;
    pChParam->format.voices = mSampleChannels;
    if (mSampleBits == 8)
    {
        pChParam->format.format = SND_PCM_SFMT_U8;
    }
    else if (mSampleBits == 16)
    {
        pChParam->format.format = SND_PCM_SFMT_S16_LE;
    }
    else
    {
        /* unreachable: widths other than 8/16 were rejected above */
        pChParam->format.format = SND_PCM_SFMT_S16_LE;
    }
    strcpy(pChParam->sw_mixer_subchn_name, "Wave playback channel");
    /* Hand the parameter block to the driver. */
    tmp = snd_pcm_plugin_params(qnxaudio->pcm_handle,
        &qnxaudio->channelParam);
    if (tmp < 0)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("snd_pcm_plugin_params failed: %s\n",
                snd_strerror(tmp)));
        return FALSE;
    }
    tmp = snd_pcm_plugin_prepare(qnxaudio->pcm_handle,
        SND_PCM_CHANNEL_PLAYBACK);
    if (tmp < 0)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("snd_pcm_plugin_prepare failed: %s\n",
                snd_strerror(tmp)));
        return FALSE;
    }
    /* Read back the effective configuration.  mixer_gid points into our
     * mixerGroup so snd_pcm_plugin_setup() fills it in for us. */
    qnxaudio->channelSetup.channel = SND_PCM_CHANNEL_PLAYBACK;
    qnxaudio->channelSetup.mixer_gid = &qnxaudio->mixerGroup.gid;
    tmp = snd_pcm_plugin_setup(qnxaudio->pcm_handle,
        &qnxaudio->channelSetup);
    if (tmp < 0)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("snd_pcm_plugin_setup failed: %s\n",
                snd_strerror(tmp)));
        return FALSE;
    }
    GST_INFO_OBJECT(qnxaudio, "Format %s \n",
        snd_pcm_get_format_name(
            qnxaudio->channelSetup.format.format));
    GST_INFO_OBJECT(qnxaudio, "Frag Size %d \n",
        qnxaudio->channelSetup.buf.block.frag_size);
    GST_INFO_OBJECT(qnxaudio, "Total Frags %d \n",
        qnxaudio->channelSetup.buf.block.frags);
    GST_INFO_OBJECT(qnxaudio, "Rate %d \n",
        qnxaudio->channelSetup.format.rate);
    GST_INFO_OBJECT(qnxaudio, "Voices %d \n",
        qnxaudio->channelSetup.format.voices);
    /* A playback mixer group must exist, otherwise the mixer cannot
     * be attached below. */
    if (qnxaudio->mixerGroup.gid.name[0] == 0)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("Mixer Pcm Group [%s] Not Set \n",
                qnxaudio->mixerGroup.gid.name));
        return FALSE;
    }
    tmp = snd_mixer_open(&qnxaudio->mixer_handle,
        qnxaudio->audioCard,
        qnxaudio->channelSetup.mixer_device);
    if (tmp < 0)
    {
        GST_ELEMENT_ERROR(qnxaudio, RESOURCE, SETTINGS, (NULL),
            ("snd_mixer_open failed: %s\n",
                snd_strerror(tmp)));
        return FALSE;
    }
    /* NOTE(review): this reports the *requested* frag_size (possibly 0,
     * per the fragSize logic above) back to the ring buffer, not the
     * value read back in channelSetup.buf.block.frag_size -- verify. */
    spec->segsize = pChParam->buf.block.frag_size;
    //spec->segtotal = info.fragstotal;
    qnxaudio->bytes_per_sample = GST_AUDIO_INFO_BPF(&spec->info);
    GST_DEBUG_OBJECT(qnxaudio, "got segsize: %d, segtotal: %d, value: %08x",
        spec->segsize, spec->segtotal, tmp);
    return TRUE;
}
static gboolean gst_qnxaudio_sink_unprepare(GstAudioSink *asink)
{
    /* Undo prepare() by bouncing the device: close it completely, then
     * reopen so the element is back in the state right after open(). */
    if (!gst_qnxaudio_sink_close(asink))
    {
        GST_ERROR_OBJECT(asink, "Could not close the audio device");
        return FALSE;
    }
    if (!gst_qnxaudio_sink_open(asink))
    {
        GST_ERROR_OBJECT(asink, "Could not reopen the audio device");
        return FALSE;
    }
    return TRUE;
}
static gint gst_qnxaudio_sink_write(GstAudioSink *asink,
    gpointer data, guint length)
{
    /* GstAudioSinkClass::write: push one segment of interleaved samples to
     * the playback channel; returns bytes written or a negative error.
     * The unconditional printf was removed -- this runs once per buffer on
     * the streaming hot path. */
    GstQnxAudioSink *qnxaudio = GST_QNXAUDIOSINK(asink);
    return snd_pcm_plugin_write(qnxaudio->pcm_handle, data, length);
}
static guint gst_qnxaudio_sink_delay(GstAudioSink *asink)
{
    /* GstAudioSinkClass::delay: report how many frames are still queued in
     * the device.  Removes the per-call printf (this is polled frequently
     * by the ring buffer) and guards the division against a zero
     * bytes_per_sample (it is only set once prepare() has run). */
    GstQnxAudioSink *qnxaudio;
    snd_pcm_channel_status_t status;
    memset(&status, 0, sizeof(status));
    status.channel = SND_PCM_CHANNEL_PLAYBACK;
    qnxaudio = GST_QNXAUDIOSINK(asink);
    if (snd_pcm_plugin_status(qnxaudio->pcm_handle, &status) < 0)
    {
        GST_ERROR_OBJECT(asink,
            "underrun: playback channel status error\n");
        return 0;
    }
    if (qnxaudio->bytes_per_sample <= 0)
    {
        /* prepare() has not configured the frame size yet. */
        return 0;
    }
    return status.count / qnxaudio->bytes_per_sample;
}
static void gst_qnxaudio_sink_reset(GstAudioSink *asink)
{
    /* GstAudioSinkClass::reset -- intentionally a no-op: this port does
     * not implement reset for the QNX audio API; there is nothing we can
     * do here to unblock a pending write. */
}
/* Definition of the plugin-wide debug category (declared with
 * GST_DEBUG_CATEGORY_EXTERN near the top of this file). */
GST_DEBUG_CATEGORY (qnxaudio_debug);
#define GST_CAT_DEFAULT qnxaudio_debug
static gboolean
plugin_init (GstPlugin * plugin)
{
    /* Plugin entry point: register the qnxaudiosink element, then create
     * the debug category used throughout this file. */
    if (!gst_element_register(plugin, "qnxaudiosink", GST_RANK_SECONDARY,
            GST_TYPE_QNXAUDIOSINK))
    {
        return FALSE;
    }
    GST_DEBUG_CATEGORY_INIT(qnxaudio_debug, "qnxaudio",
        0, "QNX audio elements");
#ifdef ENABLE_NLS
    GST_DEBUG("binding text domain %s to locale dir %s", GETTEXT_PACKAGE,
        LOCALEDIR);
    bindtextdomain(GETTEXT_PACKAGE, LOCALEDIR);
    bind_textdomain_codeset(GETTEXT_PACKAGE, "UTF-8");
#endif /* ENABLE_NLS */
    return TRUE;
}
/* Plugin descriptor.  The third argument ("qnxaudio") is the plugin name:
 * the installed shared library's name must match it (libpluginname.so, as
 * noted in section 1), otherwise GStreamer blacklists the plugin when it
 * scans the registry. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    qnxaudio,
    "Qnx Audio support for GStreamer",
    plugin_init, PLUGIN_VERSION, GST_LICENSE,
    GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
三、 实现QNX video sink 插件
和audio sink一样,gstreamer也提供了video sink的“基类”GstVideoSinkClass, 其内容就相对比较简单:
/**
* GstVideoSinkClass:
* @parent_class: the parent class structure
* @show_frame: render a video frame. Maps to #GstBaseSinkClass.render() and
* #GstBaseSinkClass.preroll() vfuncs. Rendering during preroll will be
* suppressed if the #GstVideoSink:show-preroll-frame property is set to
* %FALSE.
*
* The video sink class structure. Derived classes should override the
* @show_frame virtual function.
*/
struct _GstVideoSinkClass {
GstBaseSinkClass parent_class;
GstFlowReturn (*show_frame) (GstVideoSink *video_sink, GstBuffer *buf);
/*< private >*/
gpointer _gst_reserved[GST_PADDING];
};
一样的参考现有的视频输出元件的模板,定义头文件如下:
/* GStreamer
 * Copyright (C) gu.wen <454727014@qq.com>
 *
 * gstqnxvideosink.h:
 *
 */
#ifndef __GST_QNX_VIDEO_SINK_H__
#define __GST_QNX_VIDEO_SINK_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideosink.h>
/* QNX Screen Graphics Subsystem API */
#include <screen/screen.h>
//#include <aalib.h>
#ifdef __cplusplus
extern "C"
{
#endif /* __cplusplus */
/* Convenience type-check / cast macros for the element */
#define GST_TYPE_QNXVIDEOSINK \
(gst_qnxvideo_sink_get_type())
#define GST_QNXVIDEOSINK(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_QNXVIDEOSINK, GstQnxVideoSink))
#define GST_QNXVIDEOSINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_QNXVIDEOSINK, GstQnxVideoSinkClass))
#define GST_IS_QNXVIDEOSINK(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_QNXVIDEOSINK))
#define GST_IS_QNXVIDEOSINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_QNXVIDEOSINK))
typedef struct _GstQnxVideoSink GstQnxVideoSink;
typedef struct _GstQnxVideoSinkClass GstQnxVideoSinkClass;
struct _GstQnxVideoSink
{ /* Instance structure: inherits from GstVideoSink (must stay first). */
    GstVideoSink parent;
    GstVideoInfo info;             /* negotiated video format information */
    gint frames_displayed;         /* statistics: frames shown so far */
    guint64 frame_time;
    screen_context_t screen_ctx;   /* QNX Screen connection context */
    screen_window_t screen_win;    /* target Screen window */
    gboolean isOpened;
    screen_buffer_t dispBuf[2];    /* double-buffered display buffers */
    gint dispRect[4];              /* display rect size */
    gint startX;
    gint startY;
    gint width;
    gint height;
};
struct _GstQnxVideoSinkClass
{ /* Class structure: inherits from GstVideoSinkClass. */
    GstVideoSinkClass parent_class;
};
GType gst_qnxvideo_sink_get_type(void);
#ifdef __cplusplus
}
#endif /* __cplusplus */
#endif /* __GST_QNX_VIDEO_SINK_H__ */
接下来就是重点,具体的video sink元件的实现,其代码稍微有点多,具体如下所示。
/* GStreamer
* Copyright (C) gu.wen <454727014@qq.com>
*
* gstqnxvideosink.c:
*
*/
/**
* SECTION:element-qnxvideosink
* @see_also: #GstQnxVideoSink
*
* Displays video on qnx.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch-1.0 filesrc location=test.avi ! decodebin ! videoconvert
 * ! qnxvideosink ]| This pipeline decodes a video file and renders it
 * to a QNX screen window using the qnxvideosink element.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include <sys/time.h>
#include <gst/video/gstvideometa.h>
#include "gstQnxVideoSink.h"
///
///
#define PLUGIN_VERSION "00.01.00"
#define PACKAGE "gst-plugins-qnx"
#define GST_LICENSE "LGPL"
#define GST_PACKAGE_NAME "GStreamer qnx Plug-ins source release"
#define GST_PACKAGE_ORIGIN "Unknown package origin"
#define GST_DEFAULT_FRAME_BUF_CNT 4
#define GST_DISP_BUF_CNT 4
/* Parenthesized so the bitwise OR survives surrounding operators at the
 * macro's expansion sites (the unparenthesized form breaks under e.g.
 * `GST_DISP_USAGE & mask`). */
#define GST_DISP_USAGE (SCREEN_USAGE_NATIVE | SCREEN_USAGE_WRITE)
/* qnxvideosink signals and args */
enum
{
    /* no signals are emitted by this element */
    LAST_SIGNAL
};
enum
{
    PROP_0,
    PROP_WIDTH,
    PROP_HEIGHT,
#if 0
    /* properties inherited from the aalib template; currently disabled */
    PROP_DRIVER,
    PROP_DITHER,
    PROP_BRIGHTNESS,
    PROP_CONTRAST,
    PROP_GAMMA,
    PROP_INVERSION,
    PROP_RANDOMVAL,
#endif
    PROP_FRAMES_DISPLAYED,
    PROP_FRAME_TIME
};
/* Static always-present sink pad template: only raw NV12 video. */
static GstStaticPadTemplate sink_template =
GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK,GST_PAD_ALWAYS,
    GST_STATIC_CAPS(
        GST_VIDEO_CAPS_MAKE("NV12")));
/* vfunc overrides -- see class_init for where each one is installed */
static GstCaps *gst_qnxvideo_sink_fixate(GstBaseSink *bsink, GstCaps *caps);
static gboolean gst_qnxvideo_sink_setcaps(GstBaseSink *bsink, GstCaps *caps);
static void gst_qnxvideo_sink_get_times(GstBaseSink *bsink, GstBuffer *buffer,
    GstClockTime *start, GstClockTime *end);
static gboolean gst_qnxvideo_sink_propose_allocation(GstBaseSink *bsink,
    GstQuery *query);
static GstFlowReturn gst_qnxvideo_sink_show_frame(GstVideoSink *videosink,
    GstBuffer *buffer);
static void gst_qnxvideo_sink_set_property(GObject *object, guint prop_id,
    const GValue *value, GParamSpec *pspec);
static void gst_qnxvideo_sink_get_property(GObject *object, guint prop_id,
    GValue *value, GParamSpec *pspec);
static GstStateChangeReturn gst_qnxvideo_sink_change_state(GstElement *element,
    GstStateChange transition);
#define gst_qnxvideo_sink_parent_class parent_class
G_DEFINE_TYPE(GstQnxVideoSink, gst_qnxvideo_sink, GST_TYPE_VIDEO_SINK);
/* NOTE(review): everything inside this #if 0 block is dead code inherited
 * from the aalib-based template (aasink) this element was copied from; it
 * references aa_drivers / aa_dithernames, which do not exist on QNX.
 * Kept disabled here; consider deleting once the port is complete. */
#if 0
#define GST_TYPE_QNXVIDEODRIVERS (gst_qnxvideo_sink_drivers_get_type())
static GType gst_qnxvideo_sink_drivers_get_type(void)
{
    static GType driver_type = 0;
    if (!driver_type)
    {
        GEnumValue *drivers;
        const struct aa_driver *driver;
        gint n_drivers;
        gint i;
        for (n_drivers = 0; aa_drivers[n_drivers]; n_drivers++)
        {
            /* count number of drivers */
        }
        drivers = g_new0(GEnumValue, n_drivers + 1);
        for (i = 0; i < n_drivers; i++)
        {
            driver = aa_drivers[i];
            drivers[i].value = i;
            drivers[i].value_name = g_strdup(driver->name);
            drivers[i].value_nick = g_utf8_strdown(driver->shortname, -1);
        }
        drivers[i].value = 0;
        drivers[i].value_name = NULL;
        drivers[i].value_nick = NULL;
        driver_type = g_enum_register_static("GstQnxVideoSinkDrivers",
            drivers);
    }
    return driver_type;
}
#define GST_TYPE_AADITHER (gst_qnxvideo_sink_dither_get_type())
static GType gst_qnxvideo_sink_dither_get_type(void)
{
    static GType dither_type = 0;
    if (!dither_type)
    {
        GEnumValue *ditherers;
        gint n_ditherers;
        gint i;
        for (n_ditherers = 0; aa_dithernames[n_ditherers]; n_ditherers++)
        {
            /* count number of ditherers */
        }
        ditherers = g_new0(GEnumValue, n_ditherers + 1);
        for (i = 0; i < n_ditherers; i++)
        {
            ditherers[i].value = i;
            ditherers[i].value_name = g_strdup(aa_dithernames[i]);
            ditherers[i].value_nick =
                g_strdelimit(g_strdup(aa_dithernames[i]), " _", '-');
        }
        ditherers[i].value = 0;
        ditherers[i].value_name = NULL;
        ditherers[i].value_nick = NULL;
        dither_type = g_enum_register_static("GstQnxVideoSinkDitherers",
            ditherers);
    }
    return dither_type;
}
#endif
/* Class init: install the width/height properties, register the static
 * sink pad template and element metadata, and hook up the virtual
 * methods this sink overrides on GstElement/GstBaseSink/GstVideoSink.
 * (The former #if 0 block installing driver/dither/brightness/... props
 * referenced handlers and fields that do not exist in this sink and has
 * been removed as dead code.) */
static void gst_qnxvideo_sink_class_init(GstQnxVideoSinkClass *klass)
{
    GObjectClass *gobject_class;
    GstElementClass *gstelement_class;
    GstBaseSinkClass *gstbasesink_class;
    GstVideoSinkClass *gstvideosink_class;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    gobject_class = (GObjectClass *)klass;
    gstelement_class = (GstElementClass *)klass;
    gstbasesink_class = (GstBaseSinkClass *)klass;
    gstvideosink_class = (GstVideoSinkClass *)klass;

    gobject_class->set_property = gst_qnxvideo_sink_set_property;
    gobject_class->get_property = gst_qnxvideo_sink_get_property;

    /* FIXME: add long property descriptions */
    g_object_class_install_property(
        G_OBJECT_CLASS(klass), PROP_WIDTH,
        g_param_spec_int("width", "width", "width",
                         G_MININT, G_MAXINT, 0,
                         G_PARAM_READWRITE |
                         G_PARAM_STATIC_STRINGS));
    g_object_class_install_property(
        G_OBJECT_CLASS(klass), PROP_HEIGHT,
        g_param_spec_int("height", "height", "height",
                         G_MININT, G_MAXINT, 0,
                         G_PARAM_READWRITE |
                         G_PARAM_STATIC_STRINGS));

    gst_element_class_add_static_pad_template(gstelement_class, &sink_template);
    gst_element_class_set_static_metadata(gstelement_class,
                                          "Qnx video sink",
                                          "Sink/Video",
                                          "A QNX video sink",
                                          "guwen <454727014@qq.com>");

    gstelement_class->change_state =
        GST_DEBUG_FUNCPTR(gst_qnxvideo_sink_change_state);
    gstbasesink_class->fixate = GST_DEBUG_FUNCPTR(gst_qnxvideo_sink_fixate);
    gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR(gst_qnxvideo_sink_setcaps);
    gstbasesink_class->get_times = GST_DEBUG_FUNCPTR(gst_qnxvideo_sink_get_times);
    gstbasesink_class->propose_allocation =
        GST_DEBUG_FUNCPTR(gst_qnxvideo_sink_propose_allocation);
    gstvideosink_class->show_frame =
        GST_DEBUG_FUNCPTR(gst_qnxvideo_sink_show_frame);
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
}
/* fixate vfunc: pin any unfixed caps fields to this sink's preferred
 * defaults (1920x720 @ 30/1) and let the base class finish fixation. */
static GstCaps* gst_qnxvideo_sink_fixate(GstBaseSink *bsink, GstCaps *caps)
{
    GstStructure *s;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    caps = gst_caps_make_writable(caps);
    s = gst_caps_get_structure(caps, 0);
    gst_structure_fixate_field_nearest_int(s, "width", 1920);
    gst_structure_fixate_field_nearest_int(s, "height", 720);
    gst_structure_fixate_field_nearest_fraction(s, "framerate", 30, 1);
    caps = GST_BASE_SINK_CLASS(parent_class)->fixate(bsink, caps);
    printf("[%s.%d]===>:caps: %p\n", __FUNCTION__, __LINE__, caps);
    return caps;
}
/* set_caps vfunc: record the negotiated video info and resize the QNX
 * Screen window (and its buffers) to the new frame dimensions.
 * Fixes: screen_* return codes were silently ignored; they are now
 * checked and reported (non-fatally, preserving the TRUE return). */
static gboolean gst_qnxvideo_sink_setcaps(GstBaseSink *basesink, GstCaps *caps)
{
    GstQnxVideoSink *qnxvideosink;
    GstVideoInfo info;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    qnxvideosink = GST_QNXVIDEOSINK(basesink);
    if (!gst_video_info_from_caps(&info, caps))
        goto invalid_caps;
    qnxvideosink->info = info;
    qnxvideosink->width = info.width;
    qnxvideosink->height = info.height;
    printf("[%s.%d]===>:width: %d, height:%d\n",
           __FUNCTION__, __LINE__, info.width, info.height);
    if (qnxvideosink->screen_win)
    {
        /* dispRect = {x, y, w, h}; origin stays at (0, 0) */
        memset(&(qnxvideosink->dispRect), 0, sizeof(qnxvideosink->dispRect));
        qnxvideosink->dispRect[2] = qnxvideosink->width;
        qnxvideosink->dispRect[3] = qnxvideosink->height;
        if (screen_create_window_buffers(qnxvideosink->screen_win,
                                         GST_DISP_BUF_CNT) != 0)
            GST_WARNING_OBJECT(qnxvideosink,
                               "screen_create_window_buffers failed");
        if (screen_set_window_property_iv(qnxvideosink->screen_win,
                                          SCREEN_PROPERTY_SIZE,
                                          qnxvideosink->dispRect + 2) != 0)
            GST_WARNING_OBJECT(qnxvideosink, "failed to set window size");
        if (screen_set_window_property_iv(qnxvideosink->screen_win,
                                          SCREEN_PROPERTY_BUFFER_SIZE,
                                          qnxvideosink->dispRect + 2) != 0)
            GST_WARNING_OBJECT(qnxvideosink, "failed to set buffer size");
    }
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return TRUE;

    /* ERRORS */
invalid_caps:
    {
        GST_ERROR_OBJECT(qnxvideosink, "invalid caps");
        printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
        return FALSE;
    }
}
/* Instance init: start with no screen resources and a cleared display
 * rectangle; everything real is created in open() at READY->PAUSED. */
static void
gst_qnxvideo_sink_init(GstQnxVideoSink *qnxvideosink)
{
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    qnxvideosink->screen_ctx = NULL;
    qnxvideosink->screen_win = NULL;
    qnxvideosink->isOpened = FALSE;
    memset(&(qnxvideosink->dispRect), 0, sizeof(qnxvideosink->dispRect));
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
}
/* Nearest-neighbour scaler for a single 8-bit plane: resamples a
 * (sw x sh) source with row stride ss into a tightly packed (dw x dh)
 * destination, using 16.16 fixed-point stepping (0x10000 == 1.0 pixel).
 * NOTE(review): currently unreferenced — show_frame's memcpy path is
 * used instead; kept for a future scaling path. */
static void gst_qnxvideo_sink_scale(GstQnxVideoSink *qnxvideosink,
guchar *src, guchar *dest,
gint sw, gint sh, gint ss,
gint dw, gint dh)
{
gint ypos, yinc, y;
gint xpos, xinc, x;
/* dw and dh are divisors below; zero would divide by zero */
g_return_if_fail((dw != 0) && (dh != 0));
ypos = 0x10000;
yinc = (sh << 16) / dh; /* source-row advance per destination row */
xinc = (sw << 16) / dw; /* source-column advance per destination column */
for (y = dh; y; y--)
{
/* consume whole source rows accumulated in ypos (advance by stride) */
while (ypos > 0x10000)
{
ypos -= 0x10000;
src += ss;
}
xpos = 0x10000;
{
guchar *destp = dest;
guchar *srcp = src;
for (x = dw; x; x--)
{
/* advance srcp by however many whole pixels xpos has accumulated */
while (xpos >= 0x10000L)
{
srcp++;
xpos -= 0x10000L;
}
*destp++ = *srcp; /* nearest-neighbour sample */
xpos += xinc;
}
}
dest += dw; /* destination rows are tightly packed (stride == dw) */
ypos += yinc;
}
}
/* get_times vfunc: the render window starts at the buffer timestamp
 * and, when the duration is valid, ends timestamp + duration later
 * (otherwise *end is left untouched for the base class default). */
static void
gst_qnxvideo_sink_get_times(GstBaseSink *sink, GstBuffer *buffer,
                            GstClockTime *start, GstClockTime *end)
{
    GstClockTime ts = GST_BUFFER_TIMESTAMP(buffer);

    *start = ts;
    if (GST_BUFFER_DURATION_IS_VALID(buffer))
        *end = ts + GST_BUFFER_DURATION(buffer);
}
/* propose_allocation vfunc: answer upstream allocation queries with a
 * pool sized for the negotiated caps plus video-meta support. */
static gboolean gst_qnxvideo_sink_propose_allocation(GstBaseSink *bsink,
                                                     GstQuery *query)
{
    GstCaps *caps = NULL;
    GstVideoInfo vinfo;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    gst_query_parse_allocation(query, &caps, NULL);
    if (caps == NULL)
    {
        GST_ERROR_OBJECT(bsink, "no caps specified");
        printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
        return FALSE;
    }
    if (!gst_video_info_from_caps(&vinfo, caps))
    {
        GST_ERROR_OBJECT(bsink, "invalid caps specified");
        printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
        return FALSE;
    }
    /* we need at least 2 buffer because we hold on to the last one */
    gst_query_add_allocation_pool(query, NULL, GST_VIDEO_INFO_SIZE(&vinfo),
                                  GST_DEFAULT_FRAME_BUF_CNT, 0);
    /* we support various metadata */
    gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return TRUE;
}
/* show_frame vfunc: map the incoming NV12 buffer, copy it into the
 * window's first render buffer, and post the window.
 * Fixes: removed the #if 0 pixmap/blit path (it contained a split
 * identifier "S CREEN_PROPERTY_BUFFER_SIZE" and would not compile);
 * SCREEN_PROPERTY_RENDER_BUFFERS fills an array of screen_buffer_t, so
 * pass the array (decaying to a pointer to its first element) rather
 * than the address of the array; guard the memcpy against a NULL
 * buffer pointer. */
static GstFlowReturn gst_qnxvideo_sink_show_frame(GstVideoSink *videosink,
                                                  GstBuffer *buffer)
{
    GstQnxVideoSink *qnxvideosink;
    GstVideoFrame frame;
    screen_buffer_t render_buf[GST_DISP_BUF_CNT];
    char *pBuf = NULL;

    qnxvideosink = GST_QNXVIDEOSINK(videosink);
    GST_DEBUG("show frame");
    if (!gst_video_frame_map(&frame, &qnxvideosink->info,
                             buffer, GST_MAP_READ))
        goto invalid_frame;

    screen_get_window_property_pv(qnxvideosink->screen_win,
                                  SCREEN_PROPERTY_RENDER_BUFFERS,
                                  (void **)render_buf);
    screen_get_buffer_property_pv(render_buf[0], SCREEN_PROPERTY_POINTER,
                                  (void **)&pBuf);
    if (pBuf != NULL)
    {
        /* NV12: luma plane plus interleaved chroma = w*h*3/2 bytes.
         * NOTE(review): assumes the frame stride matches the window
         * buffer stride — TODO confirm for non-aligned widths. */
        memcpy(pBuf, GST_VIDEO_FRAME_PLANE_DATA(&frame, 0),
               qnxvideosink->width * qnxvideosink->height * 3 / 2);
        screen_post_window(qnxvideosink->screen_win, render_buf[0], 1,
                           qnxvideosink->dispRect, 0);
    }
    else
    {
        GST_WARNING_OBJECT(qnxvideosink, "no render buffer pointer");
    }
    gst_video_frame_unmap(&frame);
    qnxvideosink->frames_displayed++;
    return GST_FLOW_OK;

    /* ERRORS */
invalid_frame:
    {
        GST_ERROR_OBJECT(qnxvideosink, "invalid frame");
        printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
        return GST_FLOW_ERROR;
    }
}
/* GObject set_property: accept the width/height properties.
 * Fixes: the default branch now warns on unknown property ids (matching
 * get_property) instead of silently ignoring them; the dead #if 0
 * driver/dither/... cases, which touch fields this sink does not have,
 * are removed. */
static void
gst_qnxvideo_sink_set_property(GObject *object, guint prop_id,
                               const GValue *value,
                               GParamSpec *pspec)
{
    GstQnxVideoSink *qnxvideosink;

    printf("[%s.%d]===>: prop_id:%d\n", __FUNCTION__, __LINE__, prop_id);
    qnxvideosink = GST_QNXVIDEOSINK(object);
    switch (prop_id)
    {
    case PROP_WIDTH:
        qnxvideosink->width = g_value_get_int(value);
        break;
    case PROP_HEIGHT:
        qnxvideosink->height = g_value_get_int(value);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
}
/* GObject get_property: report the current width/height; anything else
 * is an invalid property id. */
static void
gst_qnxvideo_sink_get_property(GObject *object, guint prop_id, GValue *value,
                               GParamSpec *pspec)
{
    GstQnxVideoSink *sink = GST_QNXVIDEOSINK(object);

    printf("[%s.%d]===>:, prop_id: %d\n", __FUNCTION__, __LINE__, prop_id);
    switch (prop_id)
    {
    case PROP_WIDTH:
        g_value_set_int(value, sink->width);
        break;
    case PROP_HEIGHT:
        g_value_set_int(value, sink->height);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
}
/* Create the QNX Screen context and an NV12 window for rendering.
 * Returns TRUE on success (including when already open) so a repeated
 * READY->PAUSED transition no longer fails the state change — the old
 * code returned FALSE when isOpened was TRUE. Also checks the
 * screen_create_* return codes and releases the context if window
 * creation fails. */
static gboolean
gst_qnxvideo_sink_open(GstQnxVideoSink *qnxvideosink)
{
    int usage = GST_DISP_USAGE;
    int format = SCREEN_FORMAT_NV12;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    /* idempotent: keyed on the context so a stale isOpened flag cannot
     * report success without live resources */
    if (qnxvideosink->screen_ctx != NULL)
        return TRUE;
    if (screen_create_context(&qnxvideosink->screen_ctx,
                              SCREEN_APPLICATION_CONTEXT) != 0)
    {
        qnxvideosink->screen_ctx = NULL;
        goto fail;
    }
    if (screen_create_window(&qnxvideosink->screen_win,
                             qnxvideosink->screen_ctx) != 0)
    {
        qnxvideosink->screen_win = NULL;
        screen_destroy_context(qnxvideosink->screen_ctx);
        qnxvideosink->screen_ctx = NULL;
        goto fail;
    }
    screen_set_window_property_iv(qnxvideosink->screen_win,
                                  SCREEN_PROPERTY_USAGE, &usage);
    screen_set_window_property_iv(qnxvideosink->screen_win,
                                  SCREEN_PROPERTY_FORMAT, &format);
    qnxvideosink->isOpened = TRUE;
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return TRUE;

fail:
    GST_ERROR_OBJECT(qnxvideosink, "failed to create screen context/window");
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return FALSE;
}
/* Destroy the Screen window and context created by open().
 * Fix: reset isOpened to FALSE after tearing down — the old code left
 * it TRUE, so after a NULL->...->NULL cycle open() believed the sink
 * was still open even though its resources were gone. */
static gboolean gst_qnxvideo_sink_close(GstQnxVideoSink *qnxvideosink)
{
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    if (qnxvideosink->isOpened == TRUE)
    {
        if (qnxvideosink->screen_win)
        {
            screen_destroy_window(qnxvideosink->screen_win);
            qnxvideosink->screen_win = NULL;
        }
        if (qnxvideosink->screen_ctx)
        {
            screen_destroy_context(qnxvideosink->screen_ctx);
            qnxvideosink->screen_ctx = NULL;
        }
        qnxvideosink->isOpened = FALSE;
        printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
        return TRUE;
    }
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return FALSE;
}
/* change_state vfunc: open the screen before entering PAUSED and close
 * it when dropping back to NULL; all other transitions just chain up.
 * (Behaviour is unchanged from the original switch form — the other
 * cases were empty.) */
static GstStateChangeReturn
gst_qnxvideo_sink_change_state(GstElement *element,
                               GstStateChange transition)
{
    GstStateChangeReturn result;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    /* upward: acquire the screen resources before PAUSED */
    if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
    {
        if (!gst_qnxvideo_sink_open(GST_QNXVIDEOSINK(element)))
        {
            printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
            return GST_STATE_CHANGE_FAILURE;
        }
    }
    result = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
    /* downward: release the screen resources after leaving READY */
    if (transition == GST_STATE_CHANGE_READY_TO_NULL)
        gst_qnxvideo_sink_close(GST_QNXVIDEOSINK(element));
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return result;
}
/* Plugin entry: register the "qnxvideosink" element with the plugin. */
static gboolean plugin_init(GstPlugin *plugin)
{
    gboolean registered;

    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    registered = gst_element_register(plugin, "qnxvideosink",
                                      GST_RANK_SECONDARY,
                                      GST_TYPE_QNXVIDEOSINK);
    if (!registered)
    {
        printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
        return FALSE;
    }
    printf("[%s.%d]===>:\n", __FUNCTION__, __LINE__);
    return TRUE;
}
/* Plugin definition. NOTE: the installed shared object must be named
 * lib<name>.so matching the "qnxvideo" name argument below; a mismatch
 * makes GStreamer blacklist the plugin as unloadable. */
GST_PLUGIN_DEFINE(GST_VERSION_MAJOR,
GST_VERSION_MINOR,
qnxvideo,
"Qnx video sink",
plugin_init, PLUGIN_VERSION,
GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
/* 注意:输出的 plugin 动态库名称为 libpluginname.so 的形式,这个 pluginname 就是编写插件时
 * 定义插件的宏 GST_PLUGIN_DEFINE 的参数中的 name。如果不匹配,插件将无法使用,
 * 会被 gstreamer 加入黑名单。 */