1 简介
上文分析了OpenGL ES渲染的实现 IJKPLAYER源码分析-OpenGL ES渲染-CSDN博客。本文便可以分析video画面是如何在Android端窗口上显示的了。
2 EGL
为何有了OpenGL ES api,还要引入EGL api ?原因是:
OpenGL ES定义了平台无关的GL绘图指令,但没有连接到原生窗口的接口,EGL则定义了displays/contexts/surfaces平台接口,且是跨平台的。
EGL™是Khronos渲染API(如OpenGL ES或OpenVG)与底层本地平台窗口系统之间的接口。它处理图形上下文管理、表面/缓冲区绑定和渲染同步,并使用其他Khronos api实现高性能、加速、混合模式的2D和3D渲染。EGL还提供了Khronos之间的互操作能力,以实现api之间的有效数据传输—例如在运行OpenMAX AL的视频子系统和运行OpenGL ES的GPU之间。
不同平台有各自连接OpenGL与本地窗口系统的接口:Windows上是WGL,Linux(X11)上是GLX,Apple OS上是AGL;EGL则主要服务于嵌入式与Android等平台,作用与它们类似。
EGL可用于管理绘图surface,提供如下机制:
- 与设备的原生窗口通信;
- 查询绘图surface可用类型及配置;
- 创建绘图surface;
- 管理纹理贴图;
关于EGL的详细介绍,请参考 EGL 。
3 显示
Android端的显示,因软硬解等方式,有以下3种显示方法:
- mediacodec硬解显示,使用ANativeWindow;
- FFmpeg软解显示,使用EGL输出到surface;
- ANativeWindow显示,使用ANativeWindow;
3.1 硬解
mediacodec硬解1帧video数据之后,从outputBuffer拿出一帧,然后调用releaseOutputBuffer(output_buffer_index, render)来显示画面。有几点需要注意:
- mediacodec硬解时,喂给解码器的video帧,是编码后的压缩数据,并不涉及对像素格式的转换,输入给解码器的视频源的数据,来啥像素格式的video帧,就喂给mediacodec什么样的数据;
- 由于mediacodec硬解后显示由ANativeWindow所支持,不会像FFmpeg软解一样,需采用OpenGL ES render再通过EGL输出到surface上显示;
- mediacodec硬解后显示,直接调用releaseOutputBuffer(output_buffer_index, render) 即可;
3.2 软解
Android端只有FFmpeg软解才会用到EGL,mediacodec硬解和ANativeWindow均不使用它。
- 在播放器初始化时,由Java层将surface传入native,native通过ANativeWindow_fromSurface(env, surface)拿到ANativeWindow指针;
- EGL通过eglCreateWindowSurface(display, config, window, NULL)创建EGLSurface实例;
- EGL完成对EGLDisplay、EGLContext和EGLSurface的创建之后,将EGLDisplay和EGLSurface与EGLContext进行绑定;
- 再由OpenGL ES完成1帧的render;
- 再通过eglSwapBuffers(egl->display, egl->surface)完成Front FrameBuffer(前缓冲)和Back FrameBuffer(后缓冲)的交换,由EGLDisplay输出到Java层的surface上,完成显示;
- 渲染并显示完1帧之后,重置上下文,并释放egl线程eglReleaseThread();
- 重复第3~6步骤,继续后续帧的render与显示;
- 最后,EGL的显示用到了double buffer即双缓冲技术:OpenGL ES render绘制到Back FrameBuffer(后缓冲),Front FrameBuffer(前缓冲)用于最终的surface输出;
为何要使用double buffer ?
- 原因是,如果app使用单缓冲绘图时可能会存在图像闪烁的问题,因为图像生成不是一下子被绘制出来的,而是按照从左到右、从上到下逐像素绘制的。如果最终图像不是在瞬间全部展示给用户,而是通过把绘制过程也展示出来了,这会导致用户看到的渲染效果出现闪烁;
- 为了规避这个问题,可以使用双缓冲渲染:前缓冲保存着最终输出的图像,它会在屏幕上显示;而所有的渲染指令都会在后缓冲上绘制,对用户屏蔽从左到右、从上到下逐像素绘制的过程,这样就可以避免闪烁了。
通过Java层传入的surface拿到ANativeWindow指针:
/*
 * Resolve a Java Surface into an ANativeWindow and install it on the vout.
 * A NULL surface (or a failed ANativeWindow_fromSurface lookup) still
 * propagates NULL so the vout can drop its previously cached window.
 */
void SDL_VoutAndroid_SetAndroidSurface(JNIEnv *env, SDL_Vout *vout, jobject android_surface)
{
    ANativeWindow *native_window = NULL;

    if (android_surface != NULL) {
        native_window = ANativeWindow_fromSurface(env, android_surface);
        if (native_window == NULL) {
            ALOGE("%s: ANativeWindow_fromSurface: failed\n", __func__);
            // do not return fail here;
        }
    }

    SDL_VoutAndroid_SetNativeWindow(vout, native_window);

    /* SetNativeWindow acquires its own reference; drop the one we got
     * from ANativeWindow_fromSurface(). */
    if (native_window != NULL)
        ANativeWindow_release(native_window);
}
/* Thread-safe wrapper: installs the native window while holding the
 * vout mutex; the real work is done by the locked variant below. */
void SDL_VoutAndroid_SetNativeWindow(SDL_Vout *vout, ANativeWindow *native_window)
{
    SDL_LockMutex(vout->mutex);
    SDL_VoutAndroid_SetNativeWindow_l(vout, native_window);
    SDL_UnlockMutex(vout->mutex);
}
最后,ANativeWindow指针保存在SDL_Vout成员vout->opaque中:
/*
 * Replace the vout's cached native window (caller must hold vout->mutex).
 * Handles reference counting (acquire new, release old) and tears down
 * EGL state that was bound to the previous window.
 */
static void SDL_VoutAndroid_SetNativeWindow_l(SDL_Vout *vout, ANativeWindow *native_window)
{
    AMCTRACE("%s(%p, %p)\n", __func__, vout, native_window);
    SDL_Vout_Opaque *opaque = vout->opaque;
    if (opaque->native_window == native_window) {
        /* Same window: nothing to rebind, but a repeated NULL still
         * invalidates any outstanding buffers. */
        if (native_window == NULL) {
            // always invalidate buffers, if native_window is changed
            SDL_VoutAndroid_invalidateAllBuffers_l(vout);
        }
        return;
    }
    /* Window changed: EGL objects tied to the old window are stale. */
    IJK_EGL_terminate(opaque->egl);
    SDL_VoutAndroid_invalidateAllBuffers_l(vout);
    if (opaque->native_window)
        ANativeWindow_release(opaque->native_window);
    if (native_window)
        ANativeWindow_acquire(native_window);
    opaque->native_window = native_window;
    /* Re-arm the one-shot "window is NULL" warning for the new window. */
    opaque->null_native_window_warned = 0;
}
显示主入口:
/*
 * Display entry point: render one overlay frame to `window` through EGL.
 * Binds the cached (or freshly created) context, draws and presents the
 * frame, then unbinds the context and releases per-thread EGL state.
 * Returns EGL_TRUE on success.
 */
EGLBoolean IJK_EGL_display(IJK_EGL* egl, EGLNativeWindowType window, SDL_VoutOverlay *overlay)
{
    if (!egl || !egl->opaque)
        return EGL_FALSE;

    if (!IJK_EGL_makeCurrent(egl, window))
        return EGL_FALSE;

    EGLBoolean ret = IJK_EGL_display_internal(egl, window, overlay);

    /* Detach the context from this thread after each frame. */
    eglMakeCurrent(egl->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    eglReleaseThread(); // FIXME: call at thread exit
    return ret;
}
通过ANativeWindow完成由EGL显示的Context上下文构建,使用EGL所推荐的配置:
/*
 * Bind an EGL rendering context for `window`.
 *
 * Fast path: if the cached display/surface/context already belong to this
 * window, just re-bind them. Otherwise tear down stale EGL state and
 * rebuild: initialize the default display, choose an ES2-capable RGB888
 * window config, (on Android) push the config's native visual id plus the
 * window's dimensions to the ANativeWindow, create the window surface and
 * ES2 context, and make them current on the calling thread. New handles
 * are cached in `egl` on success; EGL_FALSE is returned on any failure.
 */
static EGLBoolean IJK_EGL_makeCurrent(IJK_EGL* egl, EGLNativeWindowType window)
{
    if (window && window == egl->window &&
        egl->display &&
        egl->surface &&
        egl->context) {
        /* FIX: log message said "elgMakeCurrent". */
        if (!eglMakeCurrent(egl->display, egl->surface, egl->surface, egl->context)) {
            ALOGE("[EGL] eglMakeCurrent() failed (cached)\n");
            return EGL_FALSE;
        }
        return EGL_TRUE;
    }

    /* Window changed (or first call): discard any previous EGL state. */
    IJK_EGL_terminate(egl);
    egl->window = window;

    if (!window)
        return EGL_FALSE;

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY) {
        ALOGE("[EGL] eglGetDisplay failed\n");
        return EGL_FALSE;
    }

    EGLint major, minor;
    if (!eglInitialize(display, &major, &minor)) {
        ALOGE("[EGL] eglInitialize failed\n");
        return EGL_FALSE;
    }
    ALOGI("[EGL] eglInitialize %d.%d\n", (int)major, (int)minor);

    static const EGLint configAttribs[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_NONE
    };
    static const EGLint contextAttribs[] = {
        EGL_CONTEXT_CLIENT_VERSION, 2,
        EGL_NONE
    };

    EGLConfig config;
    EGLint numConfig = 0;
    /* FIX: eglChooseConfig() can succeed with numConfig == 0, which would
     * leave `config` uninitialized; treat "no matching config" as failure. */
    if (!eglChooseConfig(display, configAttribs, &config, 1, &numConfig) || numConfig < 1) {
        ALOGE("[EGL] eglChooseConfig failed\n");
        eglTerminate(display);
        return EGL_FALSE;
    }

#ifdef __ANDROID__
    {
        /* Query the native pixel format matching this config and apply it,
         * together with the window's dimensions, to the ANativeWindow. */
        EGLint native_visual_id = 0;
        if (!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &native_visual_id)) {
            ALOGE("[EGL] eglGetConfigAttrib() returned error %d", eglGetError());
            eglTerminate(display);
            return EGL_FALSE;
        }

        int32_t width  = ANativeWindow_getWidth(window);
        int32_t height = ANativeWindow_getHeight(window); /* FIX: was ANativeWindow_getWidth() */
        ALOGI("[EGL] ANativeWindow_setBuffersGeometry(f=%d);", native_visual_id);
        int ret = ANativeWindow_setBuffersGeometry(window, width, height, native_visual_id);
        if (ret) {
            ALOGE("[EGL] ANativeWindow_setBuffersGeometry(format) returned error %d", ret);
            eglTerminate(display);
            return EGL_FALSE;
        }
    }
#endif

    EGLSurface surface = eglCreateWindowSurface(display, config, window, NULL);
    if (surface == EGL_NO_SURFACE) {
        ALOGE("[EGL] eglCreateWindowSurface failed\n");
        eglTerminate(display);
        return EGL_FALSE;
    }

    /* eglMakeCurrent() binds display/surface/context to the calling thread;
     * all subsequent GLES rendering must happen on this thread. */
    /* FIX: eglCreateContext() returns an EGLContext, not an EGLSurface. */
    EGLContext context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);
    if (context == EGL_NO_CONTEXT) {
        ALOGE("[EGL] eglCreateContext failed\n");
        eglDestroySurface(display, surface);
        eglTerminate(display);
        return EGL_FALSE;
    }

    if (!eglMakeCurrent(display, surface, surface, context)) {
        ALOGE("[EGL] eglMakeCurrent() failed (new)\n"); /* FIX: typo "elgMakeCurrent" */
        eglDestroyContext(display, context);
        eglDestroySurface(display, surface);
        eglTerminate(display);
        return EGL_FALSE;
    }

    IJK_GLES2_Renderer_setupGLES();

    /* Cache the freshly created handles for the fast path above. */
    egl->context = context;
    egl->surface = surface;
    egl->display = display;
    return EGL_TRUE;
}
特别说明下,eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &native_visual_id)拿到的是与该EGLConfig匹配的原生窗口像素格式(native visual id),再通过ANativeWindow_setBuffersGeometry传入ANativeWindow:
#ifdef __ANDROID__
    {
        /* Query the native pixel format matching this config and apply it,
         * together with the window's dimensions, to the ANativeWindow. */
        EGLint native_visual_id = 0;
        if (!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &native_visual_id)) {
            ALOGE("[EGL] eglGetConfigAttrib() returned error %d", eglGetError());
            eglTerminate(display);
            return EGL_FALSE;
        }
        int32_t width  = ANativeWindow_getWidth(window);
        int32_t height = ANativeWindow_getHeight(window); /* FIX: was ANativeWindow_getWidth() */
        ALOGI("[EGL] ANativeWindow_setBuffersGeometry(f=%d);", native_visual_id);
        int ret = ANativeWindow_setBuffersGeometry(window, width, height, native_visual_id);
        if (ret) {
            ALOGE("[EGL] ANativeWindow_setBuffersGeometry(format) returned error %d", ret);
            eglTerminate(display);
            return EGL_FALSE;
        }
    }
#endif
EGL显示Context上下文构建完后,由OpenGL ES完成1帧的render,然后通过eglSwapBuffers(egl->display, egl->surface)交换前后缓冲,最终输出到Java层的surface上完成图像的显示:
/*
 * Draw one overlay frame with the GLES2 renderer and present it.
 * Assumes the EGL context is already current on this thread
 * (see IJK_EGL_makeCurrent). Returns EGL_TRUE on success.
 */
static EGLBoolean IJK_EGL_display_internal(IJK_EGL* egl, EGLNativeWindowType window, SDL_VoutOverlay *overlay)
{
    IJK_EGL_Opaque *opaque = egl->opaque;
    if (!IJK_EGL_prepareRenderer(egl, overlay)) {
        ALOGE("[EGL] IJK_EGL_prepareRenderer failed\n");
        return EGL_FALSE;
    }
    if (!IJK_GLES2_Renderer_renderOverlay(opaque->renderer, overlay)) {
        ALOGE("[EGL] IJK_GLES2_render failed\n");
        return EGL_FALSE;
    }
    /* Swap back buffer to front: presents the rendered frame on the
     * window surface. */
    eglSwapBuffers(egl->display, egl->surface);
    return EGL_TRUE;
}
值得一提的是,OpenGL ES 需要开发者自己开辟一个新的线程,来执行 OpenGL ES 的渲染操作,还要求开发者在执行渲染操作前要为这个线程绑定上下文环境。EGL 为绑定上下文环境提供了 eglMakeCurrent 这个接口。IJKPLAYER所开辟的线程便是video_refresh_thread。
3.3 ANativeWindow
以上介绍了Android系统下软硬解的显示,最后再来看看ANativeWindow的显示。ANativeWindow显示所支持的像素格式,主要是RGB系列,以及少许的YUV系列,部分像素格式的支持与OpenGL ES相同。 在overlay_format使能了SDL_FCC__GLES2选项之后,IJKPLAYER选择使用OpenGL ES进行render,再由EGL渲染到surface上。
关于ANativeWindow的NDK开发文档,请参见 ANativeWindow开发文档
- ANativeWindow支持RGB系列,以及YV12;
- 因此若像素格式不在支持之列,需转换为ANativeWindow所支持的像素格式;
ANativeWindow是通过 ANativeWindow_setBuffersGeometry方法将video的宽高及像素格式传入的:
int32_t ANativeWindow_setBuffersGeometry(
ANativeWindow *window,
int32_t width,
int32_t height,
int32_t format
)
| 项 | 参数 | 说明 |
| --- | --- | --- |
| Parameters | window | pointer to an ANativeWindow object. |
| | width | width of the buffers in pixels. |
| | height | height of the buffers in pixels. |
| | format | one of the AHardwareBuffer_Format constants. |
| Returns | | 0 for success, or a negative value on error. |
可以看到ANativeWindow_setBuffersGeometry接口的format参数,是一个AHardwareBuffer_Format类型的常量:
/* AHardwareBuffer formats accepted by ANativeWindow_setBuffersGeometry()
 * (from the NDK's <android/hardware_buffer.h>).
 * FIX: the quoted snippet was missing the `enum` keyword and the
 * trailing semicolon, so it was not valid C. */
enum AHardwareBuffer_Format {
    AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM = 1,
    AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM = 2,
    AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM = 3,
    AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM = 4,
    AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT = 0x16,
    AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM = 0x2b,
    AHARDWAREBUFFER_FORMAT_BLOB = 0x21,
    AHARDWAREBUFFER_FORMAT_D16_UNORM = 0x30,
    AHARDWAREBUFFER_FORMAT_D24_UNORM = 0x31,
    AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT = 0x32,
    AHARDWAREBUFFER_FORMAT_D32_FLOAT = 0x33,
    AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT = 0x34,
    AHARDWAREBUFFER_FORMAT_S8_UINT = 0x35,
    AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420 = 0x23,
    AHARDWAREBUFFER_FORMAT_YCbCr_P010 = 0x36,
    AHARDWAREBUFFER_FORMAT_R8_UNORM = 0x38,
    AHARDWAREBUFFER_FORMAT_R16_UINT = 0x39,
    AHARDWAREBUFFER_FORMAT_R16G16_UINT = 0x3a,
    AHARDWAREBUFFER_FORMAT_R10G10B10A10_UNORM = 0x3b
};
ANativeWindow显示的主函数:
/*
 * Render one video frame onto an ANativeWindow (locked variant).
 * Reconfigures the window's buffer geometry when its size/format no longer
 * matches the overlay, then locks a window buffer, copies/converts pixels
 * via the per-format render callback, and posts the buffer to the screen.
 * Returns 0 on success, a negative value on error.
 */
int SDL_Android_NativeWindow_display_l(ANativeWindow *native_window, SDL_VoutOverlay *overlay)
{
    int retval;
    if (!native_window)
        return -1;
    if (!overlay) {
        ALOGE("SDL_Android_NativeWindow_display_l: NULL overlay");
        return -1;
    }
    if (overlay->w <= 0 || overlay->h <= 0) {
        ALOGE("SDL_Android_NativeWindow_display_l: invalid overlay dimensions(%d, %d)", overlay->w, overlay->h);
        return -1;
    }
    int curr_w = ANativeWindow_getWidth(native_window);
    int curr_h = ANativeWindow_getHeight(native_window);
    int curr_format = ANativeWindow_getFormat(native_window);
    /* Round dimensions up to even values (4:2:0 chroma subsampling). */
    int buff_w = IJKALIGN(overlay->w, 2);
    int buff_h = IJKALIGN(overlay->h, 2);
    /* Look up the HAL format + render callback for the overlay's fourcc. */
    AndroidHalFourccDescriptor *overlayDesc = native_window_get_desc(overlay->format);
    if (!overlayDesc) {
        ALOGE("SDL_Android_NativeWindow_display_l: unknown overlay format: %d", overlay->format);
        return -1;
    }
    AndroidHalFourccDescriptor *voutDesc = native_window_get_desc(curr_format);
    if (!voutDesc || voutDesc->hal_format != overlayDesc->hal_format) {
        /* Window format/size differs from the overlay: reconfigure it. */
        ALOGD("ANativeWindow_setBuffersGeometry: w=%d, h=%d, f=%.4s(0x%x) => w=%d, h=%d, f=%.4s(0x%x)",
            curr_w, curr_h, (char*) &curr_format, curr_format,
            buff_w, buff_h, (char*) &overlay->format, overlay->format);
        retval = ANativeWindow_setBuffersGeometry(native_window, buff_w, buff_h, overlayDesc->hal_format);
        if (retval < 0) {
            ALOGE("SDL_Android_NativeWindow_display_l: ANativeWindow_setBuffersGeometry: failed %d", retval);
            return retval;
        }
        if (!voutDesc) {
            /* Current window format was unknown: drop this frame after
             * reconfiguring; presumably the next call sees the new format
             * (NOTE(review): confirm against caller's retry behavior). */
            ALOGE("SDL_Android_NativeWindow_display_l: unknown hal format %d", curr_format);
            return -1;
        }
    }
    ANativeWindow_Buffer out_buffer;
    retval = ANativeWindow_lock(native_window, &out_buffer, NULL);
    if (retval < 0) {
        ALOGE("SDL_Android_NativeWindow_display_l: ANativeWindow_lock: failed %d", retval);
        return retval;
    }
    if (out_buffer.width != buff_w || out_buffer.height != buff_h) {
        /* Locked buffer does not match the requested geometry: unlock,
         * re-request the geometry and drop this frame. */
        ALOGE("unexpected native window buffer (%p)(w:%d, h:%d, fmt:'%.4s'0x%x), expecting (w:%d, h:%d, fmt:'%.4s'0x%x)",
            native_window,
            out_buffer.width, out_buffer.height, (char*)&out_buffer.format, out_buffer.format,
            buff_w, buff_h, (char*)&overlay->format, overlay->format);
        // TODO: 8 set all black
        ANativeWindow_unlockAndPost(native_window);
        ANativeWindow_setBuffersGeometry(native_window, buff_w, buff_h, overlayDesc->hal_format);
        return -1;
    }
    /* Copy/convert the overlay pixels into the locked window buffer. */
    int render_ret = voutDesc->render(&out_buffer, overlay);
    if (render_ret < 0) {
        // TODO: 8 set all black
        // return after unlock image;
    }
    retval = ANativeWindow_unlockAndPost(native_window);
    if (retval < 0) {
        ALOGE("SDL_Android_NativeWindow_display_l: ANativeWindow_unlockAndPost: failed %d", retval);
        return retval;
    }
    return render_ret;
}
IJKPLAYER对ANativeWindow所支持的像素格式:
/* Android HAL pixel format codes used by IJKPLAYER for ANativeWindow
 * output; mirrors the platform's graphics HAL definitions. */
enum {
    HAL_PIXEL_FORMAT_RGBA_8888 = 1,
    HAL_PIXEL_FORMAT_RGBX_8888 = 2,
    HAL_PIXEL_FORMAT_RGB_888 = 3,
    HAL_PIXEL_FORMAT_RGB_565 = 4,
    HAL_PIXEL_FORMAT_BGRA_8888 = 5,
    HAL_PIXEL_FORMAT_RGBA_5551 = 6,
    HAL_PIXEL_FORMAT_RGBA_4444 = 7,
    /* 0x8 - 0xFF range unavailable */
    /* 0x100 - 0x1FF HAL implement */
    HAL_PIXEL_FORMAT_YV12 = 0x32315659, // YCrCb 4:2:0 Planar ('YV12' fourcc)
    HAL_PIXEL_FORMAT_RAW_SENSOR = 0x20,
    HAL_PIXEL_FORMAT_BLOB = 0x21,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22,
    /* Legacy formats (deprecated), used by ImageFormat.java */
    HAL_PIXEL_FORMAT_YCbCr_422_SP = 0x10, // NV16
    HAL_PIXEL_FORMAT_YCrCb_420_SP = 0x11, // NV21
    HAL_PIXEL_FORMAT_YCbCr_422_I = 0x14, // YUY2
};
对支持的像素格式,绑定render方法:
/* Maps each supported SDL fourcc / HAL pixel format to the HAL format
 * pushed into ANativeWindow_setBuffersGeometry() and the render callback
 * that copies overlay pixels into the window buffer. */
static AndroidHalFourccDescriptor g_hal_fcc_map[] = {
    // YV12
    { HAL_PIXEL_FORMAT_YV12, "HAL_YV12", HAL_PIXEL_FORMAT_YV12, android_render_on_yv12 },
    { SDL_FCC_YV12, "YV12", HAL_PIXEL_FORMAT_YV12, android_render_on_yv12 },
    // RGB565
    { HAL_PIXEL_FORMAT_RGB_565, "HAL_RGB_565", HAL_PIXEL_FORMAT_RGB_565, android_render_on_rgb565 },
    { SDL_FCC_RV16, "RV16", HAL_PIXEL_FORMAT_RGB_565, android_render_on_rgb565 },
    // RGB8888
    { HAL_PIXEL_FORMAT_RGBX_8888, "HAL_RGBX_8888", HAL_PIXEL_FORMAT_RGBX_8888, android_render_on_rgb8888 },
    { HAL_PIXEL_FORMAT_RGBA_8888, "HAL_RGBA_8888", HAL_PIXEL_FORMAT_RGBA_8888, android_render_on_rgb8888 },
    { HAL_PIXEL_FORMAT_BGRA_8888, "HAL_BGRA_8888", HAL_PIXEL_FORMAT_BGRA_8888, android_render_on_rgb8888 },
    { SDL_FCC_RV32, "RV32", HAL_PIXEL_FORMAT_RGBX_8888, android_render_on_rgb8888 },
};
然后,对视频宽高进行字节对齐,并传入宽高及hal硬件所支持的像素格式:
/* Excerpt from SDL_Android_NativeWindow_display_l: round the overlay's
 * dimensions up to even values, then push geometry + HAL pixel format
 * to the native window. */
int buff_w = IJKALIGN(overlay->w, 2);
int buff_h = IJKALIGN(overlay->h, 2);
ANativeWindow_setBuffersGeometry(native_window, buff_w, buff_h, overlayDesc->hal_format);
此处以ANativeWindow显示yv12为例,将yv12的yuv分量分别copy给ANativeWindow_Buffer结构体的bits指针,完成像素数据的传入,最后由ANativeWindow进行render显示:
/*
 * Copy a YV12 overlay (3 planes) into a YV12 ANativeWindow buffer.
 * Destination layout follows Android's YV12 buffer convention: a luma
 * plane of `stride * height` bytes followed by two chroma planes whose
 * stride is the half-luma-stride aligned to 16 — assumed per Android's
 * HAL_PIXEL_FORMAT_YV12 spec (NOTE(review): confirm on target devices).
 * Plane order mirrors overlay->pixels[0..2]; presumably Y, then the two
 * chroma planes in YV12 order. Returns 0.
 */
static int android_render_yv12_on_yv12(ANativeWindow_Buffer *out_buffer, const SDL_VoutOverlay *overlay)
{
    // SDLTRACE("SDL_VoutAndroid: android_render_yv12_on_yv12(%p)", overlay);
    assert(overlay->format == SDL_FCC_YV12);
    assert(overlay->planes == 3);
    /* Never write past the shorter of the two heights. */
    int min_height = IJKMIN(out_buffer->height, overlay->h);
    int dst_y_stride = out_buffer->stride;
    int dst_c_stride = IJKALIGN(out_buffer->stride / 2, 16);
    int dst_y_size = dst_y_stride * out_buffer->height;
    int dst_c_size = dst_c_stride * out_buffer->height / 2;
    // ALOGE("stride:%d/%d, size:%d/%d", dst_y_stride, dst_c_stride, dst_y_size, dst_c_size);
    /* Start addresses of the Y and the two chroma planes inside bits. */
    uint8_t *dst_pixels_array[] = {
        out_buffer->bits,
        out_buffer->bits + dst_y_size,
        out_buffer->bits + dst_y_size + dst_c_size,
    };
    /* Chroma planes are half-height in 4:2:0. */
    int dst_line_height[] = { min_height, min_height / 2, min_height / 2 };
    int dst_line_size_array[] = { dst_y_stride, dst_c_stride, dst_c_stride };
    for (int i = 0; i < 3; ++i) {
        int dst_line_size = dst_line_size_array[i];
        int src_line_size = overlay->pitches[i];
        int line_height = dst_line_height[i];
        uint8_t *dst_pixels = dst_pixels_array[i];
        const uint8_t *src_pixels = overlay->pixels[i];
        if (dst_line_size == src_line_size) {
            /* Matching strides: copy the whole plane in one shot. */
            int plane_size = src_line_size * line_height;
            // ALOGE("sdl_image_copy_plane %p %p %d", dst_pixels, src_pixels, dst_plane_size);
            memcpy(dst_pixels, src_pixels, plane_size);
        } else {
            // TODO: 9 padding
            /* Stride mismatch: copy line by line, clamped to the
             * narrower stride. */
            int bytewidth = IJKMIN(dst_line_size, src_line_size);
            // ALOGE("av_image_copy_plane %p %d %p %d %d %d", dst_pixels, dst_line_size, src_pixels, src_line_size, bytewidth, line_height);
            av_image_copy_plane(dst_pixels, dst_line_size, src_pixels, src_line_size, bytewidth, line_height);
        }
    }
    return 0;
}
/*
 * YV12 render dispatcher: only SDL_FCC_YV12 overlays are supported;
 * any other format yields -1.
 */
static int android_render_on_yv12(ANativeWindow_Buffer *out_buffer, const SDL_VoutOverlay *overlay)
{
    assert(out_buffer);
    assert(overlay);

    if (overlay->format == SDL_FCC_YV12)
        return android_render_yv12_on_yv12(out_buffer, overlay);

    return -1;
}