android ndk 快速渲染yuv数据

因项目需要,要实现pc/windows传屏到android端显示,项目已经采用ffmpeg编码解码,现在碰到了如何将解码得到的yuv数据渲染到android surfaceview 上的问题。如果将yuv转换成rgb,软件实现的方法倒是简单,但是项目要求实时性,这种效率肯定是极低的。

下面是我整理的一些方法:

这篇文章给出了大致的方案参考 

这篇举例yuv转rgb的一些方式

至于通过yuv转rgb,然后渲染rgb的这种方式,这里就不再赘述了。

1、ANativeWindow API

针对android2.3以后的版本。
好像只能渲染RGB ?没有进行过测试。
header:frameworks/native/include/android/native_window.h
——参考自《Pro Android C++ with the NDK》

2、 OpenGL ES 2.0:

当平台有GPU的情况下,并且android2.2以上。
“虽然也是将 YUV 数据转 RGB 数据再显示,但是是通过GPU 来处理的,不同于利用 CPU 来进行颜色空间转换,速度快了很多”。glViewport(..) 是设置显示窗口的大小和位置,bindTexture(..)是分别绑定Y,U,V 数据。

这篇文章是用java实现的gl播放yuv。

这篇文章是用c实现的,还未测试效果。

opengl的方式我也写过,但由于stagefright api的方法,并没有测试下去,以后有机会再测。

3、Private C++ API

android团队不推荐使用。The performance in this method is also the same as the ANativeWindow one(难道也只能渲染rgb?)
e.g. Flash, Firefox, VPlayer, MX Player and VLC for Android

这种没有深入研究,还不知道具体里面是采用了什么。

4、stagefright api

我不知道是不是应该归类在private c++ api下,不过这个的确更加平台相关,而且不是开放接口。
这里渲染有两个类:
AwesomeLocalRenderer-SoftwareRenderer
AwesomeNativeWindowRenderer- 硬件方式。

这篇文章有对此方法较为详细的描述 ,我也是主要参考它的。

header:system/core/include/system/window.h

SoftwareRenderer中有ColorConverter,也有直接render yuv的部分,而我们用的就是直接render。

ffmpeg解码出来是PIX_FMT_YUV420P,查资料应该是和OMX_COLOR_FormatYUV420Planar一样的,那么应该能直接应用softwarerenderer中的if (mColorFormat == OMX_COLOR_FormatYUV420Planar) 这部分代码,而buf的颜色空间用的是HAL_PIXEL_FORMAT_YV12 。(同属于 YUV420planar格式)

下面直接给代码吧:(以下代码只是render和ffmpeg解码部分,仅供参考)

// edu_zjgsu_mrtspserver_server_X264.cpp
extern "C"
{
#ifndef __STDC_CONSTANT_MACROS
#  define __STDC_CONSTANT_MACROS
#endif
#include <math.h>
#include <libavutil/rational.h>
#include <libavutil/opt.h>
#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>
#include <libswscale/swscale.h>
};
#include <jni.h>
#include <android_runtime/AndroidRuntime.h>
#include <android_runtime/android_view_Surface.h>
#include <gui/Surface.h>
#include <assert.h>
#include <utils/Log.h>
#include <nativehelper/JNIHelp.h>
#include <media/stagefright/foundation/ADebug.h>
#include <ui/GraphicBufferMapper.h>
#include <cutils/properties.h>
#include <android/log.h>
#include <string.h>
#include <time.h>
#include <system/window.h>
#define   LOG_TAG    "LOG_TEST"
#define   LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define   LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
using namespace android;

// Module-level decoder/render state, set up by X264Init() and used by DecodeFrame().
AVCodec *codec;          // H.264 decoder found by avcodec_find_decoder()
AVCodecContext *c;       // decoder context opened in X264Init()
AVFrame *m_pYUVFrame;    // YUV frame data — receives the decoded picture (YUV420P)
uint8_t *yuv_buffer;     // set to NULL in X264Init(); not used in the visible code
sp<Surface> surface;     // render target passed to render(); assumed set by JNI glue elsewhere — TODO confirm

// Round x up to the nearest multiple of y. y must be a power of 2,
// so the rounding can be done with a single mask operation.
int ALIGN(int x, int y) {
    const int mask = y - 1;
    return (x + mask) & ~mask;
}

/**
 * Render one YUV420 planar frame into an ANativeWindow using the gralloc path
 * (same approach as stagefright's SoftwareRenderer for OMX_COLOR_FormatYUV420Planar).
 *
 * @param data         Y plane of the decoded frame (stride == width assumed — TODO confirm for widths not a multiple of 16)
 * @param data_u       U (Cb) plane, stride width/2
 * @param data_v       V (Cr) plane, stride width/2
 * @param nativeWindow target window backing the SurfaceView
 * @param width        frame width in pixels
 * @param height       frame height in pixels
 */
void render(const void *data, const void *data_u, const void *data_v, const sp<ANativeWindow> &nativeWindow,int width, int height) {
    sp<ANativeWindow> mNativeWindow = nativeWindow;
    int err;
    // Crop size: currently identical to the full frame size; adjust if cropping is needed.
    int mCropWidth = width;
    int mCropHeight = height;
    // At the moment mCropWidth == width == bufWidth.
    int halFormat = HAL_PIXEL_FORMAT_YV12;
    int bufWidth = (mCropWidth + 1) & ~1; // round up to an even number (align to 2)
    int bufHeight = (mCropHeight + 1) & ~1;
    // NOTE(review): set_usage/set_scaling_mode/set_buffers_geometry are re-issued on
    // every frame; they only need to run when the geometry changes — possible optimization.
    CHECK_EQ(0,
            native_window_set_usage(
            mNativeWindow.get(),
            GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
            | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
    CHECK_EQ(0,
           native_window_set_scaling_mode(
           mNativeWindow.get(),
           NATIVE_WINDOW_SCALING_MODE_SCALE_CROP));
    // Configure the window's buffer geometry (allocates display buffers of this size/format).
    CHECK_EQ(0, native_window_set_buffers_geometry(
                mNativeWindow.get(),
                bufWidth,
                bufHeight,
                halFormat));
    ANativeWindowBuffer *buf;
    // Dequeue a free graphic buffer to draw into.
    if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(),
            &buf)) != 0) {
        LOGI("Surface::dequeueBuffer returned error %d", err);
        return;
    }
    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
    Rect bounds(mCropWidth, mCropHeight);
    void *dst;
    CHECK_EQ(0, mapper.lock(// lock the graphic buffer and map it into this process
           buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
// Copy the decoded planes into the gralloc buffer.
    const uint8_t *src_y = (const uint8_t *)data;
    const uint8_t *src_u = (const uint8_t *)data_u;
    const uint8_t *src_v = (const uint8_t *)data_v;

    // HAL_PIXEL_FORMAT_YV12 layout: Y plane, then V (Cr), then U (Cb);
    // chroma stride is the luma stride halved and aligned to 16.
    uint8_t *dst_y = (uint8_t *)dst;
    size_t dst_y_size = buf->stride * buf->height;
    size_t dst_c_stride = ALIGN(buf->stride / 2, 16);
    size_t dst_c_size = dst_c_stride * buf->height / 2;
    uint8_t *dst_v = dst_y + dst_y_size;       // V plane comes first in YV12
    uint8_t *dst_u = dst_v + dst_c_size;

    // Row-by-row copy: source rows are packed at the decoder's stride (== width here),
    // destination rows at the gralloc buffer stride.
    for (int y = 0; y < mCropHeight; ++y) {
        memcpy(dst_y, src_y, mCropWidth);
        src_y += width;
        dst_y += buf->stride;
    }
    for (int y = 0; y < (mCropHeight + 1) / 2; ++y) {
        memcpy(dst_u, src_u, (mCropWidth + 1) / 2);
        memcpy(dst_v, src_v, (mCropWidth + 1) / 2);
        src_u += width / 2;
        src_v += width / 2;
        dst_u += dst_c_stride;
        dst_v += dst_c_stride;
    }

    CHECK_EQ(0, mapper.unlock(buf->handle));
    // Queue the filled buffer back to the window for display.
    if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -1)) != 0) {
        LOGI("Surface::queueBuffer returned error %d", err);
    }
    buf = NULL; // ownership returned to the window; don't touch buf after queueBuffer
}

/**
 * Initialize the FFmpeg H.264 decoder used by DecodeFrame().
 *
 * Sets up the module globals `codec`, `c` (decoder context) and
 * `m_pYUVFrame` (output frame), and clears `yuv_buffer`.
 *
 * @return 0 on success; a distinct non-zero code for each failure stage
 *         (1: decoder not found, 2: context allocation failed,
 *          3: avcodec_open2 failed, 6: frame allocation failed).
 */
int X264Init()
{
    int ret;
    c = NULL;

    avcodec_register_all();
    /* find the H.264 video decoder */
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        return 1;
    }
    c = avcodec_alloc_context3(codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        return 2;
    }
    // Allow feeding partial frames: the stream may arrive in arbitrary chunks.
    if (codec->capabilities & CODEC_CAP_TRUNCATED)
        c->flags |= CODEC_FLAG_TRUNCATED; /* we do not send complete frames */

    /* open it */
    ret = avcodec_open2(c, codec, NULL);
    if (ret < 0) {
        fprintf(stderr, "Could not open codec\n");
        return 3;
    }
    m_pYUVFrame = av_frame_alloc();
    if (!m_pYUVFrame) {
        fprintf(stderr, "Could not allocate video frame\n");
        return 6;
    }
    yuv_buffer = NULL;
    return 0;
}

/**
 * Decode one compressed H.264 frame and render it if a picture is produced.
 *
 * @param pInBuf     one complete encoded frame (buffer owned by the caller)
 * @param iInBufSize size of pInBuf in bytes
 * @return got_output: 1 if a picture was decoded and rendered, 0 if the
 *         decoder needs more data. Also returns 1 on decode error —
 *         NOTE(review): this collides with the success value; kept for
 *         backward compatibility with existing callers.
 */
int DecodeFrame(uint8_t *pInBuf, int iInBufSize)
{
    AVPacket pkt;
    int ret, got_output = 0;
    av_init_packet(&pkt);
    pkt.data = pInBuf;    // points into the caller's buffer; the packet does not own it
    pkt.size = iInBufSize;
    /* decode the frame */
    ret = avcodec_decode_video2(c, m_pYUVFrame, &got_output, &pkt);
    if (ret < 0) {
        fprintf(stderr, "Error decoding frame\n");
        av_free_packet(&pkt);
        return 1;
    }
    LOGI("got_output=%d",got_output);
    if (got_output) {
        // c->width/c->height are the stream dimensions reported by the decoder.
        render(m_pYUVFrame->data[0],m_pYUVFrame->data[1],m_pYUVFrame->data[2],surface,c->width,c->height);
    }
    // Always release the packet, not only when a frame was produced
    // (the original skipped this on got_output == 0 and on the error path).
    av_free_packet(&pkt);
    return got_output;
}

Android.mk :

LOCAL_PATH := $(call my-dir)

# --- Prebuilt Android platform libraries (pulled from a device/AOSP build) ---
include $(CLEAR_VARS)
LOCAL_MODULE := cutils
LOCAL_SRC_FILES := prebuilt/libcutils.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := utils
LOCAL_SRC_FILES := prebuilt/libutils.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := binder
LOCAL_SRC_FILES := prebuilt/libbinder.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := ui
LOCAL_SRC_FILES := prebuilt/libui.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := gui
LOCAL_SRC_FILES := prebuilt/libgui.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := android_runtime
LOCAL_SRC_FILES := prebuilt/libandroid_runtime.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := stagefright_foundation
LOCAL_SRC_FILES := prebuilt/libstagefright_foundation.so
include $(PREBUILT_SHARED_LIBRARY)

# --- Prebuilt FFmpeg libraries ---
include $(CLEAR_VARS)
LOCAL_MODULE := avformat-55
LOCAL_SRC_FILES := prebuilt/libavformat-55.so
# fix: this include was missing, so the avformat-55 prebuilt was never registered
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil-52
LOCAL_SRC_FILES := prebuilt/libavutil-52.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avcodec-55
LOCAL_SRC_FILES := prebuilt/libavcodec-55.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale-2
LOCAL_SRC_FILES := prebuilt/libswscale-2.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample-0
LOCAL_SRC_FILES := prebuilt/libswresample-0.so
include $(PREBUILT_SHARED_LIBRARY)

# --- The JNI module itself ---
include $(CLEAR_VARS)
LOCAL_MODULE    := CallFFMpeg
LOCAL_SRC_FILES := edu_zjgsu_mrtspserver_server_X264.cpp
LOCAL_LDLIBS := -llog -ljnigraphics -lz -landroid
# FFmpeg libraries plus the stagefright-related platform libraries
LOCAL_SHARED_LIBRARIES := \
        libavformat-55 \
        libavcodec-55 \
        libswscale-2 \
        libavutil-52 \
        libswresample-0 \
        libcutils \
        libutils \
        libbinder \
        libui \
        libgui \
        libandroid_runtime \
        libstagefright_foundation

LOCAL_C_INCLUDES := \
    ../frameworks/native/include \
    ../frameworks/base/include\
    ../system/core/include\
    ../libnativehelper/include\
    ../hardware/libhardware/include\
    ../frameworks/av/include\
    jni/include\
    ../frameworks/native/include/media/openmax

TARGET_ARCH_ABI := armeabi-v7a
LOCAL_ARM_NEON := true
LOCAL_CXXFLAGS := -DHAVE_PTHREADS
include $(BUILD_SHARED_LIBRARY)

最后效果:1280x720图像渲染时间2-3ms。

遇到的问题:1、编译问题,因为要依赖到stagefright等其他相关的底层库,所以平台相关性较大。2、可能由于解码库的原因,如果图像width不是16的倍数,如1366,那么render时候会出现螺旋状花屏或是uv通道重影的现象。

这篇文章也提供了类似的方法,但没有详细的代码,以待研究。



转载于:https://my.oschina.net/yingcj/blog/387167

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值