最近需要通过surface显示YUV视频,参考了网上一些资料,发现大多是基于4.0,4.4的代码,4.0相对5.1,android的代码接口有些变化,参考示例不能直接使用,调试过程中也碰到了很多问题,故整理出来分享。
本文使用的是jni方式,应用端通过jni接口,调用C++层代码,进行YUV图像显示。应用层做了一个简单apk,jni层是在android源码里面编译的,使用so库的方式,开发环境是 Android 5.1。
下面直接上代码(主要是测试用,很多地方写得不规范和存在问题,但整体不影响演示)。
4.1 jni层
Jni层是关键,先介绍
4.1.1 主要代码
这些代码都在一个文件里,
变量定义,库加载和jni注册
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>
#include <limits.h>
#include <unistd.h>
#include <jni.h>
#include "JNIHelp.h"
#include "types.h"
using namespace android;
// Fully-qualified Java class whose native methods are registered below.
static const char *classPathName = "com/example/yuvplayer/YuvStream"; //xia
// Strong reference to the native Surface extracted from the Java Surface.
static sp<Surface> native_surface; //xia
// BUG FIX: the original macro `{if(x == y);}` evaluated its arguments and
// silently discarded the comparison, so a failing check was never reported.
// Now it logs on mismatch while still evaluating each argument exactly once.
#define CHECK_EQ(x,y) do { if ((x) != (y)) { ALOGE("CHECK_EQ failed: " #x " != " #y); } } while (0)
static jobject gObject;
static JavaVM* jvm=0;
#include "debug.h"
// GetEnv wants a void** out-parameter; this union lets JNI_OnLoad pass
// &uenv.venv to GetEnv and then read the typed JNIEnv* back without a cast.
typedef union {
JNIEnv* env;
void* venv;
} UnionJNIEnvToVoid;
/*
 * Library entry point, invoked by the VM on System.loadLibrary().
 * Caches the JavaVM, resolves a JNIEnv, and registers the native method
 * table for com.example.yuvplayer.YuvStream.
 * Returns JNI_VERSION_1_4 on success, -1 on failure.
 */
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
    UnionJNIEnvToVoid uenv;
    uenv.venv = NULL;
    LOGW("JNI_OnLoad 1111");
    setJavaVM(vm); //xia
    // Resolve a JNIEnv for the current thread.
    if (vm->GetEnv(&uenv.venv, JNI_VERSION_1_4) != JNI_OK) {
        LOGD("ERROR: GetEnv failed");
        return -1;
    }
    JNIEnv* env = uenv.env;
    LOGW("JNI_OnLoad 2222");
    // Bind the Java native method declarations to the C++ implementations.
    const int count = sizeof(methods) / sizeof(methods[0]);
    if (registerNativeMethods(env, classPathName, methods, count) != JNI_TRUE) {
        LOGD("ERROR: registerNatives failed");
        return -1;
    }
    return JNI_VERSION_1_4;
}
/*
* Register several native methods for one class.
*/
/*
 * Register several native methods for one class.
 *
 * env        - JNIEnv of the calling thread.
 * className  - fully-qualified class name ("com/example/...").
 * gMethods   - table of {name, signature, fnPtr} entries.
 * numMethods - number of entries in gMethods.
 *
 * Returns JNI_TRUE on success, JNI_FALSE on failure (in which case a Java
 * exception such as NoClassDefFoundError may be pending).
 */
static int registerNativeMethods(JNIEnv* env, const char* className,
JNINativeMethod* gMethods, int numMethods)
{
LOGW("registerNativeMethods: '%s',%d", className,numMethods);
jclass clazz = env->FindClass(className);
if (clazz == NULL)
{
// BUG FIX: the original called DeleteLocalRef(clazz) here even though
// clazz is NULL; there is no local reference to delete on this path.
return JNI_FALSE;
}
int err = env->RegisterNatives(clazz, gMethods, numMethods);
env->DeleteLocalRef(clazz); // release the local class ref on every path
return (err < 0) ? JNI_FALSE : JNI_TRUE;
}
Native函数
// JNI method table mapping the Java `native` declarations in YuvStream to
// the C++ implementations. Signatures use JNI descriptor syntax.
// NOTE(review): the functions referenced here are defined later in this
// file; forward declarations (or moving this table after them) are needed
// for the translation unit to compile — confirm against the full source.
static JNINativeMethod methods[] = {
{"NativeStartDecode", "(Landroid/view/Surface;)V", (void*)startDecode},
{"NativeStopDecode", "()V", (void*)stopDecode},
{"NativeDecode", "()I", (void*)Decode},
{"NativePutBufToDecode", "([B)I", (void*)putBufToDecode},
{"nativeShowYUV", "([BII)V", (void *)nativeShowYUV},
{"nativeShowYUVint", "([III)V", (void *)nativeShowYUVint},
};
/*
 * JNI entry: placeholder decode hook (body currently only logs).
 * BUG FIX: the function is declared to return jint but had no return
 * statement — undefined behavior in C++. Now returns 0.
 */
static jint Decode(JNIEnv *env, jobject thiz)
{
LOGD("Decode enter ======>");
//ShowYUV();
return 0;
}
/*
 * JNI entry: bind the Java Surface to the native side so frames can later
 * be rendered onto it. The hard-coded version argument (9) selects the
 * "mNativeObject" long field path in getNativeSurface().
 */
static void startDecode(JNIEnv *env, jobject thiz,jobject jSurface)
{
LOGD("startDecode enter ~");
if (jSurface != NULL) {
// Stores the extracted surface in the global native_surface.
setSurface(env, jSurface, 9);
// BUG FIX: removed the unused local `sp<Surface> mSurface` and the dead
// assignment that followed it — the function returned immediately anyway.
}
}
// JNI entry: stop hook — currently a no-op; nothing is torn down here.
static void stopDecode(JNIEnv *env, jobject thiz)
{
}
/*
 * JNI entry: display one YUV frame on the previously-set native_surface.
 *
 * yuvData - raw frame bytes (I420, width*height*3/2 bytes expected —
 *           TODO confirm against the producing side).
 * width/height - frame dimensions in pixels.
 */
static void
nativeShowYUV(JNIEnv *env, jobject thiz,jbyteArray yuvData,jint width,jint height){
ALOGE("width = %d,height = %d",width,height);
jint len = env->GetArrayLength(yuvData);
ALOGE("len = %d",len);
jbyte *byteBuf = env->GetByteArrayElements(yuvData, 0);
if (byteBuf == NULL) {
return; // out of memory; a Java OutOfMemoryError is pending
}
render(byteBuf,len,native_surface,width,height);
// BUG FIX: the elements were never released, leaking a copy (or pinning
// the array) on every frame. JNI_ABORT: we did not modify the data, so
// there is nothing to copy back.
env->ReleaseByteArrayElements(yuvData, byteBuf, JNI_ABORT);
}
辅助函数
//method2
// Round x up to the next multiple of y. y must be a power of 2.
static int ALIGN(int x, int y) {
    const int mask = y - 1;
    return (x + mask) & ~mask;
}
/*
 * Extract the native Surface pointer hidden inside a java Surface object.
 *
 * version <= 8 : the pointer lives in the int field "mSurface".
 * version  > 8 : the pointer lives in the long field "mNativeObject"
 *                (Android 2.3+ / 5.x layout).
 *
 * Returns NULL if the class or field cannot be resolved.
 */
static android::Surface* getNativeSurface(JNIEnv* env, jobject jsurface, jint version)
{
jclass clazz = env->FindClass("android/view/Surface");
if (clazz == NULL)
{
return NULL; // NoClassDefFoundError pending
}
jfieldID field_surface;
if(version <=8)
{
field_surface = env->GetFieldID(clazz, "mSurface", "I");
}
else
field_surface = env->GetFieldID(clazz, "mNativeObject", "J");
env->DeleteLocalRef(clazz); // release the local class ref
if (field_surface == NULL)
{
return NULL;
}
// BUG FIX: the original always used GetLongField, even for the "I"
// (jint) field on old versions — reading an int field as a long is
// undefined. Match the accessor to the field type.
if (version <= 8)
{
return (android::Surface *)(intptr_t) env->GetIntField(jsurface, field_surface);
}
return (android::Surface *) env->GetLongField(jsurface, field_surface);
}
// Resolve the Java Surface into the global native_surface and report
// whether it is valid. Returns 1 when valid, 0 otherwise.
//
// NOTE(review): assigning the raw Surface* (owned by the Java Surface
// object) into a strong sp<> adds a reference count the Java side does
// not know about — verify the lifetime contract for this Android release;
// this pattern can lead to premature destruction or double-free.
int setSurface(JNIEnv *env, jobject jsurface, jint version)
{
native_surface = getNativeSurface(env, jsurface, version);
if(android::Surface::isValid(native_surface))
{
__android_log_print(ANDROID_LOG_INFO, "libjni", "native_surface is valid");
return 1;
}
else
__android_log_print(ANDROID_LOG_ERROR, "libjni", "native_surface is invalid");
// Reached only on the invalid path — the valid path returned 1 above.
return 0;
}
// Copy one tightly-packed planar YUV 4:2:0 frame (Y plane, then U, then V)
// from `data` into the gralloc buffer `dstbuf`, honouring the buffer's row
// stride. The destination is laid out as YV12: Y plane first, then the V
// plane, then the U plane, with the chroma stride aligned to 16.
void ConvertYUVBuffer(uint8_t* dstbuf,const void *data,int width,int height,ANativeWindowBuffer *buf) {
    const int cropW = width;
    const int cropH = height;
    // Source plane dimensions, rounded up to even (4:2:0 subsampling).
    const int srcW = (cropW + 1) & ~1;
    const int srcH = (cropH + 1) & ~1;

    // Source planes: Y, then U, then V, tightly packed.
    const uint8_t *src_y = (const uint8_t *)data;
    const uint8_t *src_u = (const uint8_t *)data + srcW * srcH;
    const uint8_t *src_v = src_u + (srcW / 2 * srcH / 2);

    // Destination planes: Y, then V, then U (YV12 ordering); chroma stride
    // is half the luma stride aligned up to 16 bytes.
    uint8_t *dst_y = (uint8_t *)dstbuf;
    const size_t dst_y_size = buf->stride * buf->height;
    const size_t dst_c_stride = ALIGN(buf->stride / 2, 16);
    const size_t dst_c_size = dst_c_stride * buf->height / 2;
    uint8_t *dst_v = dst_y + dst_y_size;
    uint8_t *dst_u = dst_v + dst_c_size;

    // Copy the luma rows, advancing by the packed source width on one side
    // and the gralloc stride on the other.
    for (int row = 0; row < cropH; ++row, src_y += srcW, dst_y += buf->stride) {
        memcpy(dst_y, src_y, cropW);
    }

    // Copy the chroma rows (half resolution in both directions).
    const int chromaRows = (cropH + 1) / 2;
    const int chromaCols = (cropW + 1) / 2;
    for (int row = 0; row < chromaRows; ++row) {
        memcpy(dst_u, src_u, chromaCols);
        memcpy(dst_v, src_v, chromaCols);
        src_u += srcW / 2;
        src_v += srcW / 2;
        dst_u += dst_c_stride;
        dst_v += dst_c_stride;
    }
}
static void render
( const void *data, size_t size, const sp<ANativeWindow> &nativeWindow,int width,int height)
{
ALOGE("[%s]%d",__FILE__,__LINE__);
sp<ANativeWindow> mNativeWindow = nativeWindow;
int err;
int mCropWidth = width;
int mCropHeight = height;
int halFormat = HAL_PIXEL_FORMAT_YV12;
int bufWidth = (mCropWidth + 1) & ~1;//对齐
int bufHeight = (mCropHeight + 1) & ~1;
CHECK_EQ(0,
native_window_set_usage(
mNativeWindow.get(),
GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
| GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
CHECK_EQ(0,
native_window_set_scaling_mode(
mNativeWindow.get(),
NATIVE_WINDOW_SCALING_MODE_SCALE_CROP));
CHECK_EQ(0, native_window_set_buffers_geometry(
mNativeWindow.get(),
bufWidth,
bufHeight,
halFormat));
ANativeWindowBuffer *buf;
if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(),
&buf)) != 0) {
ALOGW("Surface::dequeueBuffer returned error %d", err);
return;
}
GraphicBufferMapper &mapper = GraphicBufferMapper::get();
Rect bounds(mCropWidth, mCropHeight);
uint8_t* dst = NULL;
CHECK_EQ(0, mapper.lock(
buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, (void**)&dst));//
//将yuv数据copy到图形缓冲区
ConvertYUVBuffer(dst,data,width,height, buf);
}
ALOGE("render buf->stride = %d",buf->stride);
CHECK_EQ(0, mapper.unlock(buf->handle));
if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf,
-1)) != 0) {
ALOGW("Surface::queueBuffer returned error %d", err);
}
buf = NULL;
}
Make文件
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# BUG FIX: module names must not contain spaces. The Java side calls
# System.loadLibrary("YUVShow"), which resolves to libYUVShow.so, so the
# module must be named libYUVShow (the original "lib YUVShow" breaks this).
LOCAL_MODULE := libYUVShow

#LOCAL_LDLIBS += -llog

# Header search paths for the AOSP in-tree build.
LOCAL_C_INCLUDES := \
	bionic \
	bionic/libstdc++/include \
	external/stlport/stlport \
	external/libyuv/files/include \
	frameworks/av/media/libstagefright \
	frameworks/av/media/libstagefright/include \
	frameworks/native/include/media/openmax \
	frameworks/av/media/libstagefright/codecs/on2/h264dec/source \
	frameworks/av/media/libstagefright/codecs/on2/h264dec/inc \
	frameworks/av/include/media/stagefright \
	frameworks/native/include/utils \
	frameworks/native/include/gui

# Shared libraries providing Surface/gralloc/stagefright/logging symbols.
# liblog added explicitly: the source uses __android_log_print/ALOG*.
LOCAL_SHARED_LIBRARIES := \
	libbinder \
	libui \
	libgui \
	libstagefright \
	libutils \
	liblog \
	libstagefright_soft_h264dec

#LOCAL_STATIC_LIBRARIES := libyuv_static

# BUG FIX: the original had a trailing backslash after YUVShow.cpp, which
# made the LOCAL_MODULE_TAGS line part of LOCAL_SRC_FILES.
LOCAL_SRC_FILES := \
	YUVShow.cpp

LOCAL_MODULE_TAGS := optional
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)

#LOCAL_CFLAGS += -m64

include $(BUILD_SHARED_LIBRARY)
4.2 app
4.2.1java
Java代码很多,不能写全,贴关键部分,主要是使用textureview去呈现窗口,可扩展子类,重写onSurfaceTextureAvailable等方法,当建立一个surface后surface传递给jni,
// Called when the TextureView's SurfaceTexture becomes available; wraps it
// in a Surface and hands it down to the native renderer via JNI.
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width,
int height) {
…
// After the SurfaceTexture is created, pass a Surface down to the
// native layer through JNI.
mSavedSurface = new Surface(mSavedSurfaceTexture);
YuvStream.getInstance().setSurface(mSavedSurface);
}
做一个按钮,在触发函数里读文件,将读到的数据发给jni去播放,
// Test clip parameters: QCIF (176x144) 4:2:0 — one frame is w*h*3/2 bytes.
final private String FILE_NAME2 = "carphone001.yuv";
private int width2 = 176;
private int height2 = 144;
private int size2 = width2 * height2 * 3/2;
private OnClickListener getOnClickListener() {
OnClickListener onClickListener = new OnClickListener() {
public void onClick(View view) {
int id = view.getId();
Log.d(this, "onClick(View " + view + ", id " + id + ")...");
switch (id) {
case R.id.btn_decode:
// Read one raw frame from the SD card and hand it to JNI to display.
byte[]yuvArray = new byte[size2];
int[] mIntArray = new int[width2 * height2]; // NOTE(review): unused in this snippet — verify against the full file
readYUVFile(yuvArray, FILE_NAME2,size2);
YuvStream.getInstance().ShowYUV(yuvArray,width2,height2);
}
读文件
private boolean readYUVFile(byte[] yuvArray,String filename,int len){
try {
File sdCardDir = Environment.getExternalStorageDirectory();
FileInputStream fis = new FileInputStream(
sdCardDir.getCanonicalPath() +"/" + filename);
fis.read(yuvArray, 0, len);
//test play file
fis.close();
return true;
} else {
return false;
}
}catch (Exception e) {
e.printStackTrace();
return false;
}
}
4.2.2 布局
<!-- Root overlay: video layers underneath, text views / button on top. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<!-- Video container: full-screen remote video plus a small self-preview. -->
<FrameLayout
android:layout_width="match_parent"
android:layout_height="match_parent" >
<!-- Full-screen TextureView that receives the decoded YUV frames. -->
<TextureView
android:id="@+id/incomingVideo"
android:layout_gravity="center_vertical"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<!-- Small preview window pinned to the top-right corner. -->
<FrameLayout
android:id="@+id/previewVideoContainer"
android:layout_marginRight="5dp"
android:layout_marginTop="20dp"
android:layout_gravity="top|right"
android:layout_width="90dp"
android:layout_height="110dp"
android:background="#aaa" >
<TextureView
android:id="@+id/previewVideo"
android:layout_marginLeft="2dp"
android:layout_marginTop="2dp"
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
</FrameLayout>
</FrameLayout>
<!-- Static label overlay. -->
<TextView
android:background="@android:color/transparent"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/hello_world"
android:layout_marginLeft="15dp"
android:layout_marginTop="15dp" />
<!-- Acts as the "decode" button: its id is handled in onClick(). -->
<TextView
android:id="@+id/btn_decode"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:drawableTop="@drawable/hello_world"
android:text="@string/hello_world" />
</FrameLayout>
4.2.3 jni接口
package com.example.yuvplayer;
import android.os.Handler;
import android.os.Message;
//import android.os.RegistrantList;
//import android.os.SystemProperties;
import android.util.Log;
import android.view.Surface;
/**
 * Thin singleton wrapper around the JNI entry points of libYUVShow.so.
 *
 * BUG FIXES:
 *  - mSurface was assigned in setSurface() but never declared;
 *  - log() was called throughout but never defined;
 *  - NativePutBufToDecode is registered by the native side's method table
 *    ("NativePutBufToDecode", "([B)I"); if the declaration is missing here,
 *    RegisterNatives fails and the whole library refuses to load.
 */
class YuvStream {
    private static final String TAG = "YuvStream";

    // Native methods implemented and registered in the JNI library.
    private static native void NativeStartDecode(Surface st);
    private static native int NativeDecode();
    private static native void NativeStopDecode();
    private static native int NativePutBufToDecode(byte[] buf);
    private static native void nativeShowYUV(byte[] frame, int w, int h);
    private static native void nativeShowYUVint(int[] frame, int w, int h);

    // Use a singleton
    private static YuvStream mInstance;

    // Surface handed down to the native renderer (kept to hold a reference).
    private Surface mSurface;

    /**
     * This method returns the single instance of ImsMedia object *
     */
    public static synchronized YuvStream getInstance() {
        if (mInstance == null) {
            mInstance = new YuvStream();
        }
        return mInstance;
    }

    static {
        System.loadLibrary("YUVShow");
    }

    /** Passes the display Surface to the native layer. */
    public void setSurface(Surface st) {
        log("setSurface(Surface: " + st + ")");
        mSurface = st;
        NativeStartDecode(st);
    }

    public void decode() {
        log("decode()");
        NativeDecode();
    }

    /** Displays one frame supplied as packed ints. */
    public void ShowYUV(int[] frame, int w, int h) {
        log("ShowYUV()");
        //nativeShowYUV(frame, w, h);
        nativeShowYUVint(frame, w, h);
    }

    /** Displays one frame supplied as raw bytes. */
    public void ShowYUV(byte[] frame, int w, int h) {
        log("ShowYUV()");
        nativeShowYUV(frame, w, h);
    }

    private static void log(String s) {
        Log.d(TAG, s);
    }
}
4.3相关信息
YUV素材文件在http://www.cipr.rpi.edu/resource/sequences/index.html可下载。
根据代码中的文件路径,将素材放置到手机sdcard根目录下:先执行 adb remount,再执行 adb push xxx.yuv /sdcard/ 即可。
若目标平台是64位,编译出的so库是64位的,要放置到system/lib64目录下;32位平台则放到system/lib目录下。
需要了解YUV的基本知识,网上有很好的资料。
要了解自己的素材文件的尺寸和采样方式,代码里要匹配,否则解码会有问题。
最后的效果如图(进入apk,点击按钮出现):