Using JNI to call the H.264 decoding functions in ffmpeg.so to play a file

We can now use the previously compiled ffmpeg to decode an H.264 file; the steps for compiling ffmpeg are covered in an earlier post.

Step 1: Copy the previously built ffmpeg.so into the NDK installation directory, under ...platforms/android-3/arch-arm/usr/lib.

Step 2: Create a new project laid out as h264_project/jni and put the ffmpeg source tree under the jni directory (many of its header files are needed).

Step 3: Write H264Android.c in the jni directory with the content below. (The JNI function names in this file are derived from the native methods declared in the Java class, so name them to match your own package and class; see the hello-jni sample in the NDK for how such a file is generated.)

/* DO NOT EDIT THIS FILE - it is machine generated */
#include <string.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>

#include <ffmpeg/libavutil/common.h>
#include <ffmpeg/libavcodec/avcodec.h>
struct AVCodec *codec=NULL;			  // Codec
struct AVCodecContext *c=NULL;		  // Codec Context
struct AVFrame *picture=NULL;		  // Frame

int iWidth=0;
int iHeight=0;

int *colortab=NULL;
int *u_b_tab=NULL;
int *u_g_tab=NULL;
int *v_g_tab=NULL;
int *v_r_tab=NULL;

//short *tmp_pic=NULL;

unsigned int *rgb_2_pix=NULL;
unsigned int *r_2_pix=NULL;
unsigned int *g_2_pix=NULL;
unsigned int *b_2_pix=NULL;

void DeleteYUVTab()
{
//	av_free(tmp_pic);

	av_free(colortab);
	av_free(rgb_2_pix);
}
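
/* CreateYUVTab_16 precomputes the lookup tables used for YUV -> RGB565 conversion:
 * u_b/u_g/v_g/v_r hold the chroma contributions (the usual YCbCr->RGB coefficients
 * 1.772, 0.34414, 0.71414 and 1.402), and rgb_2_pix maps a possibly out-of-range
 * colour component directly to its packed RGB565 bits. */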

void CreateYUVTab_16()
{
	int i;
	int u, v;

//	tmp_pic = (short*)av_malloc(iWidth*iHeight*2); // buffer of iWidth * iHeight * 16 bits

	colortab = (int *)av_malloc(4*256*sizeof(int));
	u_b_tab = &colortab[0*256];
	u_g_tab = &colortab[1*256];
	v_g_tab = &colortab[2*256];
	v_r_tab = &colortab[3*256];

	for (i=0; i<256; i++)
	{
		u = v = (i-128);

		u_b_tab[i] = (int) ( 1.772 * u);
		u_g_tab[i] = (int) ( 0.34414 * u);
		v_g_tab[i] = (int) ( 0.71414 * v);
		v_r_tab[i] = (int) ( 1.402 * v);
	}

	rgb_2_pix = (unsigned int *)av_malloc(3*768*sizeof(unsigned int));

	r_2_pix = &rgb_2_pix[0*768];
	g_2_pix = &rgb_2_pix[1*768];
	b_2_pix = &rgb_2_pix[2*768];
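
	/* Three 256-entry bands per table: after the +=256 pointer shift below,
	   negative components clamp to 0, 0..255 map to the packed RGB565 bits,
	   and components above 255 saturate to the channel maximum. */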

	for(i=0; i<256; i++)
	{
		r_2_pix[i] = 0;
		g_2_pix[i] = 0;
		b_2_pix[i] = 0;
	}

	for(i=0; i<256; i++)
	{
		r_2_pix[i+256] = (i & 0xF8) << 8;
		g_2_pix[i+256] = (i & 0xFC) << 3;
		b_2_pix[i+256] = (i ) >> 3;
	}

	for(i=0; i<256; i++)
	{
		r_2_pix[i+512] = 0xF8 << 8;
		g_2_pix[i+512] = 0xFC << 3;
		b_2_pix[i+512] = 0x1F;
	}

	r_2_pix += 256;
	g_2_pix += 256;
	b_2_pix += 256;
}
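
/* DisplayYUV_16 converts one decoded YUV420 planar frame to RGB565. It walks the
 * image in 2x2 blocks (four Y samples share one U and one V sample) and packs two
 * 16-bit pixels into every 32-bit store to pdst; dst_ystride is measured in
 * 16-bit pixels. */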

void DisplayYUV_16(unsigned int *pdst1, unsigned char *y, unsigned char *u, unsigned char *v, int width, int height, int src_ystride, int src_uvstride, int dst_ystride)
{
	int i, j;
	int r, g, b, rgb;

	int yy, ub, ug, vg, vr;

	unsigned char* yoff;
	unsigned char* uoff;
	unsigned char* voff;

	unsigned int* pdst=pdst1;

	int width2 = width/2;
	int height2 = height/2;

	if(width2>iWidth/2)
	{
		width2=iWidth/2;

		y+=(width-iWidth)/4*2;
		u+=(width-iWidth)/4;
		v+=(width-iWidth)/4;
	}

	if(height2>iHeight)
		height2=iHeight;

	for(j=0; j<height2; j++) // process one 2x2 block (four pixels) per iteration
	{
		yoff = y + j * 2 * src_ystride;
		uoff = u + j * src_uvstride;
		voff = v + j * src_uvstride;

		for(i=0; i<width2; i++)
		{
			yy  = *(yoff+(i<<1));
			ub = u_b_tab[*(uoff+i)];
			ug = u_g_tab[*(uoff+i)];
			vg = v_g_tab[*(voff+i)];
			vr = v_r_tab[*(voff+i)];

			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

			yy = *(yoff+(i<<1)+1);
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			pdst[(j*dst_ystride+i)] = (rgb)+((r_2_pix[r] + g_2_pix[g] + b_2_pix[b])<<16);

			yy = *(yoff+(i<<1)+src_ystride);
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

			yy = *(yoff+(i<<1)+src_ystride+1);
			b = yy + ub;
			g = yy - ug - vg;
			r = yy + vr;

			pdst [((2*j+1)*dst_ystride+i*2)>>1] = (rgb)+((r_2_pix[r] + g_2_pix[g] + b_2_pix[b])<<16);
		}
	}
}

/*
 * Class:     com_zhutieju_testservice_H264Android
 * Method:    initDecoder
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_com_zhutieju_testservice_H264Android_initDecoder
  (JNIEnv* env, jobject thiz, jint width, jint height)
{
	iWidth = width;
	iHeight = height;

	CreateYUVTab_16();
	
	avcodec_init();
	avcodec_register_all();
	// find the H.264 decoder
	codec = avcodec_find_decoder(CODEC_ID_H264);
	// allocate the codec context
	c = avcodec_alloc_context();
	// open the decoder
	avcodec_open(c, codec);
	// allocate storage for the decoded frame
	picture = avcodec_alloc_frame(); //picture = malloc(sizeof(AVFrame));

	return 1;
}

/*
 * Class:     com_zhutieju_testservice_H264Android
 * Method:    dalDecoder
 * Signature: ([BI[B)I
 */
JNIEXPORT jint JNICALL Java_com_zhutieju_testservice_H264Android_dalDecoder
  (JNIEnv* env, jobject thiz, jbyteArray in, jint nalLen, jbyteArray out)
{
	int i;
	int imod;
	int got_picture=0;

	jbyte * Buf = (jbyte*)(*env)->GetByteArrayElements(env, in, 0);
	jbyte * Pixel= (jbyte*)(*env)->GetByteArrayElements(env, out, 0);

	int consumed_bytes = avcodec_decode_video(c, picture, &got_picture, Buf, nalLen);

	if(got_picture > 0)
	{
		DisplayYUV_16((int*)Pixel, picture->data[0], picture->data[1], picture->data[2], c->width, c->height, picture->linesize[0], picture->linesize[1], iWidth);
/*
		for(i=0; i<c->height; i++)
			fwrite(picture->data[0] + i * picture->linesize[0], 1, c->width, outf);

		for(i=0; i<c->height/2; i++)
			fwrite(picture->data[1] + i * picture->linesize[1], 1, c->width/2, outf);

		for(i=0; i<c->height/2; i++)
			fwrite(picture->data[2] + i * picture->linesize[2], 1, c->width/2, outf);
// */
	}

    (*env)->ReleaseByteArrayElements(env, in, Buf, 0);
    (*env)->ReleaseByteArrayElements(env, out, Pixel, 0);

	return consumed_bytes;
}

/*
 * Class:     com_zhutieju_testservice_H264Android
 * Method:    releaseDecoder
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_com_zhutieju_testservice_H264Android_releaseDecoder
  (JNIEnv* env, jobject thiz)
{
	if(c)
	{
		avcodec_close(c);	// avcodec_close also frees c->priv_data
		av_free(c);			// allocated with av_malloc, so release with av_free
		c = NULL;
	}

	if(picture)
	{
		av_free(picture);
		picture = NULL;
	}

	DeleteYUVTab();

	return 1;
}


Notes on the functions above:

a. Java_com_zhutieju_testservice_H264Android_initDecoder initializes the decoder; its parameters are the width and height of one frame. CreateYUVTab_16 builds the lookup tables used for YUV-to-RGB conversion (the decoder outputs YUV frames, so converting them to RGB makes it easy for the Java layer to build a bitmap and draw it).

b. Java_com_zhutieju_testservice_H264Android_dalDecoder decodes one frame; its parameters are the raw data, the length of the raw data, and out, the array that receives the decoded data. A minimal calling sketch is given after these notes.

c. Java_com_zhutieju_testservice_H264Android_releaseDecoder releases the memory that was allocated.
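
For orientation, here is a minimal sketch of the expected calling sequence from the Java side. It is a hypothetical helper, not part of the project; it assumes the caller already splits the stream into NAL units and uses the 352x288 frame size from the sample code below:

// Hypothetical usage sketch: feed pre-split NAL units to the native decoder.
void decodeAll(java.util.List<byte[]> nalUnits) {
	H264Android codec = new H264Android();     // static block loads ffmpeg, then H264Android
	codec.initDecoder(352, 288);               // stream width/height
	byte[] rgb565 = new byte[352 * 288 * 2];   // RGB565: 2 bytes per pixel
	for (byte[] nal : nalUnits) {
		int consumed = codec.dalDecoder(nal, nal.length, rgb565);
		if (consumed > 0) {
			// treat a positive return as a decoded frame (as MainActivity does)
			// and wrap rgb565 in a Bitmap (Config.RGB_565) to draw it
		}
	}
	codec.releaseDecoder();
}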

Step 4: Write Android.mk in the jni directory with the following content:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

PATH_TO_FFMPEG_SOURCE := $(LOCAL_PATH)/ffmpeg
LOCAL_C_INCLUDES +=$(PATH_TO_FFMPEG_SOURCE)
LOCAL_LDLIBS := -lffmpeg
LOCAL_MODULE    := H264Android
LOCAL_SRC_FILES := H264Android.c

include $(BUILD_SHARED_LIBRARY)

LOCAL_LDLIBS lists the shared libraries this module links against; here it depends on ffmpeg.so. Note that the leading lib of a library's file name is not written after -l.

Step 5: Run ndk-build.

On success, two directories, libs and obj, are generated at the same level as jni; the library we need is in libs.

Step 6: Copy the generated libH264Android.so and ffmpeg.so into the libs/armeabi directory of the Android project in Eclipse, then write the Android code.

a. H264Android.java, the class that declares the native decoding methods:

package com.zhutieju.testservice;

public class H264Android {
	static {
		System.loadLibrary("ffmpeg");
		System.loadLibrary("H264Android");
	}
	
	public native int initDecoder(int width, int height);
	public native int dalDecoder(byte[] in,int size,byte[] out);
	public native int releaseDecoder();
}


b. MyView.java, a custom SurfaceView that displays the frames:

package com.zhutieju.testservice;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Custom view that displays the received frames.
 * @author Administrator
 */
public class MyView extends SurfaceView implements SurfaceHolder.Callback {
	SurfaceHolder holder; // holder that controls the SurfaceView
	private MyThread t;
	private Bitmap ivBitmap; // bitmap to be drawn
	private boolean running = true; // controls MyThread's run loop
	private Paint paint = new Paint();
	
	public MyView(Context context) {
		super(context);
		// TODO Auto-generated constructor stub
		this.holder = this.getHolder();
		holder.addCallback(this);
		t = new MyThread();
	}

	@Override
	public void surfaceCreated(SurfaceHolder holder) {
		// TODO Auto-generated method stub
		t.start();
	}

	@Override
	public void surfaceChanged(SurfaceHolder holder, int format, int width,
			int height) {
		// TODO Auto-generated method stub

	}

	@Override
	public void surfaceDestroyed(SurfaceHolder holder) {
		// TODO Auto-generated method stub
		if (t.isAlive()) {
			running = false;
		}
	}
	/**
	 * Drawing thread that drives updates of MyView.
	 * @author Administrator
	 */
	private class MyThread extends Thread {
		Canvas canvas = null;
		@Override
		public void run() {
			while (running) {
				if (Util.list.size() > 0) {
					canvas = holder.lockCanvas();
					if (canvas != null) {
						onDraw(canvas);
						System.out.println("------一帧图像被绘画-------");
						// only post the canvas if lockCanvas() actually returned one
						holder.unlockCanvasAndPost(canvas);
					}
				}
				try {
					sleep(20);
				} catch (InterruptedException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}
			}
		}

	}
	/**
	 * Override of View's onDraw method.
	 */
	@Override
	protected void onDraw(Canvas canvas) {
		// TODO Auto-generated method stub
		super.onDraw(canvas);
		Matrix m = new Matrix();
		ivBitmap = Util.setOrgetBitmap(1, null);
		canvas.drawBitmap(ivBitmap, m, null);
		canvas.drawText("2013年5月2号", 0, 10, paint);
	}

}

  

c. Util.java, a utility class:

package com.zhutieju.testservice;

import java.util.ArrayList;

import android.graphics.Bitmap;
/**
 * Utility class that holds the shared bitmap list.
 * @author Administrator
 */
public class Util {
	public static ArrayList<Bitmap> list = new ArrayList<Bitmap>();
	/**
	 * Set or get a bitmap.
	 * @param type	0 to add, 1 to take
	 * @param mBitmap
	 * @return
	 */
	public static synchronized Bitmap setOrgetBitmap(int type,Bitmap mBitmap) {
		switch (type) {
		case 0:
			list.add(mBitmap);
			return null;
		case 1:
			Bitmap bitmap = list.get(0);
			list.remove(0);
			return bitmap;
		}
		return null;
	}
}


d. MainActivity.java, the main implementation class:

package com.zhutieju.testservice;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.os.Bundle;
import android.util.Log;

/**
 * Main activity.
 * 
 * @author Administrator
 */
public class MainActivity extends Activity {
	public static final String TAG = "服务端日志";
	//ServerThread thread;
	ReadFileThread thread;
	boolean isActivity = true;// controls the run loop of ServerThread
	long decoder;
	H264Android h264;
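	// buffer for one decoded RGB565 frame (352x288); RGB_565 needs width*height*2
	// bytes, so this allocation leaves extra headroom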
	byte[] mPixel = new byte[352 * 288*4];
	ByteBuffer buffer = ByteBuffer.wrap(mPixel);
	public static ArrayList<byte[]> framebuf = new ArrayList<byte[]>();
	
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(new MyView(this));
		h264 = new H264Android();
		decoder = h264.initDecoder(352, 288);
		for (int i = 0; i < mPixel.length; i++) {
			mPixel[i] = (byte) 0x00;
		}
		//thread = new ServerThread();
		thread = new ReadFileThread();
		thread.start();
		new Thread(new DecordeThread()).start();
	}

	@Override
	protected void onDestroy() {
		// TODO Auto-generated method stub
		super.onDestroy();
		if (thread.isAlive()) {
			isActivity = false;
		}
		finish();
		System.exit(0);
	}

	/**
	 * Thread that receives data from a client over a socket (disabled below).
	 * 
	 * @author Administrator
	 */
/*	public class ServerThread extends Thread {
		ServerSocket ss;// server-side ServerSocket
		public ServerThread() {
			
		}

		Socket s;
		InputStream is;
		int len, size;
		StringBuilder sb;;

		@Override
		public void run() {
			try {
				Log.v(TAG, "服务端启动成功--------------->");
				ss = new ServerSocket(5000);	
				s = ss.accept();
				Log.v(TAG, "与客户端连接成功------------->"+s.toString());
				DataInputStream dis = new DataInputStream(s.getInputStream());
				ByteArrayOutputStream baos;
				while (isActivity) {
					int len2 = 0;
					baos = new ByteArrayOutputStream();
					
					byte[] b = new byte[20];
					dis.read(b);
					try {
						System.out.println("--->" + 
						JTools.toStringList(b, 0, "UTF-8").get(0));
					} catch (Exception e) {
						e.printStackTrace();
					}
					
					sb = new StringBuilder();
					while ((len = dis.read()) != '\r') {
						if (len == -1) {
							break;
						}
						sb.append((char) len);
					}
					if (sb.length() > 0) {
						size = Integer.parseInt(sb.toString());
						byte[] data = new byte[size];
						while (size > 0
								&& (len = dis.read(data, 0, size)) != -1) {
							baos.write(data, 0, len);
							size = size - len;
						}
					}
					
					len = dis.readInt();
					Log.i(TAG, "len的大小为"+len);
					byte[] data = new byte[len];
					while(len>0 && (len2 = dis.read(data,0,len)) != -1) {
						System.out.println("-------len2------->" + len2);
						baos.write(data, 0, len2);
						len = len - len2;
					}
					if(framebuf.size()>25) {
						try {
							sleep(800);
						} catch (InterruptedException e) {
							// TODO Auto-generated catch block
							e.printStackTrace();
						}
					}
					Log.i(TAG, "-------------------------------------------");
				setOrget(1, baos.toByteArray());
				}

			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
		}

		@Override
		public void destroy() {
			// TODO Auto-generated method stub
			h264.releaseDecoder();
			try {
				if (ss != null) {
					ss.close();
				}
				if (s != null) {
					s.close();
				}
			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
			System.exit(0);
			super.destroy();
		}
	} */
	
	/**
	 * Thread that reads frames from the H.264 file.
	 * @author Administrator
	 */
	public class ReadFileThread extends Thread {
		int file_index = 0;
		File file = new File("/mnt/sdcard/zhutj.264");
		@Override
		public void run() {
			try {
				// open the file once; readOneFrame() seeks to file_index itself
				RandomAccessFile raf = new RandomAccessFile(file, "r");
				while(file_index<file.length()) {
					byte[] data = new byte[1024*50];
					int len = readOneFrame(raf, data);
					Log.i(TAG, "一帧长度为:"+len);
					byte[] newData = new byte[len];
					System.arraycopy(data, 0, newData, 0, len);
					//Log.i(TAG, "前四个字节为:"+newData[0]+" "+newData[1]+" "+newData[2]+" "+newData[3]);
					setOrget(1, newData);
				}
				raf.close();
			} catch (Exception e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
		}
		/**
		 * Read one frame from the file.
		 * @param raf	file to read from
		 * @param data	buffer that receives the frame data
		 * @return		length of the frame
		 * @throws IOException
		 */
		private int readOneFrame(RandomAccessFile raf, byte[] data) throws IOException {
			int len = 0; // number of bytes read so far (the frame length)
			raf.seek(file_index);
			// Scan forward from file_index until the next start code (00 00 01 or
			// 00 00 00 01). The bytes read before it form one frame, including the
			// frame's own leading start code; len > 6 keeps that leading start code
			// from terminating the scan immediately.
			while (true) {
				if ((data[len++] = raf.readByte()) == 0 && (data[len++] = raf.readByte()) == 0) {
					if ((data[len++] = raf.readByte()) == 0 && len > 6) {
						if ((data[len++] = raf.readByte()) == 1) {
							file_index += (len - 4);
							return len - 4;
						} else {
							continue;
						}
					} else if (data[len - 1] == 1 && len > 6) {
						file_index += (len - 3);
						return len - 3;
					} else {
						continue;
					}
				} else {
					continue;
				}
			}
		}
	}
	

	
	/**
	 * Store or fetch one frame of data.
	 * @param type	1 to store, 0 to fetch
	 * @param data
	 * @return
	 */
	public synchronized byte[] setOrget(int type,byte[] data) {
		switch (type) {
		case 1:	// store data
			framebuf.add(data);
			return null;
		case 0: // fetch data
			if(framebuf.size()>0) {
				byte[] b = framebuf.get(0);
				framebuf.remove(0);
				return b;
			}
		}
		return null;
	}
	
	
	
	/**
	 * Decoding thread.
	 * @author Administrator
	 */
	class DecordeThread implements Runnable {
		@Override
		public void run() {
			while(true) {
				byte[] dataa = setOrget(0, null);
				if (dataa != null && dataa.length > 0) { // a complete frame was received; decode it
					int resout = h264.dalDecoder(dataa, dataa.length, mPixel);
					if(resout>0) {
						Bitmap videoBit = Bitmap.createBitmap(352, 288, Config.RGB_565);
						videoBit.copyPixelsFromBuffer(buffer);
						Util.setOrgetBitmap(0, videoBit);
						Log.i(TAG, "集合中的数据:"+Util.list.size());
					}
				} 
			}
		}
	}
	
	public static String nowTime() {
		Calendar c = Calendar.getInstance();
		c.setTimeInMillis(new Date().getTime());
		SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
		return dateFormat.format(c.getTime());
	}
}

Note: the order of the System.loadLibrary calls in the static block matters (ffmpeg must be loaded before H264Android). The file being played, zhutj.264, is one I recorded myself to match this format; it can be produced on a phone or a PC, but the data must be saved frame by frame, with each frame preceded by a 0x00000001 start code, and the file must begin with the SPS and PPS data, followed by the I-frames, B-frames, and so on.
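
For reference, here is a minimal sketch (hypothetical, not the recorder actually used) of how such a file can be written, assuming the SPS, PPS and coded frames are already available as raw NAL payloads:

// Hypothetical sketch: write NAL units to a .264 file, each prefixed with the
// 0x00000001 start code; the SPS and PPS units must come first in the list.
static void writeAnnexB(java.io.OutputStream out, java.util.List<byte[]> nalUnits)
		throws java.io.IOException {
	byte[] startCode = {0, 0, 0, 1};
	for (byte[] nal : nalUnits) {
		out.write(startCode);
		out.write(nal);
	}
}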

Download links for the complete project and for the playback file (with notes on it) were provided in the original post.

