读取 H.264 视频文件的每一帧,送入 MediaCodec 解码,并通过 Surface 显示
H264FileDecodeActivity.java
public class H264FileDecodeActivity extends Activity
implements SurfaceHolder.Callback, OnClickListener, TextureView.SurfaceTextureListener, OnFrameAvailableListener {
private SurfaceView testSurfaceView;
private TextureView textureView;
private SurfaceHolder holder;
// Video file path
//private String path0 = Environment.getExternalStorageDirectory() + "/H265_22f.h265";
private String path = Environment.getExternalStorageDirectory() + "/mediacodec_1.264"; // change to your own video file path
private String TAG = "H264FileDecodeActivity";
private int width, height;
// The decoder
private MediaCodec mCodec;
private boolean isFirst = true;
// MIME type to decode
private final static String MIME_TYPE = "video/avc"; // H.264 Advanced Video
private final static int TIME_INTERNAL = 5; // NOTE(review): appears unused in this file
// Set once the whole file has been read
private boolean isFinish = false;
// After the first frame head is found, the search for the second head resumes
// this many bytes later; if decoding fails, try lowering this value
private int FRAME_MIN_LEN = 8; //1024;
// A typical H.264 frame is under 200 KB; if decoding fails, try raising this value
private static final int FRAME_MAX_LEN = 300 * 1024;
// Per-frame sleep derived from the frame rate (here 25 fps); adjust to the real rate
private int PRE_FRAME_TIME = 1000 / 25;
// Accumulator holding (partial) frames parsed from the file
byte[] frame = new byte[FRAME_MAX_LEN];
// Chunk buffer for each read from the file
byte[] readData = new byte[10 * 1024];
// Number of valid bytes currently held in frame[]
int frameLen = 0;
int headFirstIndex;  // index of the first start code found
int headSecondIndex; // index of the following start code
int frameNum;        // frames decoded so far in the current pass
// Thread that drains the frame queue into the decoder
UpdateDecoder updateDecoderT;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Inflate the demo layout and wire up both preview surfaces plus the button
    // that triggers decoding.
    setContentView(R.layout.activity_h264);

    testSurfaceView = (SurfaceView) findViewById(R.id.surfaceview);
    holder = testSurfaceView.getHolder();
    holder.addCallback(this);

    textureView = (TextureView) findViewById(R.id.textureview);
    textureView.setSurfaceTextureListener(this);

    Button decodeButton = (Button) findViewById(R.id.takePhoto);
    decodeButton.setOnClickListener(this);

    // Fresh NALU holder and a clean "not finished" state for the first pass.
    nalu = new NaluUnit();
    isFinish = false;
}
// Handler that marshals decode work onto the UI thread.
// msg.what == 2: pull the next frame from the queue filled by UpdateDecoder.
// msg.what == 0/1: frame bytes travel in msg.obj with their length in msg.arg1.
// NOTE(review): a non-static Handler holds an implicit Activity reference and
// can leak it if messages outlive the Activity — confirm lifecycle.
Handler myHandler = new Handler() {
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case 2:
                // FIX: poll() returns null when the queue is empty (or the
                // decoder thread never initialized it); the original
                // dereferenced the result unconditionally and could NPE.
                byte[] sendBuf = (mQueue0 != null) ? mQueue0.poll() : null;
                if (sendBuf != null) {
                    onFrame(sendBuf, 0, sendBuf.length);
                }
                break;
            case 1:
                Log.d(TAG," onFrame 1 ");
                onFrame((byte[]) msg.obj, 0, msg.arg1);
                break;
            case 0:
                Log.d(TAG," onFrame 0 ");
                onFrame((byte[]) msg.obj, 0, msg.arg1);
                break;
            default:
                break;
        }
        super.handleMessage(msg);
    }
};
// Frame queues filled by UpdateDecoder.pushBuf(); only mQueue0 (byte[]) is
// actively consumed — mQueue is allocated but its uses are commented out.
private LinkedBlockingQueue<ByteBuffer> mQueue;
private LinkedBlockingQueue<byte[]> mQueue0;
int waitC = 0; // NOTE(review): appears unused
/**
 * Worker thread that drains the frame queue and asks the UI thread (via
 * {@code myHandler}, msg.what == 2) to feed each buffered frame to the decoder.
 */
class UpdateDecoder extends Thread {
    private boolean runFlag;

    /** Allocates the frame queues; must be called before start(). */
    public void init() {
        mQueue = new LinkedBlockingQueue<ByteBuffer>();
        mQueue0 = new LinkedBlockingQueue<byte[]>();
    }

    /** Copies {@code len} bytes out of {@code buf} and enqueues them as one frame. */
    public void pushBuf(byte[] buf, int offset, int len) {
        byte[] frameD = new byte[len];
        // FIX: the original had the src/dst arguments reversed
        // (System.arraycopy(frameD, 0, buf, offset, len)), which copied the
        // freshly allocated zero array into the caller's buffer and enqueued
        // an all-zero frame.
        System.arraycopy(buf, offset, frameD, 0, len);
        mQueue0.offer(frameD);
    }

    /** Stops the run loop and discards any queued frames. */
    public void updataStop() {
        runFlag = false;
        mQueue0.clear();
    }

    @Override
    public void run() {
        runFlag = true;
        Log.i("UpdateDecoder", " UpdateDecoder mQueue.size() = "+mQueue0.size());
        while (runFlag) {
            if (mQueue0.size() <= 0) {
                // Nothing queued yet — back off briefly before polling again.
                try {
                    Thread.sleep(40);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
            while (mQueue0.size() > 0) {
                try {
                    // Ask the UI thread to dequeue one frame and decode it;
                    // ~40 ms between frames approximates 25 fps playback.
                    Message message = new Message();
                    message.what = 2;
                    myHandler.sendMessage(message);
                    try {
                        Thread.sleep(40);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                } catch (Throwable t) {
                    t.printStackTrace();
                }
            }
        }
    }
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    // The TextureView surface is ready: record its size and start the decoder.
    this.width = width;
    this.height = height;
    Log.i(TAG, "onSurfaceTextureAvailable: width = " + width + ", height = " + height);
    startCodec();
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // Unused: MediaCodec renders decoded frames to the surface directly.
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    Log.i(TAG, "onSurfaceTextureSizeChanged: width = " + width + ", height = " + height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
    // Tear the decoder down with the surface.
    // NOTE(review): returning false means the app is responsible for releasing
    // the SurfaceTexture, which never happens here — confirm no leak.
    stopCodec();
    return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    // No-op: nothing to do per rendered frame.
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // SurfaceView decode path is currently disabled (see surfaceCreated).
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Record the SurfaceView size. The SurfaceView decode path itself stays
    // commented out — the TextureView callbacks drive the decoder instead.
    width = holder.getSurfaceFrame().width();
    height = holder.getSurfaceFrame().height();
    Log.i(TAG, "surfaceCreated: width = " + width + ", height = " + height);
    //startCodec();
    //updateDecoderT = new UpdateDecoder();
    //updateDecoderT.init();
    //updateDecoderT.start();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    //stopCodec();
    //updateDecoderT.updataStop();
}
@Override
public void onClick(View v) {
    // Button press starts a decode pass unless one is still winding down.
    if(!isFinish) {
        doDecoder();
        //doDecodec();
    }
}
/** Lazily creates the decoder: only the first call does any real work. */
public void startCodec() {
    if (!isFirst) {
        return;
    }
    initDecoder();
}
/**
 * Asynchronous MediaCodec callback path (currently unused — the
 * setCallback() call in initDecoder() is commented out). Feeds the most
 * recent NALU captured by doDecoder() into the codec's input buffers.
 */
private class DecoderCallback extends MediaCodec.Callback {
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) {
        if (nalu.size != 0) {
            // FIX: use the callback's codec argument instead of the outer
            // mCodec field, which may already be null after stopCodec().
            ByteBuffer inputBuffer = codec.getInputBuffer(index);
            // Presentation timestamp in microseconds on a fixed 25 fps timeline.
            long timestamp = mCount++ * 1000000 / 25;
            inputBuffer.clear();
            inputBuffer.put(nalu.data, 0, nalu.size);
            codec.queueInputBuffer(index, 0, nalu.size, timestamp, 0);
        }
    }

    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
        // true = render the decoded frame straight to the configured Surface.
        codec.releaseOutputBuffer(index, true);
    }

    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        Log.d(TAG, "Error: " + e);
    }

    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
        Log.d(TAG, "encoder output format changed: " + format);
    }
}
/**
 * Creates and starts an H.264 decoder bound to the TextureView's surface.
 * Called once per session, guarded by {@code isFirst}.
 */
private void initDecoder() {
    try {
        // Create a decoder for the requested MIME type (video/avc).
        mCodec = MediaCodec.createDecoderByType(MIME_TYPE);
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (mCodec == null) {
        // FIX: the original fell through after a creation failure and hit an
        // NPE on configure(); bail out instead.
        Log.e(TAG, "failed to create decoder for " + MIME_TYPE);
        return;
    }
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
    // Render through the TextureView's SurfaceTexture. (The SurfaceView path
    // would configure with holder.getSurface() instead.)
    SurfaceTexture texture = textureView.getSurfaceTexture();
    texture.setDefaultBufferSize(width, height);
    Surface surface0 = new Surface(texture);
    mCodec.configure(mediaFormat, surface0, null, 0); // decode straight to the surface
    // mCodec.setCallback(new DecoderCallback()); // async mode, currently disabled
    mCodec.start();
    isFirst = false;
}
/** Stops and releases the decoder; safe to call repeatedly. */
public void stopCodec() {
    if (mCodec == null) {
        // FIX: the original called stop() on a null codec and relied on the
        // catch block; treat "already stopped" as success so a new session
        // can re-initialize.
        isFirst = true;
        isFinish = true;
        return;
    }
    try {
        mCodec.stop();
        mCodec.release();
        isFirst = true;
        isFinish = true;
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Always drop the reference so startCodec() can rebuild cleanly.
        mCodec = null;
    }
}
int mCount = 0; // frames queued so far; drives the 25 fps presentation timestamp
/**
 * Pushes one H.264 access unit (start code + NALU) into the decoder and
 * drains any ready output buffers straight to the configured Surface.
 */
public void onFrame(byte[] buf, int offset, int length) {
    // dequeueInputBuffer timeout: -1 blocks forever, 0 returns immediately,
    // a positive value waits that many microseconds.
    int inputBufferIndex = mCodec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = mCodec.getInputBuffer(inputBufferIndex);
        inputBuffer.clear();
        inputBuffer.put(buf, offset, length);
        // Submit for decoding with a timestamp on a fixed 25 fps timeline (µs).
        long timestamp = mCount * 1000000 / 25;
        mCodec.queueInputBuffer(inputBufferIndex, 0, length, timestamp, 0);
        mCount++;
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0); //10
    // Release every output buffer that is already decoded; true = render it.
    while (outputBufferIndex >= 0) {
        mCodec.releaseOutputBuffer(outputBufferIndex, true);
        outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
}
/**
 * Scans data[offset..max) for an H.264 Annex-B start code and returns the
 * index of its first byte, or -1 when none is found.
 */
private int findHead(byte[] data, int offset, int max) {
    // isHead() inspects up to 5 bytes, so stop early enough to stay in range
    // (a head sitting in the last few bytes is found once more data arrives).
    for (int i = offset; i + 5 <= max; i++) {
        if (isHead(data, i)) {
            return i;
        }
    }
    // FIX: the original loop ran i <= max and then tested i == max, so a
    // failed scan returned max + 1 (a bogus "found" index) and a head located
    // exactly at max was reported as -1.
    return -1;
}

/**
 * True when data[offset] begins a start code (00 00 00 01 or 00 00 01)
 * followed by a recognised NAL header byte.
 */
private boolean isHead(byte[] data, int offset) {
    // 00 00 00 01 x
    // FIX: the original tested data[3] instead of data[offset + 3], so the
    // 4-byte start-code branch only worked by accident near offset 0.
    if (data[offset] == 0x00 && data[offset + 1] == 0x00
            && data[offset + 2] == 0x00 && data[offset + 3] == 0x01
            && isVideoFrameHeadType(data[offset + 4])) {
        return true;
    }
    // 00 00 01 x
    if (data[offset] == 0x00 && data[offset + 1] == 0x00
            && data[offset + 2] == 0x01 && isVideoFrameHeadType(data[offset + 3])) {
        return true;
    }
    return false;
}

/**
 * Recognised NAL header bytes: 0x65 IDR slice, 0x61/0x41 non-IDR slice,
 * 0x67 SPS, 0x68 PPS.
 */
private boolean isVideoFrameHeadType(byte head) {
    return head == (byte) 0x65 || head == (byte) 0x61 || head == (byte) 0x41
            || head == (byte) 0x67 || head == (byte) 0x68;
}
/**
 * Legacy file-decode path: reads the H.264 file in 10 KB chunks, splits the
 * stream into frames by scanning for start codes, and feeds each frame to
 * onFrame(). Superseded by doDecoder(), which onClick() actually invokes.
 */
public void doDecodec() {
    final File file = new File(path);
    Log.e(TAG," path = "+path);
    if (file.exists()) {
        try {
            new Thread() {
                public void run() {
                    // Stream the file and parse frames until EOF.
                    FileInputStream fis = null;
                    try {
                        fis = new FileInputStream(file);
                    } catch (FileNotFoundException e1) {
                        e1.printStackTrace();
                    }
                    // Timestamp of the previous frame, used to pace playback.
                    long startTime = System.currentTimeMillis();
                    frameNum = 0;
                    // Keep the minimum head-to-head gap small for the first few
                    // frames so tiny SPS/PPS units are found; widened afterwards.
                    FRAME_MIN_LEN = 8;
                    while (!isFinish) {
                        try {
                            if (fis.available() > 0)
                            {
                                int readLen = fis.read(readData);
                                // Only append while the accumulator has room.
                                if (frameLen + readLen < FRAME_MAX_LEN) {
                                    // Append the new chunk to frame[].
                                    System.arraycopy(readData, 0, frame, frameLen, readLen);
                                    frameLen += readLen;
                                    // Locate the first start code.
                                    headFirstIndex = findHead(frame, 0, frameLen);
                                    while (headFirstIndex >= 0 && isHead(frame, headFirstIndex)) {
                                        // Look for the next start code; the bytes
                                        // between the two heads form one frame.
                                        headSecondIndex = findHead(frame, headFirstIndex + FRAME_MIN_LEN, frameLen);
                                        if (headSecondIndex > 0 && isHead(frame, headSecondIndex)) {
                                            Log.e("MediacodecT"," frame length : " + (headSecondIndex - headFirstIndex));
                                            // Decode one complete frame.
                                            onFrame(frame, headFirstIndex, headSecondIndex - headFirstIndex);
                                            // Shift the unconsumed tail to the front of frame[].
                                            System.arraycopy(frame, headSecondIndex, frame, 0, frameLen-headSecondIndex);
                                            frameLen = frameLen - headSecondIndex;
                                            frameNum++;
                                            if(frameNum == 4) {
                                                FRAME_MIN_LEN = 1024;
                                            }
                                            // Sleep out the remainder of the 40 ms frame
                                            // period to approximate 25 fps playback.
                                            long time = PRE_FRAME_TIME - (System.currentTimeMillis() - startTime);
                                            if (time > 0) {
                                                try {
                                                    Thread.sleep(time);
                                                } catch (InterruptedException e) {
                                                    e.printStackTrace();
                                                }
                                            }
                                            startTime = System.currentTimeMillis();
                                            // Continue scanning for the next frame.
                                            headFirstIndex = findHead(frame, 0, frameLen);
                                        } else {
                                            // Second head not found yet; wait for more data.
                                            headFirstIndex = -1;
                                        }
                                    }
                                } else {
                                    // Accumulator overflow: drop buffered bytes and resync.
                                    frameLen = 0;
                                }
                            } else {
                                // End of file reached.
                                isFinish = true;
                            }
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                    // Reset the flag so another click can start a new pass.
                    isFinish = false;
                    Log.i(TAG, " frameNum "+frameNum);
                }
            }.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
NaluUnit nalu; // most recently parsed NAL unit, shared with DecoderCallback
/** Holder for one parsed NAL unit: raw bytes, valid length and NAL type. */
public class NaluUnit {
    byte[] data; // NOTE(review): fixed 20 KB capacity — a larger NALU would overflow; confirm input sizes
    int size;    // number of valid bytes in data
    int type;    // NAL unit type (low 5 bits of the header byte)
    public NaluUnit() {
        data = new byte[20*1024];
        size = 0;
    }
}
/**
 * Reads the H.264 file, splits it on 4-byte start codes (00 00 00 01) and
 * feeds each start-code-prefixed NALU to onFrame() at roughly 25 fps.
 * Unlike findHead()/isHead(), this parser only recognises the 4-byte
 * start-code form.
 */
public void doDecoder()
{
    final File file = new File(path);
    if(!file.exists() || !file.canRead()){
        Log.e(TAG,"failed to open h264 file.");
        return;
    }
    Log.e(TAG," path = "+path);
    Log.e(TAG," readH264FromFile ");
    new Thread()
    {
        public void run() {
            int readlen = 0;   // bytes currently valid in frame[]
            int writelen = 0;  // index of the first unconsumed byte
            int i = 0;         // scan cursor (just past a start code once matched)
            int pos = 0;       // cursor searching for the following start code
            boolean findFlag = false;
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(file);
            } catch (FileNotFoundException e1) {
                e1.printStackTrace();
            }
            frameNum = 0;
            long startTime = System.currentTimeMillis();
            while (!isFinish)
            {
                try {
                    if(fis.available() > 0) {
                        // Top up frame[] behind any carried-over partial NALU.
                        readlen = fis.read(frame, pos, frame.length-pos);
                        if(readlen<=0) {
                            break;
                        }
                        readlen += pos;
                        i = 0;
                        pos = 0;
                        writelen = readlen;
                        while(i < readlen-4) {
                            findFlag = false;
                            // Match a 4-byte start code; afterwards i points at
                            // the NAL header byte.
                            if(frame[i++] == 0x00 && frame[i++] == 0x00 && frame[i++] == 0x00 && frame[i++] == 0x01)
                            {
                                pos = i;
                                // Find the next start code to delimit this NALU.
                                while (pos < readlen-4) {
                                    if(frame[pos++] == 0x00 && frame[pos++] == 0x00 && frame[pos++] == 0x00 && frame[pos++] == 0x01) {
                                        findFlag = true;
                                        break;
                                    }
                                }
                                if(findFlag){
                                    // NOTE(review): nalu.size excludes the start code, yet
                                    // the copy below begins at i-4 (start code included), so
                                    // nalu.data holds the start code plus only the first
                                    // size-4 payload bytes. Harmless while DecoderCallback
                                    // stays disabled — confirm before enabling async mode.
                                    nalu.size = pos-i;
                                    nalu.type = frame[i]&0x1f;
                                    System.arraycopy(frame, i-4, nalu.data, 0, nalu.size);
                                    Log.i(TAG," nalu type = "+ nalu.type+", nalu.size = "+nalu.size);
                                    onFrame(frame, i-4, pos-i); // start code + NALU into the decoder
                                    frameNum++;
                                    // Pace output to ~25 fps.
                                    long time = PRE_FRAME_TIME - (System.currentTimeMillis() - startTime);
                                    if (time > 0) {
                                        try {
                                            Thread.sleep(time);
                                        } catch (InterruptedException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                    startTime = System.currentTimeMillis();
                                    // Resume scanning at the second start code.
                                    i = pos-4;
                                    writelen = i;
                                }else {
                                    // Incomplete NALU at buffer end; keep it from its start code.
                                    writelen = i-4;
                                    break;
                                }
                            }
                        }
                        // Move the unconsumed tail to the front of frame[].
                        if(writelen>0 && writelen<readlen) {
                            System.arraycopy(frame, writelen, frame, 0, readlen-writelen);
                            Log.i(TAG, " readlen = "+readlen+", writelen = "+writelen);
                        }
                        pos = readlen-writelen;
                    }else {
                        // End of file.
                        isFinish = true;
                    }
                } catch (IOException e) {
                    Log.e(TAG, " error = "+e.getMessage());
                    e.printStackTrace();
                }
            }
            // Reset the flag so another click can restart playback.
            isFinish = false;
            Log.i(TAG, " frameNum "+frameNum);
        }
    }.start();
}
}
布局文件:
<!-- Demo layout for H264FileDecodeActivity: a SurfaceView (top) and a
     TextureView (below it) as candidate render targets, plus a bottom-centre
     button that starts file decoding.
     NOTE(review): fixed 640x480 px sizes ignore screen density; dp units
     would scale better — confirm the intended preview size. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent" >
    <SurfaceView
        android:id="@+id/surfaceview"
        android:layout_width="640px"
        android:layout_height="480px" />
    <TextureView
        android:id="@+id/textureview"
        android:layout_below="@id/surfaceview"
        android:layout_width="640px"
        android:layout_height="480px"
        android:layout_marginTop="20dp"/>
    <Button
        android:id="@+id/takePhoto"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:background="@drawable/btn_camera_all_click"
        android:layout_alignParentBottom="true"
        android:layout_centerHorizontal="true"
        android:layout_marginBottom="10dip"/>
</RelativeLayout>
效果图: