Android MediaCodec h264硬件编码


使用之前需要了解 MediaCodec 支持哪些编解码器,以及对应的编码器支持的输入格式。

用下面这个函数检查是否支持需要的编码器,如 "video/avc":

/**
 * Returns the first platform codec that is an ENCODER and advertises the
 * given MIME type (e.g. "video/avc"), or null when none is available.
 */
private static MediaCodecInfo selectCodec(String mimeType) {
    final int count = MediaCodecList.getCodecCount();
    for (int idx = 0; idx < count; idx++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(idx);
        // Decoders also appear in the list; we only want encoders.
        if (!info.isEncoder()) {
            continue;
        }
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mimeType)) {
                return info;
            }
        }
    }
    return null;
}

下面这个函数打印一下该编码器支持的颜色格式:


     */private static void printColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
             switch (colorFormat) {
            // these are the formats we know how to handle for this testcase MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
				Log.e(TAG,"COLOR_FormatYUV420PackedPlanar" );
				break;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
				Log.e(TAG,"COLOR_FormatYUV420SemiPlanar");
				break;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
			Log.e(TAG,"COLOR_FormatYUV420PackedSemiPlanar");
				break;
         
            default:
                Log.e(TAG,""+colorFormat);
			}
        }
       


这里我的平台我查询出支持的格式有:

video/avc 支持

MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar

MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar


下面实现了一个将COLOR_FormatYUV420Planar格式编码成H264 然后通过UDP发送的类:

package com.ist;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;

import com.ist.h264.MainActivity;

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.Log;



/**
 * Pulls raw I420 frames from the native camera (com.ist.Camera JNI),
 * encodes them to H.264 with MediaCodec, and streams the elementary
 * stream over UDP. Optionally dumps the raw stream to /sdcard/test1.h264.
 *
 * Lifecycle: new AvcEncoder(...) -> initCamera() -> StartEncoderThread()
 *            -> StopEncoderThread() -> uninitCamera() -> releaseAvEncoder().
 */
public class AvcEncoder 
{
	private final static String TAG = "AvcEncoder";
	
	// Timeout (microseconds) for dequeueing codec buffers.
	private int TIMEOUT_USEC = 12000;

	private MediaCodec mediaCodec;
	private int m_width;
	private int m_height;
	private int m_framerate;
	// Cached codec-config data (SPS/PPS); prepended to every key frame so a
	// receiver that joins mid-stream can start decoding.
	public byte[] configbyte;
	
	// Whether the native camera opened successfully.
	private boolean cameraIsOK = false;
	
	// Written from UI thread, read in the encoder loop -> must be volatile
	// or the loop may never observe the stop request.
	public volatile boolean isRuning = false;
	
	// UDP transport for the encoded stream.
	private DatagramSocket socket;  
	private InetAddress address;   
	private int port;
    
	// Optional raw .h264 dump to external storage.
	private boolean save_h264 = false;
	private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
	private BufferedOutputStream outputStream;
	
	private MediaFormat mediaFormat;

	/**
	 * @param width     frame width in pixels
	 * @param height    frame height in pixels
	 * @param framerate capture/encode frame rate (fps)
	 * @param bitrate   target bit rate in bits per second
	 * @param ip        destination host for the UDP stream
	 * @param port      destination UDP port
	 */
	@SuppressLint("NewApi")
	public AvcEncoder(int width, int height, int framerate, int bitrate, String ip, int port) { 
		m_width = width; 
		m_height = height;
		m_framerate = framerate;
		this.port = port;
	
		mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
		mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
		// FIX: the original hard-coded the bit rate (width*height*5) and
		// frame rate (30), silently ignoring the constructor parameters.
		mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
		mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
		mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
	 
		if (save_h264) {
			createfile();
		}
	    
		try {
			socket = new DatagramSocket();  
			address = InetAddress.getByName(ip); 
		} catch (UnknownHostException e1) {
			e1.printStackTrace();
		} catch (SocketException e) {
			e.printStackTrace();
		}  
	}

	/**
	 * Enables or disables dumping the raw H.264 stream to external storage.
	 * FIX: the original only opened the dump file in the constructor, so
	 * enabling saving afterwards caused an NPE on outputStream.
	 */
	public void enableSaveFile(boolean save) {
		save_h264 = save;
		if (save && outputStream == null) {
			createfile();
		}
	}

	/**
	 * Opens the native camera and starts the MediaCodec encoder.
	 * @return 0 on camera success, non-zero on failure (JNI return code).
	 */
	@SuppressLint("NewApi")
	public int initCamera(int id) {
		int ret = com.ist.Camera.prepareCamera(id, m_width, m_height, m_framerate);
		cameraIsOK = (ret == 0);
		System.out.println("cameraIsOK " + cameraIsOK);

		mediaCodec = MediaCodec.createEncoderByType("video/avc");
		mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
		mediaCodec.start();
		return ret;
	}

	/** Stops the native camera and the encoder. */
	@SuppressLint("NewApi")
	public int uninitCamera() {
		cameraIsOK = false;
		com.ist.Camera.stopCamera();
		// Guard against being called before initCamera().
		if (mediaCodec != null) {
			mediaCodec.stop();
		}
		return 0;
	}

	/** Releases the dump file, the UDP socket, and the codec. */
	@SuppressLint("NewApi")
	public int releaseAvEncoder() {
		try {
			if (save_h264 && outputStream != null) {
				outputStream.flush();
				outputStream.close();
			}
		} catch (Exception e) { 
			e.printStackTrace();
		}
		socket.close();
		if (mediaCodec != null) {
			mediaCodec.release();
		}
		return 0;
	}

	/**
	 * Spawns the capture/encode/send loop. Returns -1 (without starting)
	 * when the camera is not available, 0 otherwise.
	 */
	public int StartEncoderThread() {
		if (!cameraIsOK) {
			System.out.println("return -1");
			return -1;
		}
		isRuning = false;
		Thread EncoderThread = new Thread(new Runnable() {

			@SuppressLint("NewApi")
			@Override 
			public void run() {
				isRuning = true;
				long pts = 0;
				long generateIndex = 0;
				// One I420 frame: width*height luma bytes plus half that
				// again for the two chroma planes.
				byte[] yuvFrame = new byte[m_width * m_height * 3 / 2];

				while (isRuning) {
					com.ist.Camera.processCamera(yuvFrame);
					try {
						ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
						ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
						// FIX: the original passed -1 (block forever); a
						// stalled encoder made the thread impossible to stop.
						int inputBufferIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
						if (inputBufferIndex >= 0) {
							pts = computePresentationTime(generateIndex);
							ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
							inputBuffer.clear();
							inputBuffer.put(yuvFrame);
							mediaCodec.queueInputBuffer(inputBufferIndex, 0, yuvFrame.length, pts, 0);
							generateIndex += 1;
						}

						MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
						int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
						while (outputBufferIndex >= 0) {
							ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
							byte[] outData = new byte[bufferInfo.size];
							outputBuffer.get(outData);
							// FIX: flags is a bit mask; the original compared
							// it with == 2 / == 1, which breaks when flags
							// are combined (e.g. with BUFFER_FLAG_END_OF_STREAM).
							if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
								configbyte = outData; // SPS/PPS
							} else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0
									&& configbyte != null) {
								// Prepend SPS/PPS to key frames (configbyte
								// null-check avoids the original's NPE risk).
								byte[] keyframe = new byte[outData.length + configbyte.length];
								System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
								System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
								writeFrame(keyframe);
								sendFrame(keyframe);
							} else {
								writeFrame(outData);
								sendFrame(outData);
							}

							mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
							outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
						}
					} catch (Throwable t) {
						t.printStackTrace();
					}
				}
			}
		});
		EncoderThread.start();
		return 0;
	}

	// Appends one encoded frame to the dump file when saving is enabled.
	private void writeFrame(byte[] data) {
		if (save_h264 && outputStream != null) {
			try {
				outputStream.write(data, 0, data.length);
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	// Sends one encoded frame as a single UDP datagram.
	private void sendFrame(byte[] data) {
		try {
			DatagramPacket packet = new DatagramPacket(data, data.length, address, port);
			socket.send(packet);
		} catch (IOException e) {
			System.out.println(e.getMessage());
		}
	}

	/** Signals the encoder loop to stop, then flushes the dump file. */
	public void StopEncoderThread() {
		isRuning = false;

		// Give the encoder thread time to observe the flag and finish its pass.
		try {
			Thread.sleep(1000);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		}

		// FIX: the original flushed BEFORE the thread had stopped writing.
		if (save_h264 && outputStream != null) {
			try {
				outputStream.flush();
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Generates the presentation time for frame N, in microseconds.
	 */
	private long computePresentationTime(long frameIndex) {
		return 132 + frameIndex * 1000000 / m_framerate;
	}

	// (Re)creates the .h264 dump file, deleting any previous run's output.
	private void createfile() {
		File file = new File(path);
		if (file.exists()) {
			file.delete();
		}
		try {
			outputStream = new BufferedOutputStream(new FileOutputStream(file));
		} catch (Exception e) { 
			e.printStackTrace();
		}
	}
}

引用到了之前做的JNI代码:

package com.ist;

import android.graphics.Bitmap;

public class Camera {

	
    // Opens the V4L2/UVC camera via the native "ImageProc" library.
    // Returns 0 on success, non-zero on failure (see native source).
    public native static int prepareCamera(int videoid,int width,int height,int framerate);
    // Fills yuyv_img with one captured frame. The caller allocates the
    // buffer; its expected size depends on the native capture format —
    // NOTE(review): confirm against the JNI implementation.
    public native static void processCamera(byte[] yuyv_img );
    // Stops capture and releases the native camera.
    public native static void stopCamera();
    // Converts the last captured frame into the given Bitmap for preview.
    public native static void pixeltobmp(Bitmap bitmap);
    static {
        System.loadLibrary("ImageProc");
    } 
	
}


最后如何调用,看一下:

package com.ist.h264;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import com.ist.AvcEncoder;
import com.ist.Camera;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener; 
import android.widget.Toast;


/**
 * Demo activity driving AvcEncoder: three buttons start, pause, and
 * release the camera/encoder pipeline.
 */
public class MainActivity extends Activity implements OnClickListener {
	static final String tag = "MainActivity";
	private int cameraId = 0;

	static final int IMG_WIDTH = 640;
	static final int IMG_HEIGHT = 360;
	
	private AvcEncoder avcEncoder; 
 
	int framerate = 10;
	int bitrate = 2500000; 
    
	// Whether the camera has been initialized successfully.
	boolean camera_ok = false;
	// Whether the encoder is released (or was never created).
	boolean isRelease = true;

	DatagramSocket socket;  
	InetAddress address;    

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_main);
		findViewById(R.id.bt_pause).setOnClickListener(this);
		findViewById(R.id.bt_start).setOnClickListener(this);
		findViewById(R.id.bt_relese).setOnClickListener(this);
	}

	@Override
	public void onClick(View view) {
		int id = view.getId();
		int ret = 0;
		switch (id) {
		case R.id.bt_start:
			// Lazily (re)create the encoder after a release.
			if (isRelease) {
				System.out.println("AvcEncoder");
				avcEncoder = new AvcEncoder(IMG_WIDTH, IMG_HEIGHT, framerate, bitrate, "192.168.10.2", 5000);
				isRelease = false;
			}  
			if (!camera_ok) {  
				System.out.println("init camera..");
				ret = avcEncoder.initCamera(cameraId);
				if (ret != 0) {
					camera_ok = false;
					Toast.makeText(MainActivity.this, "摄像头初始化失败", Toast.LENGTH_SHORT).show();
					return;
				}
				camera_ok = true;
			}
			System.out.println("start...");
			avcEncoder.StartEncoderThread(); 
			break;
			
		case R.id.bt_pause:
			// FIX: the original NPE'd when pause was tapped before start.
			if (avcEncoder != null) {
				avcEncoder.StopEncoderThread();
			}
			break;

		case R.id.bt_relese:
			// FIX: the original NPE'd when release was tapped before start.
			if (avcEncoder == null) {
				break;
			}
			avcEncoder.StopEncoderThread();
			if (camera_ok) {
				// FIX: use uninitCamera() so the MediaCodec is stopped too;
				// the original only stopped the native camera.
				avcEncoder.uninitCamera();
				camera_ok = false;
			}
			if (!isRelease) {
				avcEncoder.releaseAvEncoder();
			}
			isRelease = true;
			break;

		default:
			break;
		}
	}
}
	 	
	 


这里最需要注意的就是输入缓冲区数据的长度:过长会报 stack overflow。应该手工计算一下(比如 I420 格式)一帧图像有多大,并用二进制工具查看采集的图像是否含有 0x00 填充的部分。


然后我们可以用VLC播放器来看看视频流是不是正常播放


先设置一下视频解复用器,选择 H264



然后打开网络流:

如 udp://@:5000






本文获取图像的代码封装详解见下面这个链接:

http://blog.csdn.net/zmnqazqaz/article/details/51601545


代码下载地址:

http://download.csdn.net/detail/zmnqazqaz/9543285




  • 1
    点赞
  • 33
    收藏
    觉得还不错? 一键收藏
  • 4
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 4
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值