一、代码分析
1、创建mediacodec并初始化
可通过createEncoderByType来创建编码用的mediacodec(若是解码场景,则使用对应的createDecoderByType):
// 创建 MediaCodec,此时是 Uninitialized 状态
mediaCodec = MediaCodec.createEncoderByType("video/avc");
上面表示创建了一个编码器,但是还需要对这个编码器进行一些配置,包括格式,码率帧率等等,如下:
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar); //颜色格式
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width*height*5); //码率
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30); //帧率
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); //I 帧间隔
设置好相关配置格式后,调用configure进行配置。
// 调用 configure 进入 Configured 状态
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
配置好后,可以调用start启动编码工作,进入Executing状态。不过一开始调用start后是先进入Executing的子状态Flushed状态,等第一个输入缓冲区通过dequeueInputBuffer被取出后,才进入Running状态:
// 调用 start 进入 Executing 状态,开始准备编解码工作
mediaCodec.start();
2、编码线程
编码线程是真正的编码过程,本例子是将数据编码为h264。
在启动线程之前,先创建个文件,用来保存编码后的h264数据,如下:
private BufferedOutputStream outputStream;
FileOutputStream outStream;
private void createfile(String path){
File file = new File(path);
Log.d(TAG,"createfile path = "+path);
if(file.exists()){
file.delete();
}
try {
outputStream = new BufferedOutputStream(new FileOutputStream(file));
} catch (Exception e){
e.printStackTrace();
}
}
获取可用的输入缓冲区的索引:
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
获取输入缓冲区:
// 输入缓冲区
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
从输入缓冲区队列中取出可用缓冲区,并填充数据:
// 从输入缓冲区队列中取出可用缓冲区,并填充数据
if (inputBufferIndex >= 0) {
// 计算时间戳
pts = computePresentationTime(generateIndex);
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
generateIndex += 1;
}
创建输出缓冲区:
//输出缓冲区
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
获取已成功编解码的输出缓冲区的索引:
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
从输出缓冲区队列中拿到编解码后的内容,配置相关内容,包括SPS,PPS等,然后进行相应操作(这里是写入output h264文件)后释放,供下一次使用:
// 从输出缓冲区队列中拿到编解码后的内容,进行相应操作(这里是写入output h264文件)后释放,供下一次使用
while (outputBufferIndex >= 0) {
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
// flags 判断
if(bufferInfo.flags == 2){ // 配置相关的内容,也就是 SPS,PPS
configbyte = new byte[bufferInfo.size];
configbyte = outData;
}else if(bufferInfo.flags == 1){ //关键帧
byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
outputStream.write(keyframe, 0, keyframe.length);
}else{ // 非关键帧和SPS、PPS,直接写入文件,可能是B帧或者P帧
outputStream.write(outData, 0, outData.length);
}
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
}
完整编码线程代码如下:
//编码子线程
public void StartEncoderThread(){
Thread EncoderThread = new Thread(new Runnable() {
@SuppressLint("NewApi")
@Override
public void run() {
isRuning = true;
byte[] input = null;
long pts = 0;
long generateIndex = 0;
while (isRuning) {
if (MainActivity.YUVQueue.size() >0){
input = MainActivity.YUVQueue.poll();
byte[] yuv420sp = new byte[m_width*m_height*3/2];
NV21ToNV12(input,yuv420sp,m_width,m_height);
input = yuv420sp;
}
if (input != null) {
try {
long startMs = System.currentTimeMillis();
// 输入缓冲区
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
// 输出缓冲区
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
// 从输入缓冲区队列中取出可用缓冲区,并填充数据
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
// 计算时间戳
pts = computePresentationTime(generateIndex);
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
generateIndex += 1;
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
// 从输出缓冲区队列中拿到编解码后的内容,进行相应操作(这里是写入output h264文件)后释放,供下一次使用
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
while (outputBufferIndex >= 0) {
//Log.d(TAG, "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+"");
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
// flags 判断
if(bufferInfo.flags == 2){ // 配置相关的内容,也就是 SPS,PPS
configbyte = new byte[bufferInfo.size];
configbyte = outData;
}else if(bufferInfo.flags == 1){ //关键帧
byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
outputStream.write(keyframe, 0, keyframe.length);
}else{ // 非关键帧和SPS、PPS,直接写入文件,可能是B帧或者P帧
outputStream.write(outData, 0, outData.length);
}
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
}
} catch (Throwable t) {
t.printStackTrace();
}
} else {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
});
EncoderThread.start();
}
3、编码结束关闭回收
完成编码后,对相关内存回收和线程资源等关闭处理:
private void StopEncoder() {
try {
// 调用 stop 方法进入 Uninitialized 状态
mediaCodec.stop();
// 调用 release 方法释放,结束操作
mediaCodec.release();
} catch (Exception e){
e.printStackTrace();
}
}
public void StopThread(){
isRuning = false;
try {
StopEncoder();
outputStream.flush();
outputStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
至此,整个编码为h264文件的过程就结束了。
二、完整的Demo代码
AvcEncoder.java
package com.example.mediacodec_encodeh264;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
public class AvcEncoder
{
private static final String TAG = "weekend";
private int TIMEOUT_USEC = 12000;
private MediaCodec mediaCodec;
int m_width;
int m_height;
int m_framerate;
byte[] m_info = null;
public byte[] configbyte;
@SuppressLint("NewApi")
public AvcEncoder(int width, int height, int framerate, int bitrate, String path) {
m_width = width;
m_height = height;
m_framerate = framerate;
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar); //颜色格式
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width*height*5); //码率
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30); //帧率
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); //I 帧间隔
try {
// 创建 MediaCodec,此时是 Uninitialized 状态
mediaCodec = MediaCodec.createEncoderByType("video/avc");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// 调用 configure 进入 Configured 状态
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// 调用 start 进入 Executing 状态,开始编解码工作
mediaCodec.start();
createfile(path);
}
private BufferedOutputStream outputStream;
FileOutputStream outStream;
private void createfile(String path){
File file = new File(path);
Log.d(TAG,"createfile path = "+path);
if(file.exists()){
file.delete();
}
try {
outputStream = new BufferedOutputStream(new FileOutputStream(file));
} catch (Exception e){
e.printStackTrace();
}
}
@SuppressLint("NewApi")
private void StopEncoder() {
try {
// 调用 stop 方法进入 Uninitialized 状态
mediaCodec.stop();
// 调用 release 方法释放,结束操作
mediaCodec.release();
} catch (Exception e){
e.printStackTrace();
}
}
ByteBuffer[] inputBuffers;
ByteBuffer[] outputBuffers;
public boolean isRuning = false;
public void StopThread(){
isRuning = false;
try {
StopEncoder();
outputStream.flush();
outputStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
int count = 0;
//编码子线程
public void StartEncoderThread(){
Thread EncoderThread = new Thread(new Runnable() {
@SuppressLint("NewApi")
@Override
public void run() {
isRuning = true;
byte[] input = null;
long pts = 0;
long generateIndex = 0;
while (isRuning) {
if (MainActivity.YUVQueue.size() >0){
input = MainActivity.YUVQueue.poll();
byte[] yuv420sp = new byte[m_width*m_height*3/2];
NV21ToNV12(input,yuv420sp,m_width,m_height);
input = yuv420sp;
}
if (input != null) {
try {
long startMs = System.currentTimeMillis();
// 输入缓冲区
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
// 输出缓冲区
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
// 从输入缓冲区队列中取出可用缓冲区,并填充数据
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
// 计算时间戳
pts = computePresentationTime(generateIndex);
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
generateIndex += 1;
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
// 从输出缓冲区队列中拿到编解码后的内容,进行相应操作(这里是写入output h264文件)后释放,供下一次使用
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
while (outputBufferIndex >= 0) {
//Log.d(TAG, "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+"");
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
// flags 判断
if(bufferInfo.flags == 2){ // 配置相关的内容,也就是 SPS,PPS
configbyte = new byte[bufferInfo.size];
configbyte = outData;
}else if(bufferInfo.flags == 1){ //关键帧
byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
outputStream.write(keyframe, 0, keyframe.length);
}else{ // 非关键帧和SPS、PPS,直接写入文件,可能是B帧或者P帧
outputStream.write(outData, 0, outData.length);
}
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
}
} catch (Throwable t) {
t.printStackTrace();
}
} else {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
});
EncoderThread.start();
}
private void NV21ToNV12(byte[] nv21,byte[] nv12,int width,int height){
if(nv21 == null || nv12 == null) {
return;
}
int framesize = width*height;
int i = 0,j = 0;
System.arraycopy(nv21, 0, nv12, 0, framesize);
for(i = 0; i < framesize; i++){
nv12[i] = nv21[i];
}
for (j = 0; j < framesize/2; j+=2)
{
nv12[framesize + j-1] = nv21[j+framesize];
}
for (j = 0; j < framesize/2; j+=2)
{
nv12[framesize + j] = nv21[j+framesize-1];
}
}
/**
* Generates the presentation time for frame N, in microseconds.
*/
private long computePresentationTime(long frameIndex) {
return 132 + frameIndex * 1000000 / m_framerate;
}
}
MainActivity.java
package com.example.mediacodec_encodeh264;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import android.os.Bundle;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
public class MainActivity extends Activity implements SurfaceHolder.Callback,PreviewCallback{
private static final String TAG = "weekend";
private SurfaceView surfaceview;
private SurfaceHolder surfaceHolder;
private Button mBtnPlay;
private boolean mWorking = false;
private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/TestOutput.h264";
private Camera camera;
private Parameters parameters;
int width = 640;
int height = 480;
int framerate = 24;
int biterate = 8500*1000;
private static int yuvqueuesize = 10;
public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize);
private AvcEncoder avcCodec;
private final static int CAMERA_OK = 10001;
private static String[] PERMISSIONS_STORAGE = {
"android.permission.CAMERA",
"android.permission.WRITE_EXTERNAL_STORAGE" };
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceview = findViewById(R.id.surfaceview);
SupportAvcCodec();
if (Build.VERSION.SDK_INT>22) {
if (!checkPermissionAllGranted(PERMISSIONS_STORAGE)){
ActivityCompat.requestPermissions(MainActivity.this,
PERMISSIONS_STORAGE, CAMERA_OK);
}else{
init();
}
}else{
init();
}
//init
mBtnPlay = (Button) findViewById(R.id.btnStartVideo);
mWorking = false;
mBtnPlay.setText("start");
mBtnPlay.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if(mWorking){
stopWork();
mWorking = false;
mBtnPlay.setText("start");
showSaveFilePath();
}else{
startWork();
mWorking = true;
mBtnPlay.setText("stop");
}
}
});
}
});
}
private void init(){
surfaceHolder = surfaceview.getHolder();
surfaceHolder.addCallback(this);
}
private boolean checkPermissionAllGranted(String[] permissions) {
for (String permission : permissions) {
if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
// 只要有一个权限没有被授予, 则直接返回 false
return false;
}
}
return true;
}
@Override
public void onRequestPermissionsResult(int requestCode,String[] permissions,int[] grantResults) {
switch (requestCode) {
case CAMERA_OK:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
//这里已经获取到了摄像头的权限
init();
} else {
showWaringDialog();
}
break;
default:
break;
}
}
private void showWaringDialog() {
AlertDialog dialog = new AlertDialog.Builder(this)
.setTitle("警告!")
.setMessage("请前往设置->应用->PermissionDemo->权限中打开相关权限,否则功能无法正常运行!")
.setPositiveButton("确定", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// 一般情况下如果用户不授权的话,功能是无法运行的,做退出处理
finish();
}
}).show();
}
private void showSaveFilePath() {
AlertDialog dialog = new AlertDialog.Builder(this)
.setTitle("完成!")
.setMessage("编码后的文件存放在 "+path)
.setPositiveButton("确定", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
}).show();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stopWork();
}
private void startWork(){
camera = getBackCamera();
startcamera(camera);
avcCodec = new AvcEncoder(this.width,this.height,framerate,biterate,path);
avcCodec.StartEncoderThread();
}
private void stopWork(){
if (null != camera) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
avcCodec.StopThread();
}
}
@Override
public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
// TODO Auto-generated method stub
putYUVData(data,data.length);
}
public void putYUVData(byte[] buffer, int length) {
if (YUVQueue.size() >= 10) {
YUVQueue.poll();
}
YUVQueue.add(buffer);
}
@SuppressLint("NewApi")
private boolean SupportAvcCodec(){
if(Build.VERSION.SDK_INT>=18){
for(int j = MediaCodecList.getCodecCount() - 1; j >= 0; j--){
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(j);
String[] types = codecInfo.getSupportedTypes();
for (int i = 0; i < types.length; i++) {
if (types[i].equalsIgnoreCase("video/avc")) {
return true;
}
}
}
}
return false;
}
private void startcamera(Camera mCamera){
if(mCamera != null){
try {
mCamera.setPreviewCallback(this);
mCamera.setDisplayOrientation(90);
if(parameters == null){
parameters = mCamera.getParameters();
}
parameters = mCamera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
parameters.setPreviewSize(width, height);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
@TargetApi(9)
private Camera getBackCamera() {
Camera c = null;
try {
c = Camera.open(0); // attempt to get a Camera instance
} catch (Exception e) {
e.printStackTrace();
}
return c; // returns null if camera is unavailable
}
}
activity_main.xml
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<SurfaceView
android:id="@+id/surfaceview"
android:layout_width="match_parent"
android:layout_marginBottom="60dp"
android:layout_height="match_parent"/>
<Button
android:id="@+id/btnStartVideo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_alignParentBottom="true"
android:text="Start"/>
</RelativeLayout>
AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.mediacodec_encodeh264"
android:versionCode="1"
android:versionName="1.0" >
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.CAMERA" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:theme="@style/Theme.MediaCodec_EncodeH264">
<activity
android:name=".MainActivity"
android:label="@string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>