Overview
In the previous article, Android Audio/Video Recording Overview, I already described the general principles of Surface-based recording; if you haven't read it yet, please check it out first.
A follow-up article, Android Audio/Video Recording (2): Buffer Recording, will cover buffer-based recording.
Surface-based video recording consists of the following parts:
1. Camera preview
Including A: camera configuration; B: OpenGL drawing based on GLSurfaceView
2. Encoding the data
Including A: encoder configuration; B: EGL configuration; C: OpenGL drawing onto the encoder's Surface
3. Muxing the video data (output to file; see the muxer sketch after this list)
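The muxing in step 3 is handled in the code below by the MMuxer class, whose internals are not shown in this article. As a hedged sketch, assuming MMuxer is a thin wrapper around Android's MediaMuxer (the class and variable names here are placeholders of mine, not the project's):

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import java.io.IOException;
import java.nio.ByteBuffer;

// Hypothetical sketch of the muxing step; the real MMuxer is assumed to wrap these calls.
final class MuxerSketch {
    static void mux(String path, MediaFormat videoFormat, MediaFormat audioFormat,
                    ByteBuffer sample, MediaCodec.BufferInfo info) throws IOException {
        MediaMuxer muxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        int videoTrack = muxer.addTrack(videoFormat);// formats come from each encoder's
        int audioTrack = muxer.addTrack(audioFormat);// INFO_OUTPUT_FORMAT_CHANGED step
        muxer.start();// may only be called after ALL tracks have been added
        // for every encoded buffer drained from an encoder:
        muxer.writeSampleData(videoTrack, sample, info);
        // ...(same for audioTrack)...
        muxer.stop();
        muxer.release();
    }
}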
Audio recording consists of the following parts:
1. Audio capture thread (see the capture sketch after this list)
2. Audio data encoding
3. Muxing the audio data (output to file)
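The audio-side code is not included in this section, so here is a minimal sketch of the capture thread in step 1, assuming the usual AudioRecord-based setup; all names are placeholders and the real project's AudioEncoder API may differ:

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

// Hypothetical capture thread: reads PCM from the microphone and hands it to the encoder.
final class AudioCaptureSketch implements Runnable {
    private static final int SAMPLE_RATE = 44100;
    private volatile boolean mRunning = true;

    @Override
    public void run() {
        final int minBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        final AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBuf * 2);
        final byte[] pcm = new byte[minBuf];
        record.startRecording();
        try {
            while (mRunning) {
                final int read = record.read(pcm, 0, pcm.length);
                if (read > 0) {
                    // step 2: feed this PCM chunk to the audio encoder, e.g.
                    // audioEncoder.encode(pcm, read, System.nanoTime() / 1000);
                }
            }
        } finally {
            record.stop();
            record.release();
        }
    }

    void quit() { mRunning = false; }
}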
In the code walkthrough I only cover the key code; I will upload the full project to Git later, so please download it and read the rest yourself. I like to put the explanations in the code comments, so reading the comments is usually enough to follow the logic.
Video Recording
Camera Preview
Overall flow: when the Activity starts, the GLSurfaceView is created. In its creation callback we set up the OpenGL environment (i.e. load the vertex and fragment shaders) and obtain the texture to render into (a SurfaceTexture), bind that texture to the GLSurfaceView, and then create the OpenGL drawer. Finally we initialize the camera, binding it to the texture during initialization, and start the preview. That is the whole preview flow; its core is wiring together the camera (Camera), the OpenGL texture (SurfaceTexture), and the GLSurfaceView. There are of course many details along the way, which the code comments make clear (a condensed sketch of the binding chain follows).
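To make the flow concrete before walking through the full class, here is a condensed, hedged sketch of just the binding chain; the helper class and method names are mine, and the real implementation is the code below:

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.io.IOException;

// Hypothetical helper illustrating Camera -> SurfaceTexture -> GLSurfaceView.
final class PreviewChainSketch {
    // Create the external OES texture that the camera frames will land in.
    static int createOesTexture() {
        final int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        return tex[0];
    }

    // Must run on the GL thread (e.g. inside onSurfaceCreated).
    static SurfaceTexture connect(Camera camera, int texId) throws IOException {
        final SurfaceTexture st = new SurfaceTexture(texId);// OpenGL texture -> SurfaceTexture
        camera.setPreviewTexture(st);                       // Camera -> SurfaceTexture
        camera.startPreview();
        // Each frame: call st.updateTexImage() on the GL thread, then draw texId
        // onto the GLSurfaceView with the shader program.
        return st;
    }
}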
package lda.com.camerasurfacerecorder;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.WindowManager;
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* GLSurfaceView subclass hosting the camera preview
*/
public class CameraGLSurfaceView extends GLSurfaceView {
private static final int CAMERA_ID = 0;
private Context mContext;
private static final String TAG = CameraGLSurfaceView.class.getSimpleName();
private SurfaceRenderer mRenderer;//OpenGL renderer
private Camera mCamera;
private int mRotation;
private boolean mIsFrontFace;
private int mVideoWidth = Config.VIDEO_WIDTH, mVideoHeight = Config.VIDEO_HEIGHT;
public CameraGLSurfaceView(Context context) {
super(context);
init(context);
}
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
@TargetApi(8)
private void init(Context context) {
mContext = context;
mRenderer = new SurfaceRenderer(this);
// GLES 2.0, API >= 8
setEGLContextClientVersion(2);
setRenderer(mRenderer);
/* // RENDERMODE_WHEN_DIRTY reduces performance cost
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); */
}
/**
* @param holder
*/
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stopPreview();
mRenderer.onSurfaceDestroy();
super.surfaceDestroyed(holder);
}
public void startPreview(int width, int height){
// Ignore the incoming size and use the configured one; width and height are
// swapped here because camera preview sizes are reported in landscape orientation.
width = Config.VIDEO_HEIGHT;
height = Config.VIDEO_WIDTH;
initCamera(width, height);
if(mCamera == null){
return;
}
try {
final Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
Log.i(TAG, String.format("previewSize(%d, %d)", previewSize.width, previewSize.height));
setVideoSize(previewSize.width, previewSize.height);
final SurfaceTexture st = mRenderer.getSurfaceTexture();
st.setDefaultBufferSize(previewSize.width, previewSize.height);
mCamera.setPreviewTexture(st);//bind the camera to the OpenGL texture
//start the camera preview (mCamera was already null-checked above)
mCamera.startPreview();
}catch (Exception e){
Log.e(TAG, "startPreview:", e);
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
/**
* Initialize the camera
* @param width
* @param height
*/
private void initCamera(int width, int height) {
Log.d(TAG, "initCamera:");
if (mCamera == null) {
try {
mCamera = Camera.open(CAMERA_ID);
final Camera.Parameters params = mCamera.getParameters();
final List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if(focusModes
.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
} else {
Log.i(TAG, "Camera does not support autofocus");
}
final List<int[]> supportedFpsRange = params.getSupportedPreviewFpsRange();
final int[] max_fps = supportedFpsRange.get(supportedFpsRange.size() - 1);
params.setPreviewFpsRange(max_fps[0], max_fps[1]);
params.setRecordingHint(true);
final Camera.Size closestSize = getClosestSupportedSize(params.getSupportedPreviewSizes(), width, height);
params.setPreviewSize(closestSize.width, closestSize.height);
final Camera.Size pictureSize = getClosestSupportedSize(params.getSupportedPictureSizes(), width, height);
params.setPictureSize(pictureSize.width, pictureSize.height);
//adjust the camera orientation
setRotation(params);
mCamera.setParameters(params);
} catch (Exception e) {
Log.e(TAG, "initCamera:", e);
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
}
public void stopPreview(){
Log.v(TAG, "stopPreview:");
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
public void setVideoSize(final int width, final int height) {
if ((mRotation % 180) == 0) {
mVideoWidth = width;
mVideoHeight = height;
} else {
mVideoWidth = height;
mVideoHeight = width;
}
//调整OpenGL视口
queueEvent(new Runnable() {
@Override
public void run() {
mRenderer.updateViewport();
}
});
Log.d(TAG, "setVideoSize: width x height=" + width + " x " + height );
}
private static Camera.Size getClosestSupportedSize(List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
return Collections.min(supportedSizes, new Comparator<Camera.Size>() {
private int diff(final Camera.Size size) {
return Math.abs(requestedWidth - size.width) + Math.abs(requestedHeight - size.height);
}
@Override
public int compare(final Camera.Size lhs, final Camera.Size rhs) {
return diff(lhs) - diff(rhs);
}
});
}
/**
* Set the camera rotation
* @param params
*/
private final void setRotation(final Camera.Parameters params) {
final Display display = ((WindowManager)mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
final int rotation = display.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
final Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(CAMERA_ID, info);
mIsFrontFace = (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
if (mIsFrontFace) { // front-facing camera
degrees = (info.orientation + degrees) % 360;
degrees = (360 - degrees) % 360; // reverse
} else { // rear-facing camera
degrees = (info.orientation - degrees + 360) % 360;
}
mCamera.setDisplayOrientation(degrees);
mRotation = degrees;
Log.d(TAG, "setRotation:" + degrees);
}
public int getVideoHeight() {
return mVideoHeight;
}
public int getVideoWidth() {
return mVideoWidth;
}
public void startRecord() {
mRenderer.setNeedRecord(true);
}
public void stopRecord() {
mRenderer.setStopRecorder(true);
}
}
The CameraGLSurfaceView class above is the GLSurfaceView on the Android framework side; its key function is initCamera, which initializes the camera and binds it to the OpenGL texture.
SurfaceRenderer is the OpenGL renderer for the camera preview. Its key functions are onSurfaceCreated, which sets up the OpenGL environment and obtains the OpenGL texture, and onDrawFrame, which performs the OpenGL drawing and passes the texture data on to the video encoder. The video and audio encoders are also initialized and started inside onDrawFrame; the encoders themselves are described in detail later (a minimal sketch follows).
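Since the encoders are only covered in detail later, here is a hedged preview of the core idea behind VideoSurfaceEncoder's Surface input, using the standard MediaCodec API; the sketch class and field names are placeholders, not the project's actual implementation:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.view.Surface;
import java.io.IOException;

// Hypothetical sketch: the encoder exposes a Surface; EGL wraps it as a window
// surface so OpenGL can draw camera frames straight into the codec.
final class SurfaceEncoderSketch {
    final MediaCodec codec;
    final Surface inputSurface;

    SurfaceEncoderSketch(int width, int height) throws IOException {
        final MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        codec = MediaCodec.createEncoderByType("video/avc");
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = codec.createInputSurface();// must come after configure()
        codec.start();
        // Next steps (EGL configuration): eglCreateWindowSurface(...) on inputSurface,
        // render the OES texture into it, and drain the codec's output into the muxer.
    }
}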
package lda.com.camerasurfacerecorder;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Environment;
import android.util.Log;
import java.io.File;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import lda.com.camerasurfacerecorder.recorder.MMuxer;
import lda.com.camerasurfacerecorder.recorder.VideoSurfaceEncoder;
import lda.com.camerasurfacerecorder.recorder.audio.AudioEncoder;
/**
* OpenGL renderer for the camera preview
*/
public class SurfaceRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener{
private static String TAG = SurfaceRenderer.class.getSimpleName();
private CameraGLSurfaceView mSurfaceView;//the GLSurfaceView being rendered to
private SurfaceTexture mSurfaceTexture;//texture the camera renders into
private int mTextureId;
private GLDrawer2D mDrawer;//OpenGL drawer
private float[] mSurfaceTextureMatrix = new float[16];//texture transform matrix
//projection (MVP) matrix (note: the OpenGL coordinate system differs from the phone
//screen's, so for correct display the OpenGL coordinates are left-multiplied by this matrix)
private float[] mMvpMatrix = new float[16];
private boolean mIsNeedUpdateTexture = false;
private boolean mIsNeedRecord = false;
private VideoSurfaceEncoder mVideoEncoder;//video encoder
private boolean mIsRecordCurrFrame = true;
private boolean mIsStopRecorder = false;
private AudioEncoder mAudioEncoder;//audio encoder
public SurfaceRenderer(CameraGLSurfaceView surfaceView) {
mSurfaceView = surfaceView;
Matrix.setIdentityM(mMvpMatrix, 0);
}
/**
* Renderer
* @param gl
* @param config
*/
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.v(TAG, "onSurfaceCreated:");
// camera rendering requires the OES_EGL_image_external extension
final String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS); // API >= 8
if (!extensions.contains("OES_EGL_image_external"))
throw new RuntimeException("This system does not support OES_EGL_image_external.");
// create the texture ID
mTextureId = GLDrawer2D.initTextureId();
// create the render texture
mSurfaceTexture = new SurfaceTexture(mTextureId);
mSurfaceTexture.setOnFrameAvailableListener(this);
// clear the screen to yellow
GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
mDrawer = new GLDrawer2D();
mDrawer.setMatrix(mMvpMatrix, 0);
}
public boolean isNeedRecord() {
return mIsNeedRecord;
}
public void setNeedRecord(boolean isNeedRecord){
mIsNeedRecord = isNeedRecord;
}
/**
* Renderer
* @param gl
* @param width
* @param height
*/
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.v(TAG, String.format("onSurfaceChanged:(%d,%d)", width, height));
// if either width or height is zero, initialization of this view is still in progress.
if ((width == 0) || (height == 0)) return;
updateViewport();
mSurfaceView.startPreview(width, height);
}
/**
* OpenGL draw callback
* @param gl
*/
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (mIsNeedUpdateTexture) {
mIsNeedUpdateTexture = false;
//update the texture (the camera is bound to this SurfaceTexture)
mSurfaceTexture.updateTexImage();
// fetch the texture transform matrix
mSurfaceTexture.getTransformMatrix(mSurfaceTextureMatrix);
}
// draw to preview screen
if(mIsNeedRecord){
if(mVideoEncoder == null){
MMuxer mMuxer = new MMuxer(getSaveVideoPath());
mVideoEncoder = new VideoSurfaceEncoder(mMuxer, mSurfaceView.getVideoWidth(), mSurfaceView.getVideoHeight());
mAudioEncoder = new AudioEncoder(mMuxer);
mVideoEncoder.setAllKeyFrame(true);
mVideoEncoder.setEglAndStart(EGL14.eglGetCurrentContext(), mTextureId);
mAudioEncoder.start();
Log.d(TAG, "init encoder");
}
// Log.d(TAG, "encoderprepared=" + mEncoder.isPrepared() + " isRecordCurrFrame=" + mIsRecordCurrFrame);
if(mVideoEncoder != null && mVideoEncoder.isPrepared() && mIsRecordCurrFrame){
long curr = System.currentTimeMillis();
Log.d(TAG, "======drawTime========" + (curr - mDrawTime));
mDrawTime = curr;
mVideoEncoder.render(mSurfaceTextureMatrix, mMvpMatrix);
}
mIsRecordCurrFrame = !mIsRecordCurrFrame;//record every other frame (halves the encoded frame rate)
if(mIsStopRecorder){
mVideoEncoder.eos();
mAudioEncoder.eos();
mIsNeedRecord = false;
mVideoEncoder = null;
mAudioEncoder = null;
mIsStopRecorder = false;//reset, otherwise the next recording would stop immediately
}
}
mDrawer.draw(mTextureId, mSurfaceTextureMatrix);
}
private long mDrawTime = 0;//time of the previous encoded frame, for the interval log above
public void setStopRecorder(boolean stopRecorder) {
mIsStopRecorder = stopRecorder;
}
private String getSaveVideoPath() {
File dir = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "00recorder" + File.separator);
if(!dir.exists() || !dir.isDirectory()){
dir.mkdirs();
}
File file = new File(dir, "surface.mp4");
return file.getAbsolutePath();
}
public void onSurfaceDestroy(){
Log.v(TAG, "onSurfaceDestroyed:");
if (mDrawer != null) {
mDrawer.release();
mDrawer = null;
}
if (mSurfaceTexture != null) {
mSurfaceTexture.release();
mSurfaceTexture = null;
}
GLDrawer2D.deleteTex(mTextureId);
}
/**
* OnFrameAvailableListener
* @param surfaceTexture
*/
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
//a new camera frame is ready; the next onDrawFrame picks it up via updateTexImage()
mIsNeedUpdateTexture = true;
}
}