MediaRecorder的状态机
创建MediaRecorder,这时候处于Initial状态
mediaRecorder = new MediaRecorder();
设置数据来源,这时候处于Initialized状态
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);//设置音频来源
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);//设置视频来源
设置编码格式,封装格式,码率,这时候处于DataSourceConfigured状态
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);//设置输出格式
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);//设置音频编码格式,请注意这里使用默认,实际app项目需要考虑兼容问题,应该选择AAC
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);//设置视频编码格式,请注意这里使用默认,实际app项目需要考虑兼容问题,应该选择H264
mediaRecorder.setVideoEncodingBitRate(8*width*height);//设置比特率 一般是 1*分辨率 到 10*分辨率 之间波动。比特率越大视频越清晰但是视频文件也越大。
mediaRecorder.setVideoFrameRate(30);//设置帧数 选择 30即可, 过大帧数也会让视频文件更大当然也会更流畅,但是没有多少实际提升。人眼极限也就30帧了。
mediaRecorder.setVideoSize(width,height);
mediaRecorder.setOrientationHint(90);
SurfaceTexture surfaceTexture = binding.surfaceView.getSurfaceTexture();
width = 1280;
height = 720;
surfaceTexture.setDefaultBufferSize(width,height);
Surface previewSurface = new Surface(surfaceTexture);
mediaRecorder.setPreviewDisplay(previewSurface);
mediaRecorder.setOutputFile(file.getAbsolutePath());
调用prepare,处于prepared
try {
mediaRecorder.prepare();
} catch (IOException e) {
e.printStackTrace();
}
调用start就会处于Recording状态,调用reset就会回到Initial状态
要记住使用完后必须调用release()释放资源
录制方法
private void record() {
    try {
        Log.e("RecordActivity", "record");
        // Recording replaces the preview-only session, so tear that down first.
        closePreview();
        // Drives the MediaRecorder state machine up to prepared().
        configMediaRecorder();
        // The preview widget is a TextureView (a SurfaceView has no getSurfaceTexture()).
        SurfaceTexture surfaceTexture = binding.textureview.getSurfaceTexture();
        surfaceTexture.setDefaultBufferSize(width, height);
        Surface previewSurface = new Surface(surfaceTexture);
        // getSurface() is only valid after prepare(); configMediaRecorder() just did that.
        Surface recordSurface = mediaRecorder.getSurface();
        // TEMPLATE_RECORD (not TEMPLATE_PREVIEW): tuned for a steady video frame rate.
        final CaptureRequest.Builder builder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        builder.addTarget(previewSurface);
        builder.addTarget(recordSurface);
        List<OutputConfiguration> outputs = new ArrayList<>();
        outputs.add(new OutputConfiguration(previewSurface));
        outputs.add(new OutputConfiguration(recordSurface));
        SessionConfiguration sessionConfiguration = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR,
                outputs,
                mExecutorService,
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        mCameraRecordCaptureSession = session;
                        // CONTINUOUS_VIDEO autofocus while recording
                        // (CONTINUOUS_PICTURE is meant for still capture).
                        builder.set(CaptureRequest.CONTROL_AF_MODE,
                                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
                        try {
                            // No per-frame work is needed; an empty CaptureCallback behaves
                            // identically to overriding every method with a super call.
                            mCameraRecordCaptureSession.setRepeatingRequest(builder.build(),
                                    new CameraCaptureSession.CaptureCallback() {}, sub1Handler);
                        } catch (CameraAccessException e) {
                            Log.e("RecordActivity", "setRepeatingRequest failed", e);
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                        Log.e("RecordActivity", "record session configuration failed");
                    }
                });
        mCamera.createCaptureSession(sessionConfiguration);
        // The recorder just waits for frames on its surface, so starting it before
        // the (asynchronous) session configuration completes is safe.
        mediaRecorder.start();
    } catch (Exception e) {
        // Never swallow failures silently — log them.
        Log.e("RecordActivity", "record failed", e);
    }
}
通过代码我们可以知道,MediaRecorder是从Surface中取得数据进行保存
package com.yuanxuzhen.androidmedia.record;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.TextureView;
import android.view.View;
import android.widget.Toast;
import com.yuanxuzhen.androidmedia.DirUtil;
import com.yuanxuzhen.androidmedia.databinding.ActivityCameraLayoutBinding;
import com.yuanxuzhen.androidmedia.databinding.ActivityRecordBinding;
import com.yuanxuzhen.androidmedia.video.CameraActivity;
import com.yuanxuzhen.androidmedia.video.CompareSizesByArea;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import permissions.dispatcher.NeedsPermission;
import permissions.dispatcher.OnNeverAskAgain;
import permissions.dispatcher.OnPermissionDenied;
import permissions.dispatcher.RuntimePermissions;
@RuntimePermissions
public class RecordActivity extends AppCompatActivity {
    private static final String TAG = "RecordActivity";

    ActivityRecordBinding binding;
    ExecutorService mExecutorService;
    CameraManager cameraManager;
    CameraDevice mCamera;
    private String frontCameraId = "";
    private String backCameraId = "";
    CameraCaptureSession mCameraPreviewCaptureSession;
    CameraCaptureSession mCameraRecordCaptureSession;
    HandlerThread subHandlerThread;
    Handler subHandler;
    HandlerThread sub1HandlerThread;
    Handler sub1Handler;
    int width, height;
    private MediaRecorder mediaRecorder;
    // True only between MediaRecorder.start() and the matching stop(). Guards
    // stopRecorder() against calling stop() on a recorder that was never started,
    // which throws IllegalStateException and would crash the activity.
    private boolean isRecording;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        mExecutorService = Executors.newCachedThreadPool();
        binding = ActivityRecordBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());
        subHandlerThread = new HandlerThread("sub");
        subHandlerThread.start();
        subHandler = new Handler(subHandlerThread.getLooper());
        sub1HandlerThread = new HandlerThread("sub1");
        sub1HandlerThread.start();
        sub1Handler = new Handler(sub1HandlerThread.getLooper());
        binding.startRecord.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                record();
            }
        });
        binding.stopRecord.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Stopping the recorder finalizes the file; then return to preview.
                stopRecorder();
                preview();
            }
        });
        binding.textureview.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
                // Open the camera only once the preview texture exists.
                RecordActivityPermissionsDispatcher.startCameraWithPermissionCheck(RecordActivity.this);
            }

            @Override
            public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
            }

            @Override
            public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
                return false;
            }

            @Override
            public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {
            }
        });
        // One MediaRecorder instance is reused across recordings: configured in
        // configMediaRecorder(), reset in stopRecorder(), released in onDestroy().
        mediaRecorder = new MediaRecorder();
    }

    /**
     * Finds the front/back camera ids and opens the back camera on the executor.
     * NOTE(review): openCamera(String, Executor, StateCallback) requires API 28.
     */
    @NeedsPermission({Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA})
    public void startCamera() {
        Log.e(TAG, "startCamera");
        mExecutorService.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    String[] cameraIdArray = cameraManager.getCameraIdList();
                    for (String id : cameraIdArray) {
                        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
                        // LENS_FACING may be absent; unboxing a null Integer would NPE.
                        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                        if (facing == null) {
                            continue;
                        }
                        if (facing == CameraCharacteristics.LENS_FACING_FRONT) {
                            frontCameraId = id;
                        } else if (facing == CameraCharacteristics.LENS_FACING_BACK) {
                            backCameraId = id;
                        }
                    }
                    calculateCameraParameters();
                    if (ActivityCompat.checkSelfPermission(RecordActivity.this,
                            Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                        return;
                    }
                    cameraManager.openCamera(backCameraId,
                            mExecutorService,
                            new CameraDevice.StateCallback() {
                                @Override
                                public void onOpened(@NonNull CameraDevice camera) {
                                    Log.e(TAG, "onOpened");
                                    mCamera = camera;
                                    preview();
                                }

                                @Override
                                public void onDisconnected(@NonNull CameraDevice camera) {
                                    Log.e(TAG, "onDisconnected");
                                }

                                @Override
                                public void onError(@NonNull CameraDevice camera, int error) {
                                    Log.e(TAG, "onError error=" + error);
                                }
                            });
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Closes any active sessions and the camera device. */
    public void stopCamera() {
        Log.e(TAG, "stopCamera");
        closeRecordiew();
        closePreview();
        if (mCamera != null) {
            mCamera.close();
            mCamera = null;
        }
    }

    @OnPermissionDenied(Manifest.permission.RECORD_AUDIO)
    public void onDeniedAudio() {
        Toast.makeText(this, "录音权限拒绝", Toast.LENGTH_SHORT).show();
    }

    @OnNeverAskAgain(Manifest.permission.RECORD_AUDIO)
    public void onNeverAskAgainAudio() {
        Toast.makeText(this, "录音权限再不询问", Toast.LENGTH_SHORT).show();
    }

    @OnPermissionDenied(Manifest.permission.CAMERA)
    public void onDeniedCamera() {
        Toast.makeText(this, "录像权限拒绝", Toast.LENGTH_SHORT).show();
    }

    @OnNeverAskAgain(Manifest.permission.CAMERA)
    public void onNeverAskAgainCamera() {
        Toast.makeText(this, "录像权限再不询问", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        RecordActivityPermissionsDispatcher.onRequestPermissionsResult(this, requestCode, grantResults);
    }

    /** Check if this device has a camera (FEATURE_CAMERA = back-facing). */
    private boolean checkCameraHardware() {
        return getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
    }

    @Override
    protected void onDestroy() {
        stopRecorder();
        stopCamera();
        if (mediaRecorder != null) {
            mediaRecorder.release(); // recorder is unusable after release()
            mediaRecorder = null;
        }
        // Quit the looper threads and the executor so they do not leak.
        subHandlerThread.quitSafely();
        sub1HandlerThread.quitSafely();
        mExecutorService.shutdown();
        super.onDestroy();
    }

    /**
     * Starts a repeating preview-only capture session on the TextureView surface.
     * SessionConfiguration and createCaptureSession(SessionConfiguration) are API 28,
     * hence P (the previous N annotation was too low and would crash on API 24-27).
     */
    @RequiresApi(api = Build.VERSION_CODES.P)
    private void preview() {
        try {
            closeRecordiew();
            final CaptureRequest.Builder previewBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            previewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            SurfaceTexture surfaceTexture = binding.textureview.getSurfaceTexture();
            surfaceTexture.setDefaultBufferSize(width, height);
            Surface previewSurface = new Surface(surfaceTexture);
            previewBuilder.addTarget(previewSurface);
            List<OutputConfiguration> outputs = new ArrayList<>();
            outputs.add(new OutputConfiguration(previewSurface));
            SessionConfiguration sessionConfiguration = new SessionConfiguration(
                    SessionConfiguration.SESSION_REGULAR,
                    outputs,
                    mExecutorService,
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            mCameraPreviewCaptureSession = session;
                            try {
                                // No per-frame work is needed; an empty CaptureCallback behaves
                                // identically to overriding every method with a super call.
                                mCameraPreviewCaptureSession.setRepeatingRequest(previewBuilder.build(),
                                        new CameraCaptureSession.CaptureCallback() {}, sub1Handler);
                            } catch (CameraAccessException e) {
                                Log.e(TAG, "preview setRepeatingRequest failed", e);
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Log.e(TAG, "preview session configuration failed");
                        }
                    });
            mCamera.createCaptureSession(sessionConfiguration);
        } catch (Exception e) {
            Log.e(TAG, "preview failed", e);
        }
    }

    /** Stops and closes the preview session, if any. */
    private void closePreview() {
        try {
            if (mCameraPreviewCaptureSession != null) {
                mCameraPreviewCaptureSession.stopRepeating();
                mCameraPreviewCaptureSession.close();
                mCameraPreviewCaptureSession = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Stops and closes the record session, if any. (Misspelled name kept: private, all callers in this class.) */
    private void closeRecordiew() {
        try {
            if (mCameraRecordCaptureSession != null) {
                mCameraRecordCaptureSession.stopRepeating();
                mCameraRecordCaptureSession.close();
                mCameraRecordCaptureSession = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Streams the camera into both the preview surface and MediaRecorder's input
     * surface, then starts the recorder. Requires API 28 (SessionConfiguration).
     */
    @RequiresApi(api = Build.VERSION_CODES.P)
    private void record() {
        try {
            Log.e(TAG, "record");
            closePreview();
            configMediaRecorder();
            SurfaceTexture surfaceTexture = binding.textureview.getSurfaceTexture();
            surfaceTexture.setDefaultBufferSize(width, height);
            Surface previewSurface = new Surface(surfaceTexture);
            // getSurface() is only valid after prepare(); configMediaRecorder() just did that.
            Surface recordSurface = mediaRecorder.getSurface();
            final CaptureRequest.Builder builder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            builder.addTarget(previewSurface);
            builder.addTarget(recordSurface);
            List<OutputConfiguration> outputs = new ArrayList<>();
            outputs.add(new OutputConfiguration(previewSurface));
            outputs.add(new OutputConfiguration(recordSurface));
            SessionConfiguration sessionConfiguration = new SessionConfiguration(
                    SessionConfiguration.SESSION_REGULAR,
                    outputs,
                    mExecutorService,
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            mCameraRecordCaptureSession = session;
                            builder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
                            try {
                                mCameraRecordCaptureSession.setRepeatingRequest(builder.build(),
                                        new CameraCaptureSession.CaptureCallback() {}, sub1Handler);
                            } catch (CameraAccessException e) {
                                Log.e(TAG, "record setRepeatingRequest failed", e);
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Log.e(TAG, "record session configuration failed");
                        }
                    });
            mCamera.createCaptureSession(sessionConfiguration);
            // The recorder just waits for frames on its surface, so starting it before
            // the (asynchronous) session configuration completes is safe.
            mediaRecorder.start();
            isRecording = true;
        } catch (Exception e) {
            Log.e(TAG, "record failed", e);
        }
    }

    /**
     * Logs the back camera's supported YUV output sizes and picks the capture
     * resolution. Currently hard-coded to 720p; TODO choose from the reported sizes.
     */
    private void calculateCameraParameters() {
        try {
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(backCameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            List<Size> sizeList = Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888));
            for (Size size : sizeList) {
                Log.e(TAG, "camera width=" + size.getWidth() + " height=" + size.getHeight());
            }
            Size largest = Collections.max(sizeList, new CompareSizesByArea());
            Log.e(TAG, "calculateCameraParameters width=" + largest.getWidth() + " height=" + largest.getHeight());
            width = 1280;
            height = 720;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Walks the MediaRecorder state machine up to prepare():
     * sources -> output format -> encoders/bitrate/size -> output file -> prepare().
     * This call order is mandated by MediaRecorder.
     */
    private void configMediaRecorder() {
        File file = new File(DirUtil.getCacheDir() + File.separator + "record" + File.separator + "record.mp4");
        if (file.exists()) {
            file.delete();
        }
        file.getParentFile().mkdirs();
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);      // audio from the microphone
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);  // camera2 pushes frames into getSurface()
        mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4); // .mp4 container
        mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);    // AAC for broad compatibility
        mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);   // H.264 for broad compatibility
        // Bit rate: roughly 1x..10x of (width*height) is the usual range; higher
        // means sharper video but a bigger file.
        mediaRecorder.setVideoEncodingBitRate(8 * width * height);
        mediaRecorder.setVideoFrameRate(30); // 30 fps is plenty; higher mostly grows the file
        mediaRecorder.setVideoSize(width, height);
        mediaRecorder.setOrientationHint(90); // rotate playback 90° — confirm per device/sensor orientation
        // NOTE(review): setPreviewDisplay() is normally unnecessary with camera2
        // (preview is driven by the capture session); kept to preserve behavior —
        // confirm before removing.
        SurfaceTexture surfaceTexture = binding.textureview.getSurfaceTexture();
        surfaceTexture.setDefaultBufferSize(width, height);
        Surface previewSurface = new Surface(surfaceTexture);
        mediaRecorder.setPreviewDisplay(previewSurface);
        mediaRecorder.setOutputFile(file.getAbsolutePath());
        try {
            mediaRecorder.prepare();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Stops recording (stop() finalizes and saves the file) and resets the
     * recorder back to its Initial state so it can be configured again.
     * Safe to call when nothing is recording.
     */
    private void stopRecorder() {
        if (mediaRecorder == null) {
            return;
        }
        if (isRecording) {
            try {
                mediaRecorder.stop();
            } catch (RuntimeException e) {
                // stop() throws if no valid frames were captured; the output file is unusable.
                Log.e(TAG, "MediaRecorder.stop failed", e);
            }
            isRecording = false;
        }
        mediaRecorder.reset();
    }
}
gitee地址