录像主要功能在VideoHelper类中实现
录像功能大体流程:
-
创建MediaRecorder实例,以及用于显示预览图像信息的Surface的实例,并调用CaptureRequestBuilder.addTarget()方法,将MediaRecorder和Surface的实例,作为显示层添加到该请求的目标列表中。
-
调用CameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD)方法,为新的捕获请求创建一个CaptureRequest.Builder对象,并用CameraDevice.TEMPLATE_RECORD参数初始化
-
调用CameraDevice.createCaptureSession方法,通过提供目标输出集来创建新的捕获会话,该方法传入三个参数:用于捕获图像信息的Surface集合,此处为显示预览信息的Surface实例,以及记录图像信息用的MediaRecorder的实例;CameraCaptureSession.StateCallback:用于通知新捕获session的callback;以及最后的Handler:代表执行callback的handler,如果程序希望直接在当前线程中执行callback,则可以将handler参数设为null
5.在StateCallback中重写onConfigured()方法,完成回调.其中包括:
调用CaptureRequestBuilder.set()方法,设置捕获的参数
调用CameraCaptureSession.setRepeatingRequest()方法,通过此捕获session,持续重复捕获图像
调用MediaRecorder.start()方法,开始捕获数据并将数据编码到指定文件
调用CameraCaptureSession.stopRepeating()方法,取消持续捕获
调用CameraCaptureSession.abortCaptures()方法,尽可能快地丢弃当前待处理和正在进行的所有捕获
调用MediaRecorder.stop()方法,停止图像捕获
- 重新开启预览
以下为录像功能的代码:
public class VideoHelper {
    private static final String TAG = "VideoHelper";
    // Hosting activity; used for permission checks, display rotation and UI-thread work.
    private final Activity mActivity;
    // View that renders the camera preview frames.
    private final TextureView mTextureView;
    // Thumbnail button refreshed with the newest captured media (see MyTask).
    private CircleImageView imageButton;
    // Handler for camera callbacks; created in init() from the constructing thread's looper.
    private Handler mCameraHandler;
    private CameraManager mCameraManager;
    private Size mVideoSize;
    private Size mPreviewSize;
    // Sensor mounting orientation from CameraCharacteristics (boxed, may be null).
    private Integer mSensorOrientation;
    // Serializes camera open/close so releaseCamera() waits for an in-flight open.
    private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
    private CameraCaptureSession mPreviewSession;
    private CaptureRequest.Builder mPreviewBuilder;
    private MediaRecorder mMediaRecorder;
    private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
    private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
    // Display-rotation -> MediaRecorder orientation-hint tables for the two sensor mountings.
    private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
    private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
    private MyTask myTask;
    // NOTE(review): this thread is declared but never start()-ed anywhere in this file;
    // startRecordingVideo() calls mMediaRecorder.start() directly instead — confirm it is dead.
    private final Thread handlerThread = new Thread(()->{
        mMediaRecorder.start();
    });
    static {
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }
    static {
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
    }
    private CameraDevice mCameraDevice;
    // Output path for the next recording; set lazily in setUpMediaRecorder().
    private String mNextVideoAbsolutePath;
    /**
     * Wires the helper to the hosting activity and its preview/thumbnail views,
     * then installs the surface-texture listener via init().
     *
     * @param activity    hosting activity
     * @param textureView view that will display the camera preview
     * @param imageButton button whose image is replaced with the latest thumbnail
     */
    public VideoHelper(Activity activity, TextureView textureView, CircleImageView imageButton) {
        this.mActivity = activity;
        this.mTextureView = textureView;
        this.imageButton = imageButton;
        init();
        // Pre-built task instance; note stopRecordingVideo() creates a fresh MyTask instead.
        myTask = new MyTask(mActivity);
    }
    private void init() {
        // NOTE(review): Looper.myLooper() returns null on a thread without a Looper,
        // which would make this Handler constructor throw. Presumably VideoHelper is
        // always constructed on the main thread — confirm with callers.
        mCameraHandler = new Handler(Looper.myLooper());
        // Camera setup is deferred until the TextureView surface becomes available.
        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
    }
public void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.d(TAG, "startRecordingVideo: mCameraDevice mTextureView mPreviewSize 有空的: "
+ mCameraDevice + ":" + mTextureView + ":" + mPreviewSize);
return;
}
mMediaRecorder.start();
// mActivity.runOnUiThread(() -> {
// // Start recording
// mMediaRecorder.start();
// });
// try {
// setUpMediaRecorder();
// SurfaceTexture texture = mTextureView.getSurfaceTexture();
// assert texture != null;
// texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
// mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
// Surface previewSurface = new Surface(texture);
// Surface recorderSurface = mMediaRecorder.getSurface();
// Set up Surface for the MediaRecorder
// surfaces.add(previewSurface);
// surfaces.add(recorderSurface);
//
// mPreviewBuilder.addTarget(previewSurface);
// mPreviewBuilder.addTarget(recorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
// mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
//
// @Override
// public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// mPreviewSession = cameraCaptureSession;
// if (null == mCameraDevice) {
// return;
// }
// try {
// mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);;
// mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mCameraHandler);
// } catch (CameraAccessException e) {
// e.printStackTrace();
// }
// mActivity.runOnUiThread(() -> {
// // Start recording
// mMediaRecorder.start();
// });
// }
//
// @Override
// public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
// if (null != mActivity) {
// Toast.makeText(mActivity, "Failed", Toast.LENGTH_SHORT).show();
// }
// }
// }, mCameraHandler);
//
// } catch (CameraAccessException | IOException e) {
// e.printStackTrace();
// }
}
public void stopRecordingVideo() {
if (mMediaRecorder != null){
try {
mMediaRecorder.stop();
}catch (IllegalStateException e){
mMediaRecorder = null;
mMediaRecorder = new MediaRecorder();
}
mMediaRecorder.release();
}
Log.d(TAG, "stopRecordingVideo: restartPreview---------------------------------");
restartPreview();
new MyTask(mActivity).execute();
}
@SuppressLint("StaticFieldLeak")
private class MyTask extends AsyncTask<Object, Integer, Bitmap> {
public final Uri uriImage = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
public final Uri uriVideo = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
public String[] projectionImage = {MediaStore.Images.Media.DATA};
public String[] projectionVideo = {MediaStore.Video.Media.DATA};
private Context mContext;
public MyTask(Context context) {
mContext = context;
}
@Override
protected Bitmap doInBackground(Object... objects) {
@SuppressLint("Recycle")
Cursor cursorImage = mContext.getContentResolver().query(uriImage, projectionImage,
null, null, MediaStore.Images.Media.DATE_ADDED + " DESC");
Cursor cursorVideo = mContext.getContentResolver().query(uriVideo, projectionVideo,
null, null, MediaStore.Video.Media.DATE_ADDED + " DESC");
Bitmap resultBitmap = FileUtil.compareImageAndVideoPath(cursorImage, cursorVideo);
return resultBitmap;
}
@Override
protected void onPostExecute(Bitmap bitmap) {
if (bitmap != null) {
Log.d(TAG, "onPostExecute: video中获取缩略图成功");
imageButton.setImageBitmap(bitmap);
}
}
}
// public void pauseRecord(){
// mMediaRecorder.pause();
// }
//
// public void reStartRecord(){
// mMediaRecorder.resume();
// }
private void setUpMediaRecorder() throws IOException {
final Activity activity = mActivity;
if (null == activity) {
return;
}
//mMediaRecorder.release();
mMediaRecorder = null;
mMediaRecorder = new MediaRecorder();
//mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = FileUtil.getVideoPath();
Log.i(TAG, "setUpMediaRecorder: " + mNextVideoAbsolutePath);
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(1920, 1080);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
//mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
    // Receives camera open/disconnect/error events. The open/close semaphore is
    // released on every terminal outcome so releaseCamera() can always proceed.
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            // Start the preview (which also prepares the MediaRecorder).
            startPreview();
            mCameraOpenCloseLock.release();
            if (null != mTextureView) {
                // Align the preview transform with the current view size.
                configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
            }
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            Log.d(TAG, "onDisconnected: " + cameraDevice);
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }
    };
private void restartPreview(){
try {
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
Surface previewSurface = new Surface(texture);
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewBuilder.addTarget(previewSurface);
//mPreviewBuilder.addTarget(mMediaRecorder.getSurface());
//
// mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mCameraHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
//
// Size cameraSize = getMatchingSize2();
// SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
// surfaceTexture.setDefaultBufferSize(cameraSize.getWidth(),cameraSize.getHeight());
// Surface previewSurface = new Surface(surfaceTexture);
// try {
// mPreviewCaptureRequest = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// mPreviewCaptureRequest.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// mPreviewCaptureRequest.addTarget(previewSurface);
// mCameraCaptureSession.setRepeatingRequest(mPreviewCaptureRequest.build(),mSessionCaptureCallback,mChildHandler);
// } catch (CameraAccessException e) {
// e.printStackTrace();
// }
}
    /**
     * Opens a capture session that targets BOTH the preview surface and the
     * MediaRecorder surface, then starts a repeating preview request. Because
     * the recorder surface is already part of the session, a later recording
     * only needs MediaRecorder.start() (see startRecordingVideo()).
     */
    private void startPreview() {
        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
            Log.d(TAG, "startPreview: mCameraDevice mTextureView mPreviewSize 有空的: "
                    + mCameraDevice + ":" + mTextureView + ":" + mPreviewSize);
            return;
        }
        try {
            // Prepare the recorder first so its input surface exists below.
            setUpMediaRecorder();
            //closePreviewSession();
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            Log.d(TAG, "createCaptureSession的CameraDevice : " + mCameraDevice);
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            Surface previewSurface = new Surface(texture);
            Surface recordSurface = mMediaRecorder.getSurface();
            mPreviewBuilder.addTarget(previewSurface);
            mPreviewBuilder.addTarget(recordSurface);
            List<Surface> surfaces = new ArrayList<>();
            surfaces.add(previewSurface);
            surfaces.add(recordSurface);
            // The MediaRecorder surface is registered with the session here.
            mCameraDevice.createCaptureSession(surfaces,
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            mPreviewSession = session;
                            if (null == mCameraDevice) {
                                Log.d(TAG, "startPreview:onConfigured mCameraDevice为空");
                                return;
                            }
                            try {
                                mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
                                // Continuously deliver frames to both targets.
                                mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mCameraHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Log.d(TAG, "onConfigureFailed: 视频开启预览画面失败");
                        }
                    }, mCameraHandler);
        } catch (CameraAccessException | IOException e) {
            e.printStackTrace();
        }
    }
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
public void releaseCamera() {
try {
mCameraOpenCloseLock.acquire();
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mMediaRecorder != null) {
mMediaRecorder.release();
Log.d(TAG, "releaseCamera: mMediaRecorder : " + mMediaRecorder);
}
} catch (InterruptedException e) {
e.printStackTrace();
}finally {
mCameraOpenCloseLock.release();
}
Log.d(TAG, "releaseCamera: mPreviewSession mCameraDevice mMediaRecorder : " + mPreviewSession + mCameraDevice + mMediaRecorder);
}
    // Drives camera setup from the TextureView lifecycle: the camera is only
    // opened once the surface texture becomes available.
    private final TextureView.SurfaceTextureListener mSurfaceTextureListener
            = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
                                              int width, int height) {
            Log.i(TAG, "onSurfaceTextureAvailable: ");
            // Query camera characteristics and (permission allowing) open it.
            initCameraInfo(width, height);
            Log.d(TAG, "onSurfaceTextureSizeChanged: restart 0");
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
                                                int width, int height) {
            // Keep the preview transform in sync with the new view size.
            configureTransform(width, height);
            Log.d(TAG, "onSurfaceTextureSizeChanged: restart 1");
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
            Log.d(TAG, "onSurfaceTextureSizeChanged: restart 2");
            // Returning true lets the TextureView release the surface itself.
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
            //Log.d(TAG, "onSurfaceTextureSizeChanged: restart 3");
        }
    };
@SuppressLint("ShowToast")
private void initCameraInfo(int width, int height) {
try{
mCameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
String[] cameraIdList = mCameraManager.getCameraIdList();
if (cameraIdList == null) {
Toast.makeText(mActivity, "没有相机可用", Toast.LENGTH_SHORT);
return;
}
String cameraId = mCameraManager.getCameraIdList()[0];
CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
//mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mVideoSize = new Size(1920, 1080);
Log.d(TAG, "initCameraInfo: mVideoSize : " + mVideoSize.getWidth() + " " + mVideoSize.getHeight());
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), 1920, 1080, mVideoSize);
if (ActivityCompat.checkSelfPermission(mActivity, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
openCamera(width, height);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera(int width, int height) {
if (ActivityCompat.checkSelfPermission(mActivity, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
Log.d(TAG, "initCameraInfo: mTextureView-----------没有相机权限");
List<String> permissionList = new ArrayList<>();
permissionList.add(Manifest.permission.READ_EXTERNAL_STORAGE);
permissionList.add(Manifest.permission.CAMERA);
permissionList.add(Manifest.permission.MODIFY_AUDIO_SETTINGS);
ActivityCompat.requestPermissions(mActivity, permissionList.toArray(new String[permissionList.size()]),1002);
}else {
try {
String cameraId = mCameraManager.getCameraIdList()[0];
Log.d(TAG, "initCameraInfo: mTextureView-----------6准备打开相机");
mCameraManager.openCamera(cameraId, mStateCallback, mCameraHandler);
Log.d(TAG, "openCamera: 打开相机");
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
    /**
     * Applies the rotation/scale matrix that maps the fixed-orientation camera
     * buffer onto the (possibly rotated) TextureView. Only landscape rotations
     * (ROTATION_90/270) need correction here.
     *
     * @param viewWidth  current TextureView width
     * @param viewHeight current TextureView height
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = mActivity;
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        // Buffer rect uses (height, width) because the sensor buffer is rotated
        // relative to the view.
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            // Center the buffer on the view, fit, then scale so the preview fills
            // the view, and rotate to counteract the display rotation.
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
Log.i(TAG, "chooseVideoSize: " + size.toString());
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
/**
*
* 根据提供的参数值返回与指定宽高相等或最接近的尺寸
*
* @param targetWidth 目标宽度
* @param targetHeight 目标高度
* @param maxWidth 最大宽度(即TextureView的宽度)
* @param maxHeight 最大高度(即TextureView的高度)
* @param sizeList 支持的Size列表
*
* @return 返回与指定宽高相等或最接近的尺寸
*
*/
private Size getBestSize(int targetWidth, int targetHeight, int maxWidth, int maxHeight, List<Size> sizeList){
List<Size> bigEnough = new ArrayList<>(); //比指定宽高大的Size列表
List<Size> notBigEnough = new ArrayList<>(); //比指定宽高小的Size列表
//宽<=最大宽度 && 高<=最大高度 && 宽高比 == 目标值宽高比
for (Size size : sizeList) {
if (size.getWidth() <= maxWidth && size.getHeight() <= maxHeight
&& size.getWidth() == size.getHeight() * targetWidth / targetHeight){
if (size.getWidth() >= targetWidth && size.getHeight() >= targetHeight){
bigEnough.add(size);
}else {
notBigEnough.add(size);
}
}
//Log.d(TAG, "系统支持的尺寸: " + size.getWidth() * size.getHeight() + "比例 :" + size.getWidth() / size.getHeight());
}
Log.i(TAG, "最大尺寸: " + maxWidth * maxHeight + "比例 :" + targetWidth / targetHeight);
Log.i(TAG, "目标尺寸: " + targetWidth * targetHeight + "比例 :" + targetWidth / targetHeight);
//选择bigEnough中最小的值 或 notBigEnough中最大的值
if (bigEnough.size() > 0){
return Collections.min(bigEnough, new CompareSizeByArea());
}else if (notBigEnough.size() > 0){
return Collections.max(notBigEnough, new CompareSizeByArea());
}
return sizeList.get(0);
}
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size size1, Size size2) {
return (int) Math.signum( size1.getWidth() * size1.getHeight() - size2.getWidth() * size2.getHeight());
}
}
}
gh中最小的值 或 notBigEnough中最大的值
if (bigEnough.size() > 0){
return Collections.min(bigEnough, new CompareSizeByArea());
}else if (notBigEnough.size() > 0){
return Collections.max(notBigEnough, new CompareSizeByArea());
}
return sizeList.get(0);
}
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size size1, Size size2) {
return (int) Math.signum( size1.getWidth() * size1.getHeight() - size2.getWidth() * size2.getHeight());
}
}
}