I haven't really looked into video sizes for recording, which is also why I didn't use MediaRecorder; my guess is that the MediaRecorder error was a size mismatch.
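If it really is a size mismatch (just my guess, not verified), the usual way around it with MediaRecorder would be to pick a size the device actually reports for MediaRecorder before calling setVideoSize(). A minimal sketch, reusing the Camera2 objects from the class further down; the helper name and the numbers are made up:

// Sketch only: assumes the MediaRecorder failure really was a size mismatch.
// cameraManager / cameraId come from the Camera2 setup in VideoSurfaceView below.
private MediaRecorder createRecorder(CameraManager cameraManager, String cameraId, String path) throws Exception {
    CameraCharacteristics cc = cameraManager.getCameraCharacteristics(cameraId);
    StreamConfigurationMap map = cc.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size size = map.getOutputSizes(MediaRecorder.class)[0];     // a size the recorder path actually supports

    MediaRecorder recorder = new MediaRecorder();
    recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE); // frames come in through a Surface, like MediaCodec below
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    recorder.setVideoSize(size.getWidth(), size.getHeight());   // must be a supported size, otherwise prepare() fails
    recorder.setVideoFrameRate(25);
    recorder.setVideoEncodingBitRate(128000);
    recorder.setOutputFile(path);
    recorder.prepare();                                         // getSurface() is only valid after prepare()
    return recorder;                                            // add recorder.getSurface() as a camera output target
}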
Recording video works much like recording audio.
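For reference, the audio side mainly differs in the MediaFormat it builds and in having no input Surface; a rough comparison sketch (these AAC values are placeholders, not the ones from the audio post):

// Comparison sketch only; the numbers are placeholders.
private MediaCodec createAudioEncoder() throws IOException {
    MediaFormat audioFormat = MediaFormat.createAudioFormat(
            MediaFormat.MIMETYPE_AUDIO_AAC, 44100, 1);          // MIME type, sample rate, channel count
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);

    MediaCodec audioCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
    audioCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // No createInputSurface() here: PCM goes in through queueInputBuffer(),
    // while the video encoder below gets its frames through a Surface instead.
    audioCodec.start();
    return audioCodec;
}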
VideoEncoder
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.view.Surface;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

public class VideoEncoder {
    // Video parameters
    private static final String VIDEO_MIME_TYPE = "video/avc"; // video MIME type
    private static final int BIT_RATE = 128000;     // bit rate
    private static final int FRAME_RATE = 25;       // frame rate
    private static final int FI_FRAME_INTERVAL = 5; // I-frame interval in seconds
    private static final int MAX_WIDTH = 1280;
    private static final int MAX_HEIGHT = 1280;
    private int videoWidth = 1280;
    private int videoHeight = 720;
    private MediaMuxer mediaMuxer;
    private MediaCodec videoMediaCodec;
    private Surface surface;

    // The size probably has to stay a multiple of 2 (not verified); feeding the camera size in
    // directly throws an error, possibly because it is too large, so halve it until it fits.
    public void setVideoSize(int width, int height) {
        int w = width;
        int h = height;
        if (w > h) {
            while (w > MAX_WIDTH) {
                w = w / 2;
                h = h / 2;
            }
        } else {
            while (h > MAX_HEIGHT) {
                w = w / 2;
                h = h / 2;
            }
        }
        videoWidth = w;
        videoHeight = h;
    }

    // Initialization: much like the audio case, except the MediaFormat parameters differ
    // and we grab an input Surface from the codec.
    public void initVideo() {
        File f = new File("/storage/emulated/0/360/" + System.currentTimeMillis() + ".mp4");
        try {
            mediaMuxer = new MediaMuxer(f.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            videoMediaCodec = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, videoWidth, videoHeight);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, FI_FRAME_INTERVAL);
        videoMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        surface = videoMediaCodec.createInputSurface();
        videoMediaCodec.start();
    }

    public Surface getSurface() {
        return surface;
    }

    public int getVideoWidth() {
        return videoWidth;
    }

    public int getVideoHeight() {
        return videoHeight;
    }

    public void start() {
        Thread thread = new Thread() {
            @Override
            public void run() {
                super.run();
                videoEncoder();
            }
        };
        thread.start();
    }

    private boolean stop = true;

    public void stop() {
        stop = false;
    }

    // Another endless drain loop
    public void videoEncoder() {
        MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
        int mTrackIndex = -1;
        while (stop) {
            // Wait for encoded output; blocks the thread up to the timeout of 1000,
            // which is in microseconds (about 1 ms), not milliseconds.
            int encoderStatus = videoMediaCodec.dequeueOutputBuffer(mBufferInfo, 1000);
            if (encoderStatus >= 0) {
                ByteBuffer encodedData = videoMediaCodec.getOutputBuffer(encoderStatus);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    mBufferInfo.size = 0;
                }
                // Write the sample when there is data
                if (mBufferInfo.size != 0) {
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    writeData(mTrackIndex, encodedData, mBufferInfo);
                }
                videoMediaCodec.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break; // out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // Negative status: supposedly this is returned when no output is ready yet.
                // Not sure whether it is ever hit with a Surface input; not verified.
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The real output format is known now; add the track and start the muxer
                MediaFormat mediaFormat = videoMediaCodec.getOutputFormat();
                mTrackIndex = mediaMuxer.addTrack(mediaFormat);
                mediaMuxer.start();
            } else {
                // Skip anything else that is not a valid buffer index
            }
        }
        // Needed when using a Surface: tells MediaCodec to stop accepting input.
        // Not needed when feeding buffers directly.
        videoMediaCodec.signalEndOfInputStream();
        release();
    }

    // synchronized so concurrent calls from multiple threads do not interleave
    public synchronized void writeData(int mTrackIndex, ByteBuffer encodedData, MediaCodec.BufferInfo mBufferInfo) {
        mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
    }

    // Release everything
    public void release() {
        if (surface != null) {
            surface.release();
            surface = null;
        }
        if (videoMediaCodec != null) {
            videoMediaCodec.stop();
            videoMediaCodec.release();
            videoMediaCodec = null;
        }
        if (mediaMuxer != null) {
            mediaMuxer.release();
            mediaMuxer = null;
        }
    }
}
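Roughly how the class above is meant to be called (the EGL drawing is what VideoSurfaceView does below, so it is only hinted at here; previewWidth/previewHeight stand for the camera preview size):

// Usage sketch only.
VideoEncoder encoder = new VideoEncoder();
encoder.setVideoSize(previewWidth, previewHeight); // shrink the camera size under the 1280 limit
encoder.initVideo();                               // creates MediaMuxer, MediaCodec and the input Surface
Surface input = encoder.getSurface();              // bind this Surface to an EGL context and draw frames into it
encoder.start();                                   // the drain loop runs on its own thread

// ... render each camera frame into `input` with EGL, then swap ...

encoder.stop();                                    // ends the loop, signals end-of-stream and releases everything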
VideoSurfaceView
public class VideoSurfaceView extends SurfaceView {
    private EGLUtils mEglUtils;
    private GLFramebuffer mFramebuffer;
    private GLRenderer mRenderer;
    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private final Object mObject = new Object();

    private String mCameraId;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCameraCaptureSession;
    private CameraDevice mCameraDevice;
    private Handler mHandler;

    private VideoEncoder mVideoEncoder;
    private final Object vObject = new Object();

    private int screenWidth, screenHeight;
    private int previewWidth;
    private int previewHeight;

    public VideoSurfaceView(Context context) {
        super(context);
        init(context);
    }

    public VideoSurfaceView(Context context, AttributeSet attributeSet) {
        super(context, attributeSet);
        init(context);
    }

    private void init(Context context) {
        mFramebuffer = new GLFramebuffer(context);
        mRenderer = new GLRenderer(context);
        initCamera2();
        getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder surfaceHolder) {
            }

            @Override
            public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
                screenWidth = i1;
                screenHeight = i2;
                // Same as the code from the earlier posts
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        super.run();
                        mEglUtils = new EGLUtils();
                        mEglUtils.initEGL(EGL14.EGL_NO_CONTEXT, getHolder().getSurface());
                        mRenderer.initShader();
                        Size mPreviewSize = getPreferredPreviewSize(mSizes, screenWidth, screenHeight);
                        previewWidth = mPreviewSize.getHeight();
                        previewHeight = mPreviewSize.getWidth();
                        int left, top, viewWidth, viewHeight;
                        if (previewHeight > previewWidth) {
                            left = 0;
                            viewWidth = screenWidth;
                            viewHeight = (int) (previewHeight * 1.0f / previewWidth * viewWidth);
                            top = (screenHeight - viewHeight) / 2;
                        } else {
                            top = 0;
                            viewHeight = screenHeight;
                            viewWidth = (int) (previewWidth * 1.0f / previewHeight * viewHeight);
                            left = (screenWidth - viewWidth) / 2;
                        }
                        Rect rect = new Rect();
                        rect.left = left;
                        rect.top = top;
                        rect.right = left + viewWidth;
                        rect.bottom = top + viewHeight;
                        mFramebuffer.initFramebuffer(previewWidth, previewHeight);
                        mSurfaceTexture = mFramebuffer.getSurfaceTexture();
                        mSurfaceTexture.setDefaultBufferSize(previewWidth, previewHeight);
                        mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                            @Override
                            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                                synchronized (mObject) {
                                    mObject.notifyAll();
                                }
                            }
                        });
                        openCamera2();
                        while (true) {
                            synchronized (mObject) {
                                try {
                                    mObject.wait();
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            }
                            if (mSurfaceTexture == null) {
                                break;
                            }
                            mFramebuffer.drawFrameBuffer(previewWidth, previewHeight);
                            // Drive the recording thread: as with dual-screen playback,
                            // the main render thread drives the child render thread.
                            if (mVideoEncoder != null) {
                                synchronized (vObject) {
                                    vObject.notifyAll();
                                }
                            }
                            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
                            GLES20.glViewport(rect.left, rect.top, rect.width(), rect.height());
                            mFramebuffer.drawFrame();
                            mRenderer.drawFrame();
                            mEglUtils.swap();
                        }
                        mEglUtils.release();
                    }
                };
                thread.start();
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
                if (mCameraCaptureSession != null) {
                    mCameraCaptureSession.getDevice().close();
                    mCameraCaptureSession.close();
                    mCameraCaptureSession = null;
                }
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                    synchronized (mObject) {
                        mObject.notifyAll();
                    }
                }
            }
        });
    }

    private boolean stop = false;

    public void startVideo() {
        if (mVideoEncoder != null) {
            return;
        }
        mVideoEncoder = new VideoEncoder();
        mVideoEncoder.setVideoSize(previewWidth, previewHeight);
        mVideoEncoder.initVideo();
        stop = false;
        Thread thread = new Thread() {
            @Override
            public void run() {
                super.run();
                GLRenderer renderer = new GLRenderer(getContext());
                EGLUtils eglUtils = new EGLUtils();
                // Hand the Surface created by MediaCodec to EGL so it can be rendered into
                eglUtils.initEGL(mEglUtils.getContext(), mVideoEncoder.getSurface());
                renderer.initShader();
                // Another endless loop
                while (true) {
                    synchronized (vObject) {
                        try {
                            vObject.wait();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    if (mSurfaceTexture == null) {
                        break;
                    }
                    if (stop) {
                        break;
                    }
                    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
                    GLES20.glViewport(0, 0, mVideoEncoder.getVideoWidth(), mVideoEncoder.getVideoHeight());
                    mFramebuffer.drawFrame();
                    renderer.drawFrame();
                    // Swap; after this, MediaCodec.dequeueOutputBuffer has data to return
                    eglUtils.swap();
                }
                eglUtils.release();
            }
        };
        thread.start();
        mVideoEncoder.start();
    }

    public void stopVideo() {
        mVideoEncoder.stop();
        stop = true;
        mVideoEncoder = null;
    }

    // Camera operations
    private Size[] mSizes;

    private void initCamera2() {
        HandlerThread handlerThread = new HandlerThread("Camera2");
        handlerThread.start();
        mHandler = new Handler(handlerThread.getLooper());
        mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] CameraIdList = mCameraManager.getCameraIdList();
            mCameraId = CameraIdList[0];
            CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(mCameraId);
            characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map != null) {
                mSizes = map.getOutputSizes(SurfaceTexture.class);
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void openCamera2() {
        if (PermissionChecker.checkSelfPermission(getContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
            try {
                mCameraManager.openCamera(mCameraId, stateCallback, mHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }

    private CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            takePreview();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            if (mCameraDevice != null) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int i) {
        }
    };

    private void takePreview() {
        try {
            mSurface = new Surface(mSurfaceTexture);
            final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mSurface);
            mCameraDevice.createCaptureSession(Arrays.asList(mSurface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    if (null == mCameraDevice) return;
                    mCameraCaptureSession = cameraCaptureSession;
                    builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                    CaptureRequest previewRequest = builder.build();
                    try {
                        mCameraCaptureSession.setRepeatingRequest(previewRequest, null, mHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                }
            }, mHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private Size getPreferredPreviewSize(Size[] sizes, int width, int height) {
        List<Size> collectorSizes = new ArrayList<>();
        for (Size option : sizes) {
            if (width > height) {
                if (option.getWidth() > width && option.getHeight() > height) {
                    collectorSizes.add(option);
                }
            } else {
                if (option.getHeight() > width && option.getWidth() > height) {
                    collectorSizes.add(option);
                }
            }
        }
        if (collectorSizes.size() > 0) {
            return Collections.min(collectorSizes, new Comparator<Size>() {
                @Override
                public int compare(Size s1, Size s2) {
                    // cast to long so large resolutions cannot overflow the subtraction
                    return Long.signum((long) s1.getWidth() * s1.getHeight() - (long) s2.getWidth() * s2.getHeight());
                }
            });
        }
        return sizes[0];
    }
}
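A minimal wiring sketch for an Activity (not from the original project; it just taps the preview to toggle recording and assumes the CAMERA runtime permission has already been granted):

import android.app.Activity;
import android.os.Bundle;
import android.view.View;

public class VideoActivity extends Activity {
    private VideoSurfaceView videoView;
    private boolean recording = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        videoView = new VideoSurfaceView(this);
        setContentView(videoView);
        videoView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // tap the preview to start/stop recording
                if (recording) {
                    videoView.stopVideo();
                } else {
                    videoView.startVideo();
                }
                recording = !recording;
            }
        });
    }
}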