Android从5.0开始(API 21)推出了全新的摄像头访问接口:Camera2,相较于Camera1,使用复杂度提高了,但是同时相应的拍照和录像性能也更好了。理论上说,从5.0往后,Camera1仍然能用,但说实话既然推出了新的API,肯定会有适配性方面的问题。
本人的亲身经历是——开始时做MTK 4.4平台的智能设备开发,产品的视频通话(视频通话用过linphone和webrtc的框架,由于没有深入框架内部(引擎逻辑),整体感觉免费的就是会有这样和那样的问题)、拍照、录像等都是采用了Camera1实现,后来因项目需要公司转做MTK 5.1平台,由于Camera1仍然能用,就懒得去重新实现,再后来加入了CVBS摄像头的需求,突然发现使用Camera1无法录像了,才不得不转到Camera2的接口实现。因此,建议遇到跟我一样境况的小伙伴们,要舍得花一点学习成本,尽快转到Camera2。
第一部分,回顾Camera1
避免遗忘,先回顾一下Camera1的实现流程吧。
选择是否有预览界面,看起来好像有预览界面的代码更简洁:
// Camera1 preview-target setup: either a visible TextureView (UI case) or an
// off-screen OES GL texture wrapped in a SurfaceTexture (headless case).
private int[] cameraGlTextures;
private SurfaceTexture mSurfaceTexture;

if (mHasUi) {
    // With a UI, the SurfaceTexture arrives asynchronously from the
    // TextureView via MySurfaceTextureListener.onSurfaceTextureAvailable().
    logd("show UI");
    mTextureView = new TextureView(context);
    LayoutParams layoutParams = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    mTextureView.setLayoutParams(layoutParams);
    mTextureView.setSurfaceTextureListener(new MySurfaceTextureListener());
    group.addView(mTextureView);
} else {
    // Headless: create our own SurfaceTexture backed by an external OES
    // texture so the camera can stream frames without any visible view.
    if (null == mSurfaceTexture) {
        cameraGlTextures = new int[1];
        // Generate one texture pointer and bind it as an external texture.
        GLES20.glGenTextures(1, cameraGlTextures, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraGlTextures[0]);
        // Linear filtering and clamp-to-edge wrapping for the camera frames.
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        mSurfaceTexture = new SurfaceTexture(cameraGlTextures[0], true);
        // No frame-available listener is installed here.
        mSurfaceTexture.setOnFrameAvailableListener(null);
    }
    mWorker.sendEmptyMessageDelayed(TASK_INIT_CAMERA, 100);// start camera initialization
}
有预览界面的,重点在于 MySurfaceTextureListener 这个回调对象:
/**
 * Receives SurfaceTexture lifecycle callbacks from the preview TextureView
 * and drives camera start/stop accordingly.
 */
private final class MySurfaceTextureListener implements SurfaceTextureListener {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        logd("onSurfaceTextureAvailable");
        mSurfaceTexture = surface;// obtain the SurfaceTexture supplied by the TextureView
        if (mAlwaysOpen) {
            mWorker.sendEmptyMessage(TASK_INIT_CAMERA);// start camera initialization
        }
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        // Stop the preview; returning true lets the TextureView release the
        // SurfaceTexture (no further rendering into it after this call).
        stopPreview();
        return true;
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        // No-op: preview size changes are not handled here.
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // No-op: invoked on every new preview frame; nothing per-frame to do.
    }
}
看看摄像头初始化做了些什么吧:
// Worker-handler task: open the camera (if not yet open) and start preview.
case TASK_INIT_CAMERA:
    logd("TASK_INIT_CAMERA");
    // Drop any duplicate init requests still queued on the worker, so a burst
    // of requests results in a single initialization pass.
    if (mWorker.hasMessages(TASK_INIT_CAMERA)) {
        mWorker.removeMessages(TASK_INIT_CAMERA);
    }
    if (null == mCamera) {
        initCamera();
    }
    // Start preview only once the camera handle exists and preview is idle.
    if (null != mCamera && !mPreview) {
        startPreview();
    }
    break;
//initCamera()
/**
 * Acquires a Camera handle, preferring the back-facing camera and falling
 * back to the front-facing one, installs the error callback, and resets the
 * frame index and error counter. On total failure it reports the problem
 * through {@code openCmareaError} and returns without touching the counters.
 */
private void initCamera() {
    logd("initCamera");
    if (mCamera == null) {
        // Try back first, then front; stop at the first camera that opens.
        final int[] facings = {CameraInfo.CAMERA_FACING_BACK, CameraInfo.CAMERA_FACING_FRONT};
        final String[] facingNames = {"CAMERA_FACING_BACK", "CAMERA_FACING_FRONT"};
        for (int i = 0; mCamera == null && i < facings.length; i++) {
            try {
                mCamera = Camera.open(facings[i]);
                logd(facingNames[i]);
            } catch (Exception e) {
                logd("open camera failed:" + e.toString());
            }
        }
        if (mCamera == null) {
            // Neither camera could be opened; report and bail out.
            logd("error no camera");
            openCmareaError("connect camera error");
            return;
        }
        logd("open camera succeed:" + mCamera.toString());
        mCamera.setErrorCallback(mErrorCallback);// install the error callback
    }
    // Camera is available: reset the frame index and the error counter.
    mFramIndex = 0;
    mError = 0;
}
//打开错误时的一些处理
private void openCmareaError(String seasion) {
logd("openCmareaError:" + seasion);
try {
if (null != mCompater)
mCompater.setIgnore(seasion);
if (null != mListener)
mListener.openCamreaError(mCompater);
if (null != mPreviewListener)
mPreviewListener.openCamreaError();
mError++;
if (mAlwaysOpen) {