接上一篇SnapdragonCamera源码分析(二)OpenCamera流程继续分析,当Camera Device被成功open后,开始创建Session会话。
createSessions():
/**
 * Entry point for building a capture session once the CameraDevice has opened.
 * Dispatches to a video session for VIDEO/HFR scene modes, otherwise to the
 * default (photo) session. Wrapped in a systrace section when tracing is enabled.
 */
private void createSessions() {
    if(PersistUtil.isTraceEnable())
        Trace.beginSection("createSessions");
    // Bail out if the activity is pausing, the cameras never opened, or a video
    // intent is temporarily holding the pipeline.
    if (mPaused || !mCamerasOpened || mTempHoldVideoInVideoIntent) return;
    final int cameraId = mCurrentSceneMode.getCurrentId();
    Log.d(TAG,"createSessions : Current SceneMode is "+mCurrentSceneMode.mode);
    switch (mCurrentSceneMode.mode) {
        case VIDEO:
            createSessionForVideo(cameraId);
            break;
        case HFR:
            // Push the configured high-frame-rate value into settings before
            // creating the video session.
            if (!HFR_RATE.equals("")) {
                mSettingsManager.setValue(SettingsManager.KEY_VIDEO_HIGH_FRAME_RATE, HFR_RATE);
            }
            createSessionForVideo(cameraId);
            break;
        default:
            createSession(cameraId);
    }
    if (PersistUtil.isTraceEnable())
        Trace.endSection();
}
1、判断CameraActivity是否处于onPause()、Camera是否成功打开、是否是录像等;
2、根据当前mCurrentSceneMode获取当前需要打开的CameraID和创建Session的方式;根据前两篇分析此时mCurrentSceneMode应为“Photo”模式的,故调用createSession()方法。
createSession()
/**
 * Creates the camera preview CaptureSession for still-photo modes.
 * Builds the preview request, collects every output surface required by the
 * current configuration (preview, frame-processor inputs, JPEG/RAW/YUV
 * ImageReaders, HEIF), then creates the session through the appropriate
 * CameraDevice API. Session state is reported asynchronously through the
 * captureSessionCallback declared inline.
 */
private void createSession(final int id) {
    // NOTE(review): the value logged under "mCameraOpened:" is the *negation*
    // (!mCameraOpened[id]) and "mCameraDevice:" logs whether the device is null —
    // both labels read inverted in logcat output.
    Log.d(TAG, "createSession,id: " + id + ",mPaused:" + mPaused + ",mCameraOpened:" + !mCameraOpened[id] + ",mCameraDevice:" + (mCameraDevice[id] == null));
    if (mPaused || !mCameraOpened[id] || (mCameraDevice[id] == null)) return;
    List<Surface> list = new LinkedList<Surface>();
    mState[id] = STATE_PREVIEW;
    mControlAFMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    try {
        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder[id] = getRequestBuilder(id);
        mPreviewRequestBuilder[id].setTag(id);
        // Session state callback: starts the repeating preview request once the
        // session has been configured by the framework/HAL.
        CameraCaptureSession.StateCallback captureSessionCallback =
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession cameraCaptureSession) {
                        if (mPaused || null == mCameraDevice[id] ||
                                cameraCaptureSession == null) {
                            return;
                        }
                        Log.i(TAG, "cameracapturesession - onConfigured " + id);
                        setCameraModeSwitcherAllowed(true);
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession[id] = cameraCaptureSession;
                        if (id == getMainCameraId()) {
                            mCurrentSession = cameraCaptureSession;
                        }
                        initializePreviewConfiguration(id);
                        setDisplayOrientation();
                        updateFaceDetection();
                        try {
                            if (isBackCamera() && getCameraMode() == DUAL_MODE) {
                                linkBayerMono(id);
                                mIsLinked = true;
                            }
                            // Finally, we start displaying the camera preview.
                            // for cases where we are in dual mode with mono preview off,
                            // don't set repeating request for mono
                            if (mCaptureSession[id] == null) {
                                return;
                            }
                            if (id == MONO_ID && !canStartMonoPreview()
                                    && getCameraMode() == DUAL_MODE) {
                                mCaptureSession[id].capture(mPreviewRequestBuilder[id]
                                        .build(), mCaptureCallback, mCameraHandler);
                            } else {
                                mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
                                        .build(), mCaptureCallback, mCameraHandler);
                            }
                            // Voice-interaction capture: trigger a shot shortly after
                            // the preview starts.
                            if (mIntentMode == INTENT_MODE_STILL_IMAGE_CAMERA &&
                                    mIsVoiceTakePhote) {
                                mHandler.sendEmptyMessageDelayed(VOICE_INTERACTION_CAPTURE, 500);
                            }
                            // Notify the relevant post-processing pipeline that the
                            // session now exists.
                            if (isClearSightOn()) {
                                ClearSightImageProcessor.getInstance().onCaptureSessionConfigured(id == BAYER_ID, cameraCaptureSession);
                            } else if (mChosenImageFormat == ImageFormat.PRIVATE && id == getMainCameraId()) {
                                mPostProcessor.onSessionConfigured(mCameraDevice[id], mCaptureSession[id]);
                            }
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        } catch (IllegalStateException e) {
                            e.printStackTrace();
                        } catch (IllegalArgumentException e) {
                            e.printStackTrace();
                        }
                        mCurrentSessionClosed = false;
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
                        Log.e(TAG, "cameracapturesession - onConfigureFailed " + id);
                        setCameraModeSwitcherAllowed(true);
                        if (mActivity.isFinishing()) {
                            return;
                        }
                        Toast.makeText(mActivity, "Camera Initialization Failed",
                                Toast.LENGTH_SHORT).show();
                    }

                    @Override
                    public void onClosed(CameraCaptureSession session) {
                        Log.d(TAG, "cameracapturesession - onClosed");
                        setCameraModeSwitcherAllowed(true);
                    }
                };
        Surface surface = null;
        try {
            waitForPreviewSurfaceReady();
        } catch (RuntimeException e) {
            Log.v(TAG,
                    "createSession: normal status occur Time out waiting for surface ");
        }
        // NOTE(review): if the wait above timed out, the preview surface may still be
        // invalid; the unconditional addTarget(surface) further down then relies on
        // the NullPointerException catch at the bottom of this method.
        surface = getPreviewSurfaceForSession(id);
        if (id == getMainCameraId()) {
            mFrameProcessor.setOutputSurface(surface);
            mFrameProcessor.setVideoOutputSurface(null);
        }
        if (isClearSightOn()) {
            // ClearSight (dual-camera) owns session creation itself.
            if (surface != null) {
                mPreviewRequestBuilder[id].addTarget(surface);
                list.add(surface);
            }
            ClearSightImageProcessor.getInstance().createCaptureSession(
                    id == BAYER_ID, mCameraDevice[id], list, captureSessionCallback);
        } else if (id == getMainCameraId()) {
            if (mFrameProcessor.isFrameFilterEnabled() && !mDeepPortraitMode) {
                // Frame filters render rotated; swap width/height on the holder.
                mActivity.runOnUiThread(new Runnable() {
                    public void run() {
                        SurfaceHolder surfaceHolder = mUI.getSurfaceHolder();
                        if (surfaceHolder != null) {
                            surfaceHolder.setFixedSize(
                                    mPreviewSize.getHeight(), mPreviewSize.getWidth());
                        }
                    }
                });
            }
            // Frame-processor input surfaces are both request targets and session outputs.
            List<Surface> surfaces = mFrameProcessor.getInputSurfaces();
            for (Surface surs : surfaces) {
                mPreviewRequestBuilder[id].addTarget(surs);
                list.add(surs);
            }
            if (!mSettingsManager.isHeifWriterEncoding() && !mYUVCallback) {
                list.add(mImageReader[id].getSurface());
            }
            if (mSaveRaw) {
                list.add(mRawImageReader[id].getSurface());
            }
            if (mYUVCallback) {
                Log.d(TAG, "yuv=" + mYUVCount);
                for (int i = 0; i < mYUVCount; i++) {
                    list.add(mYUVImageReader[i].getSurface());
                }
            }
            mPreviewRequestBuilder[id].addTarget(surface);
            List<OutputConfiguration> outputConfigurations = null;
            outputConfigurations = new ArrayList<OutputConfiguration>();
            for (Surface s : list) {
                outputConfigurations.add(new OutputConfiguration(s));
            }
            if (mSettingsManager.isHeifWriterEncoding()) {
                if (mInitHeifWriter != null) {
                    // HEIF writer surface is shared between preview and snapshot.
                    mHeifOutput = new OutputConfiguration(mInitHeifWriter.getInputSurface());
                    mHeifOutput.enableSurfaceSharing();
                    outputConfigurations.add(mHeifOutput);
                }
            }
            if (mChosenImageFormat == ImageFormat.YUV_420_888 || mChosenImageFormat == ImageFormat.PRIVATE) {
                if (mPostProcessor.isZSLEnabled()) {
                    // ZSL: reprocessable session with the ImageReader acting as the
                    // reprocess input.
                    mPreviewRequestBuilder[id].addTarget(mImageReader[id].getSurface());
                    list.add(mPostProcessor.getZSLReprocessImageReader().getSurface());
                    if (mSaveRaw) {
                        mPreviewRequestBuilder[id].addTarget(mRawImageReader[id].getSurface());
                    }
                    mCameraDevice[id].createReprocessableCaptureSession(new InputConfiguration(mImageReader[id].getWidth(),
                            mImageReader[id].getHeight(), mImageReader[id].getImageFormat()), list, captureSessionCallback, mCameraHandler);
                } else {
                    if (mSettingsManager.isHeifWriterEncoding() && outputConfigurations != null) {
                        mCameraDevice[id].createCaptureSessionByOutputConfigurations(outputConfigurations,
                                captureSessionCallback, mCameraHandler);
                    } else {
                        mCameraDevice[id].createCaptureSession(list, captureSessionCallback, mCameraHandler);
                    }
                }
            } else {
                if (ApiHelper.isAndroidPOrHigher() && outputConfigurations != null) {
                    // API 28+: pass a vendor operating mode via SessionConfiguration.
                    Log.i(TAG, "list size:" + list.size());
                    createCameraSessionWithSessionConfiguration(id, outputConfigurations,
                            captureSessionCallback, mCameraHandler, mPreviewRequestBuilder[id]);
                } else {
                    mCameraDevice[id].createCaptureSession(list, captureSessionCallback, mCameraHandler);
                }
            }
        } else {
            // Auxiliary (non-main) camera: just the preview surface plus its ImageReader.
            if (surface != null) {
                mPreviewRequestBuilder[id].addTarget(surface);
                list.add(surface);
            }
            list.add(mImageReader[id].getSurface());
            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice[id].createCaptureSession(list, captureSessionCallback, mCameraHandler);
        }
    } catch (CameraAccessException | NullPointerException e) {
        Log.d(TAG, "create session error");
        setCameraModeSwitcherAllowed(true);
        e.printStackTrace();
    } catch (IllegalStateException e) {
        setCameraModeSwitcherAllowed(true);
        Log.v(TAG, "createSession: mPaused status occur Time out waiting for surface ");
    } catch (IllegalArgumentException e) {
        setCameraModeSwitcherAllowed(true);
        e.printStackTrace();
    }
}
1、首先判断CameraActivity当前状态是否处于onPause()、当前Camera Device是否成功打开:
Log.d(TAG, "createSession,id: " + id + ",mPaused:" + mPaused + ",mCameraOpened:" + !mCameraOpened[id] + ",mCameraDevice:" + (mCameraDevice[id] == null));
if (mPaused || !mCameraOpened[id] || (mCameraDevice[id] == null)) return;
2、创建流集合、初始化状态为预览状态、设置AF模式为CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
3、创建CaptureRequest.Builder,并设置tag为Camera Device ID:
// We set up a CaptureRequest.Builder with the output Surface.
mPreviewRequestBuilder[id] = getRequestBuilder(id);
mPreviewRequestBuilder[id].setTag(id);
/**
 * Builds the preview CaptureRequest.Builder for the given camera: the
 * ZERO_SHUTTER_LAG template when ZSL post-processing is enabled on the main
 * camera, otherwise the PREVIEW template. Vendor session parameters are applied
 * before returning.
 */
private CaptureRequest.Builder getRequestBuilder(int id) throws CameraAccessException {
    CaptureRequest.Builder builder;
    if (mPostProcessor.isZSLEnabled() && id == getMainCameraId()) {
        builder = mCameraDevice[id].createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
    } else {
        builder = mCameraDevice[id].createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    }
    // NOTE(review): createCaptureRequest() is documented non-null (it throws on
    // failure), so this null check appears to be dead code.
    if (builder != null) {
        applySessionParameters(builder);
    }
    return builder;
}
调用CameraDevice标准接口创建类型为预览类型的CaptureRequest.Builder,并开始设置相关参数设置:
/**
 * Applies vendor session parameters (early PCR and MCTF) to a freshly created
 * request builder before the session is created.
 */
private void applySessionParameters(CaptureRequest.Builder builder) {
    applyEarlyPCR(builder);
    applyMctf(builder);
}
设置Tag
"org.codeaurora.qcamera3.sessionParameters.numPCRsBeforeStreamOn"为1:
/**
 * Sets the vendor session parameter
 * "org.codeaurora.qcamera3.sessionParameters.numPCRsBeforeStreamOn" to 1 so the
 * HAL can issue process-capture-requests before stream-on (faster startup).
 */
private void applyEarlyPCR(CaptureRequest.Builder request) {
    try {
        request.set(CaptureModule.earlyPCR, 1);
    } catch (IllegalArgumentException ignored) {
        // Vendor tag not supported by this HAL; early PCR is a best-effort
        // startup optimization, so the failure is intentionally ignored.
    }
}
如果底层支持"org.codeaurora.qcamera3.sessionParameters.enableMCTFwithReferenceFrame"这个Tag,且当前处于Video或者HFR等mode,则设置这个Tag:
/**
 * Enables/disables the vendor MCTF (motion-compensated temporal filtering) tag
 * on the request builder. Applied only when SW MCTF is supported and the current
 * scene mode is VIDEO or HFR; the on/off byte comes from the persisted value.
 */
private void applyMctf(CaptureRequest.Builder builder) {
    //add for mctf tag
    if (mSettingsManager.isSwMctfSupported() && (mCurrentSceneMode.mode == CameraMode.VIDEO || mCurrentSceneMode.mode == CameraMode.HFR)) {
        int mctfValue = PersistUtil.mctfValue(); // renamed: was misspelled "mctfVaule"
        try {
            builder.set(CaptureModule.mctf, (byte) (mctfValue == 1 ? 0x01 : 0x00));
        } catch (IllegalArgumentException e) {
            // Vendor tag absent on this device/HAL.
            Log.d(TAG, "mctf no vendor tag");
        }
    }
}
4、声明Session创建状态回调CameraCaptureSession.StateCallback:
CameraCaptureSession.StateCallback captureSessionCallback =
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
if (mPaused || null == mCameraDevice[id] ||
cameraCaptureSession == null) {
return;
}
Log.i(TAG, "cameracapturesession - onConfigured " + id);
setCameraModeSwitcherAllowed(true);
// When the session is ready, we start displaying the preview.
mCaptureSession[id] = cameraCaptureSession;
if (id == getMainCameraId()) {
mCurrentSession = cameraCaptureSession;
}
initializePreviewConfiguration(id);
setDisplayOrientation();
updateFaceDetection();
try {
if (isBackCamera() && getCameraMode() == DUAL_MODE) {
linkBayerMono(id);
mIsLinked = true;
}
// Finally, we start displaying the camera preview.
// for cases where we are in dual mode with mono preview off,
// don't set repeating request for mono
if (mCaptureSession[id] == null) {
return;
}
if (id == MONO_ID && !canStartMonoPreview()
&& getCameraMode() == DUAL_MODE) {
mCaptureSession[id].capture(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
} else {
mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
}
if (mIntentMode == INTENT_MODE_STILL_IMAGE_CAMERA &&
mIsVoiceTakePhote) {
mHandler.sendEmptyMessageDelayed(VOICE_INTERACTION_CAPTURE, 500);
}
if (isClearSightOn()) {
ClearSightImageProcessor.getInstance().onCaptureSessionConfigured(id == BAYER_ID, cameraCaptureSession);
} else if (mChosenImageFormat == ImageFormat.PRIVATE && id == getMainCameraId()) {
mPostProcessor.onSessionConfigured(mCameraDevice[id], mCaptureSession[id]);
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
mCurrentSessionClosed = false;
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e(TAG, "cameracapturesession - onConfigureFailed " + id);
setCameraModeSwitcherAllowed(true);
if (mActivity.isFinishing()) {
return;
}
Toast.makeText(mActivity, "Camera Initialization Failed",
Toast.LENGTH_SHORT).show();
}
@Override
public void onClosed(CameraCaptureSession session) {
Log.d(TAG, "cameracapturesession - onClosed");
setCameraModeSwitcherAllowed(true);
}
};
1、如果Session会话按照当前配置创建成功,则回调onConfigured(),参数即为所成功创建的Session会话实例;
2、如果Session会话无法按照相应配置创建,则回调onConfigureFailed();
3、如果当前Session会话关闭,则回调onClosed();
5、判断当前时刻CaptureUI的预览surface view是否创建成功并可用,获取此预览surface:
Surface surface = null;
try {
waitForPreviewSurfaceReady();
} catch (RuntimeException e) {
Log.v(TAG,
"createSession: normal status occur Time out waiting for surface ");
}
surface = getPreviewSurfaceForSession(id);
/**
 * Returns the Surface to use as the preview output for the given camera id.
 * In back-camera DUAL_MODE the mono camera renders into a dummy surface; every
 * other combination uses the on-screen SurfaceHolder's surface.
 */
public Surface getPreviewSurfaceForSession(int id) {
    boolean monoInDualMode =
            isBackCamera() && getCameraMode() == DUAL_MODE && id == MONO_ID;
    if (monoInDualMode) {
        return mUI.getMonoDummySurface();
    }
    return mUI.getSurfaceHolder().getSurface();
}
6、将surface设置给帧处理器:
if (id == getMainCameraId()) {
mFrameProcessor.setOutputSurface(surface);
mFrameProcessor.setVideoOutputSurface(null);
}
7、如果后置双摄模式且“ClearSight”(pref_camera2_clearsight_key)开关打开时,则直接将此surface进行配流操作:
if (isClearSightOn()) {
if (surface != null) {
mPreviewRequestBuilder[id].addTarget(surface);
list.add(surface);
}
ClearSightImageProcessor.getInstance().createCaptureSession(
id == BAYER_ID, mCameraDevice[id], list, captureSessionCallback);
}
配流状态通过captureSessionCallback回调获取。
8、如果设置了滤镜,但不处于深度人像模式,重设预览surface的size:
else if (id == getMainCameraId()) {
if (mFrameProcessor.isFrameFilterEnabled() && !mDeepPortraitMode) {
mActivity.runOnUiThread(new Runnable() {
public void run() {
SurfaceHolder surfaceHolder = mUI.getSurfaceHolder();
if (surfaceHolder != null) {
surfaceHolder.setFixedSize(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
}
});
}
8.1>、开始准备预览流:
List<Surface> surfaces = mFrameProcessor.getInputSurfaces();
for (Surface surs : surfaces) {
mPreviewRequestBuilder[id].addTarget(surs);
list.add(surs);
}
/**
 * Returns the set of surfaces the frame processor should feed:
 * - deep-portrait mode: only the YUV ImageReader surface;
 * - no filters at all: the pass-through preview (and video) surfaces;
 * - preview-only filters: pass-through surfaces plus the YUV reader;
 * - final (snapshot) filters present: only the YUV reader surface.
 */
public List<Surface> getInputSurfaces() {
    List<Surface> surfaces = new ArrayList<Surface>();
    if (mIsDeepPortrait) {
        surfaces.add(getReaderSurface());
        return surfaces;
    }
    if (mPreviewFilters.size() == 0 && mFinalFilters.size() == 0) {
        // No filtering: render straight to the original surfaces.
        surfaces.add(mSurfaceAsItIs);
        if (mIsVideoOn) {
            surfaces.add(mVideoSurfaceAsItIs);
        }
    } else if (mFinalFilters.size() == 0) {
        // Preview filters only: keep pass-through outputs and also feed the reader.
        surfaces.add(mSurfaceAsItIs);
        if (mIsVideoOn) {
            surfaces.add(mVideoSurfaceAsItIs);
        }
        surfaces.add(getReaderSurface());
    } else {
        surfaces.add(getReaderSurface());
    }
    return surfaces;
}
获取ImageReader的surface:
/**
 * Returns the YUV input ImageReader's surface, or null if the reader has not
 * been allocated yet. Guarded by mAllocationLock against concurrent
 * allocation/teardown.
 */
private Surface getReaderSurface() {
    synchronized (mAllocationLock) {
        if (mInputImageReader == null) {
            return null;
        }
        return mInputImageReader.getSurface();
    }
}
YUV的ImageReader:
mInputImageReader = ImageReader.newInstance(mSize.getWidth(), mSize.getHeight(), ImageFormat.YUV_420_888, 12);
可以看到如果未设置帧滤镜的话,直接使用预览surfaceView的surface,如果处于深度人像或者其他情况下则使用配置的ImageReader YUV的surface。
8.2>、如果未使用Heif编码且未配置YUV回调(mYUVCallback为false),则添加ImageReader surface,此处的ImageReader流是在onResumeAfterSuper() -> openProcessors() -> setUpCameraOutputs()中配置的:
if (!mSettingsManager.isHeifWriterEncoding() && !mYUVCallback) {
list.add(mImageReader[id].getSurface());
}
8.3>、如果需要保存Raw图,则添加配置的RawImageReader的surface:
if (mSaveRaw) {
list.add(mRawImageReader[id].getSurface());
}
8.4>、如果支持Yuv的流,则将配置的Yuv流全数添加:
if (mYUVCallback) {
Log.d(TAG, "yuv=" + mYUVCount);
for (int i = 0; i < mYUVCount; i++) {
list.add(mYUVImageReader[i].getSurface());
}
}
8.5>、配置输入、输出流:
mPreviewRequestBuilder[id].addTarget(surface);
List<OutputConfiguration> outputConfigurations = null;
outputConfigurations = new ArrayList<OutputConfiguration>();
for (Surface s : list) {
outputConfigurations.add(new OutputConfiguration(s));
}
8.6>、如果支持Heif编码,构建相应surface的Configurations:
if (mSettingsManager.isHeifWriterEncoding()) {
if (mInitHeifWriter != null) {
mHeifOutput = new OutputConfiguration(mInitHeifWriter.getInputSurface());
mHeifOutput.enableSurfaceSharing();
outputConfigurations.add(mHeifOutput);
}
}
8.7>、当前设置配置下的图像格式为Yuv或Private时;根据相关设置项,配置添加相关的ImageReader流,去配置创建Session:
if (mChosenImageFormat == ImageFormat.YUV_420_888 || mChosenImageFormat == ImageFormat.PRIVATE) {
if (mPostProcessor.isZSLEnabled()) {
mPreviewRequestBuilder[id].addTarget(mImageReader[id].getSurface());
list.add(mPostProcessor.getZSLReprocessImageReader().getSurface());
if (mSaveRaw) {
mPreviewRequestBuilder[id].addTarget(mRawImageReader[id].getSurface());
}
mCameraDevice[id].createReprocessableCaptureSession(new InputConfiguration(mImageReader[id].getWidth(),
mImageReader[id].getHeight(), mImageReader[id].getImageFormat()), list, captureSessionCallback, mCameraHandler);
} else {
if (mSettingsManager.isHeifWriterEncoding() && outputConfigurations != null) {
mCameraDevice[id].createCaptureSessionByOutputConfigurations(outputConfigurations,
captureSessionCallback, mCameraHandler);
} else {
mCameraDevice[id].createCaptureSession(list, captureSessionCallback, mCameraHandler);
}
}
}
8.8>、当设置的图像格式是其它格式时;判断Android API级别是否为28(Android P)及以上,是则使用
createCameraSessionWithSessionConfiguration(id, outputConfigurations,
captureSessionCallback, mCameraHandler, mPreviewRequestBuilder[id]);
/**
 * Creates a capture session through the (reflected) SessionConfiguration API so
 * a vendor operating mode (e.g. fast-switch FS2) can be handed down to the HAL.
 * Session parameters from {@code initialRequest} are attached before creation.
 * Reflection is kept because the operating-mode constructor overload is hidden.
 */
private void createCameraSessionWithSessionConfiguration(int cameraId,
        List<OutputConfiguration> outConfigurations, CameraCaptureSession.StateCallback listener,
        Handler handler, CaptureRequest.Builder initialRequest) {
    int opMode = SESSION_REGULAR;
    String valueFS2 = mSettingsManager.getValue(SettingsManager.KEY_SENSOR_MODE_FS2_VALUE);
    if (valueFS2 != null) {
        try {
            // Fixed: Integer.parseInt was previously unguarded, so a malformed
            // setting value would have crashed session creation.
            if (Integer.parseInt(valueFS2) == 1) {
                opMode |= STREAM_CONFIG_MODE_FS2;
            }
        } catch (NumberFormatException e) {
            // Fall back to SESSION_REGULAR on a malformed setting value.
            Log.w(TAG, "createCameraSessionWithSessionConfiguration: invalid FS2 value " + valueFS2);
        }
    }
    Log.v(TAG, " createCameraSessionWithSessionConfiguration opMode: " + opMode);
    try {
        Class clazz = Class.forName("android.hardware.camera2.params.SessionConfiguration");
        Object sessionConfig = clazz.getConstructors()[0].newInstance(
                opMode, outConfigurations,
                new HandlerExecutor(handler), listener);
        // Session parameters must be attached before the session is created.
        Method setSessionParameters = clazz.getDeclaredMethod(
                "setSessionParameters", CaptureRequest.class);
        setSessionParameters.invoke(sessionConfig, initialRequest.build());
        Method createCaptureSession = CameraDevice.class.getDeclaredMethod(
                "createCaptureSession", clazz);
        createCaptureSession.invoke(mCameraDevice[cameraId], sessionConfig);
    } catch (Exception exception) {
        Log.w(TAG, "createCameraSessionWithSessionConfiguration reflection failed");
        exception.printStackTrace();
    }
}
通过反射方式构建SessionConfiguration传入opMode交由HAL;调用createCaptureSession创建Session会话。
否则直接调用标准接口createCaptureSession()创建会话:
mCameraDevice[id].createCaptureSession(list, captureSessionCallback, mCameraHandler);
9、当未打开ClearSight开关,且当前模式下CameraID与所需创建的Session会话的CameraID不一致时,则添加预览流、ImageReader流后调用标准接口创建Session会话:
if (surface != null) {
mPreviewRequestBuilder[id].addTarget(surface);
list.add(surface);
}
list.add(mImageReader[id].getSurface());
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice[id].createCaptureSession(list, captureSessionCallback, mCameraHandler);
从上面可以看到当Camera Device被成功open后,就开始着手创建与底层交流的Session会话,根据需要配置所需要使用的流。配流、Session会话的创建状态则通过回调CameraCaptureSession.StateCallback接口来获取。
当配流、Session会话创建配置成功时,回调CameraCaptureSession.StateCallback#onConfigured()方法:
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
if (mPaused || null == mCameraDevice[id] ||
cameraCaptureSession == null) {
return;
}
Log.i(TAG, "cameracapturesession - onConfigured " + id);
setCameraModeSwitcherAllowed(true);
// When the session is ready, we start displaying the preview.
mCaptureSession[id] = cameraCaptureSession;
if (id == getMainCameraId()) {
mCurrentSession = cameraCaptureSession;
}
initializePreviewConfiguration(id);
setDisplayOrientation();
updateFaceDetection();
try {
if (isBackCamera() && getCameraMode() == DUAL_MODE) {
linkBayerMono(id);
mIsLinked = true;
}
// Finally, we start displaying the camera preview.
// for cases where we are in dual mode with mono preview off,
// don't set repeating request for mono
if (mCaptureSession[id] == null) {
return;
}
if (id == MONO_ID && !canStartMonoPreview()
&& getCameraMode() == DUAL_MODE) {
mCaptureSession[id].capture(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
} else {
mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
}
if (mIntentMode == INTENT_MODE_STILL_IMAGE_CAMERA &&
mIsVoiceTakePhote) {
mHandler.sendEmptyMessageDelayed(VOICE_INTERACTION_CAPTURE, 500);
}
if (isClearSightOn()) {
ClearSightImageProcessor.getInstance().onCaptureSessionConfigured(id == BAYER_ID, cameraCaptureSession);
} else if (mChosenImageFormat == ImageFormat.PRIVATE && id == getMainCameraId()) {
mPostProcessor.onSessionConfigured(mCameraDevice[id], mCaptureSession[id]);
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
mCurrentSessionClosed = false;
}
1、异常状态判断,是否CameraActivity处于onPause()、CameraDevice是否不为空,及其创建的Session实例是否不为空;
2、重置状态,使能模式切换;
3、保存创建的Session实例,初始化mCaptureSession[id]、mCurrentSession,准备开始启动预览;
4、初始化设置预览配置:
initializePreviewConfiguration(id);
设置AF Trigger、闪光灯,及其他诸如AE、AF、AWB等:
/**
 * Resets the AF trigger to IDLE and applies flash plus the common 3A/tuning
 * settings to the preview request before it is first submitted.
 */
private void initializePreviewConfiguration(int id) {
    mPreviewRequestBuilder[id].set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest
            .CONTROL_AF_TRIGGER_IDLE);
    applyFlash(mPreviewRequestBuilder[id], id);
    applyCommonSettings(mPreviewRequestBuilder[id], id);
}
根据设置菜单项设置闪光灯:
applyFlash(mPreviewRequestBuilder[id], id);
/**
 * Applies the user's flash-mode setting to the request, or forces flash off
 * when the camera does not support flash.
 */
private void applyFlash(CaptureRequest.Builder request, int id) {
    if (mSettingsManager.isFlashSupported(id)) {
        // NOTE(review): PRO_MODE reading the *video* flash key looks suspicious —
        // confirm this mapping is intentional.
        String value = mSettingsManager.getValue(mCurrentSceneMode.mode == CameraMode.PRO_MODE ?
                SettingsManager.KEY_VIDEO_FLASH_MODE : SettingsManager.KEY_FLASH_MODE);
        applySnapshotFlash(request, value);
    } else {
        request.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
    }
}
根据设置菜单项设置AE、AF、AWB、ISO等:
applyCommonSettings(mPreviewRequestBuilder[id], id);
/**
 * Applies the user-facing 3A and image-tuning settings (AF/AWB/AE, ISO, zoom,
 * scene mode, vendor stats, tone mapping, ...) from the settings manager to the
 * given request builder.
 */
private void applyCommonSettings(CaptureRequest.Builder builder, int id) {
    builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
    builder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode);
    applyAfModes(builder);
    applyFaceDetection(builder);
    applyTouchTrackFocus(builder);
    applyWhiteBalance(builder);
    applyExposure(builder);
    applyIso(builder);
    applyColorEffect(builder);
    applySceneMode(builder);
    applyZoom(builder, id);
    applyInstantAEC(builder);
    applySaturationLevel(builder);
    applyAntiBandingLevel(builder);
    applySharpnessControlModes(builder);
    applyExposureMeteringModes(builder);
    applyHistogram(builder);
    applyAWBCCTAndAgain(builder);
    applyBGStats(builder);
    applyBEStats(builder);
    applyWbColorTemperature(builder);
    applyToneMapping(builder);
}
5、设置初始化屏幕方向角度mDisplayRotation、mDisplayOrientation等:
setDisplayOrientation();
/**
 * Caches the current display rotation and the camera2 display orientation for
 * the main camera; used by face detection and preview transforms.
 */
private void setDisplayOrientation() {
    mDisplayRotation = CameraUtil.getDisplayRotation(mActivity);
    mDisplayOrientation = CameraUtil.getDisplayOrientationForCamera2(
            mDisplayRotation, getMainCameraId());
}
6、根据是否开启人脸检测开关更新绘制UI,即预览界面人脸标记框:
updateFaceDetection();
/**
 * Starts or stops the on-screen face-detection rendering based on the
 * KEY_FACE_DETECTION setting. Hops to the UI thread because it touches views.
 */
private void updateFaceDetection() {
    final String value = mSettingsManager.getValue(SettingsManager.KEY_FACE_DETECTION);
    mActivity.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            // Stop rendering when the feature is off or preview-time FD
            // rendering is disabled.
            // Fixed: the if-branch spanned multiple lines without braces while
            // the else-branch was braced — brittle and inconsistent.
            if (value == null || value.equals("off")
                    || !mSettingsManager.isFDRenderingAtPreview()) {
                mUI.onStopFaceDetection();
            } else {
                mUI.onStartFaceDetection(mDisplayOrientation,
                        mSettingsManager.isFacingFront(getMainCameraId()),
                        mCropRegion[getMainCameraId()],
                        mSettingsManager.getSensorActiveArraySize(getMainCameraId()));
            }
        }
    });
}
7、后置双摄模式下,设置高通双摄Tag:
if (isBackCamera() && getCameraMode() == DUAL_MODE) {
linkBayerMono(id);
mIsLinked = true;
}
/**
 * Links the Bayer and Mono cameras through Qualcomm's dual-cam vendor tags so
 * the HAL can correlate their metadata: the Bayer sensor is flagged as "main"
 * and each request carries the related camera's session id.
 */
public void linkBayerMono(int id) {
    Log.d(TAG, "linkBayerMono " + id);
    if (id == BAYER_ID) {
        mPreviewRequestBuilder[id].set(BayerMonoLinkEnableKey, (byte) 1);
        mPreviewRequestBuilder[id].set(BayerMonoLinkMainKey, (byte) 1);
        mPreviewRequestBuilder[id].set(BayerMonoLinkSessionIdKey, MONO_ID);
    } else if (id == MONO_ID) {
        mPreviewRequestBuilder[id].set(BayerMonoLinkEnableKey, (byte) 1);
        mPreviewRequestBuilder[id].set(BayerMonoLinkMainKey, (byte) 0);
        mPreviewRequestBuilder[id].set(BayerMonoLinkSessionIdKey, BAYER_ID);
    }
}
CaptureRequest.Key<Byte> BayerMonoLinkEnableKey =
new CaptureRequest.Key<>("org.codeaurora.qcamera3.dualcam_link_meta_data.enable",
Byte.class);
CaptureRequest.Key<Byte> BayerMonoLinkMainKey =
new CaptureRequest.Key<>("org.codeaurora.qcamera3.dualcam_link_meta_data.is_main",
Byte.class);
CaptureRequest.Key<Integer> BayerMonoLinkSessionIdKey =
new CaptureRequest.Key<>("org.codeaurora.qcamera3.dualcam_link_meta_data" +
".related_camera_id", Integer.class);
8、对于处于双摄模式且关闭了Mono(黑白)摄像头预览的情况,不为其开启重复请求预览,仅下发单次capture请求:
// Finally, we start displaying the camera preview.
// for cases where we are in dual mode with mono preview off,
// don't set repeating request for mono
if (mCaptureSession[id] == null) {
return;
}
if (id == MONO_ID && !canStartMonoPreview()
&& getCameraMode() == DUAL_MODE) {
mCaptureSession[id].capture(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
}
其他情况下则直接设置重复请求预览:
else {
mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
}
可以看到开启预览请求都是调用的标准接口capture()、setRepeatingRequest(),相关预览状态则通过mCaptureCallback(CameraCaptureSession.CaptureCallback)回调。
9、申请开启预览后,如果当前是语音拍照模式,则延时500ms发送handler消息“VOICE_INTERACTION_CAPTURE”请求拍照:
if (mIntentMode == INTENT_MODE_STILL_IMAGE_CAMERA &&
mIsVoiceTakePhote) {
mHandler.sendEmptyMessageDelayed(VOICE_INTERACTION_CAPTURE, 500);
}
10、如果当前打开ClearSight开关或者当前图像格式为ImageFormat.PRIVATE时,分别调用onCaptureSessionConfigured或者onSessionConfigured通知相关处理器Session配置创建成功:
if (isClearSightOn()) {
ClearSightImageProcessor.getInstance().onCaptureSessionConfigured(id == BAYER_ID, cameraCaptureSession);
} else if (mChosenImageFormat == ImageFormat.PRIVATE && id == getMainCameraId()) {
mPostProcessor.onSessionConfigured(mCameraDevice[id], mCaptureSession[id]);
}
当预览请求发送后,其成功与否状态通过mCaptureCallback(CameraCaptureSession.CaptureCallback)回调:
/**
* A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture.
*/
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(CameraCaptureSession session,
CaptureRequest request,
CaptureResult partialResult) {
...
}
@Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request,
TotalCaptureResult result) {
...
}
};
1、当图像捕获部分进行时就会回调该方法,此时一些(但不是全部)结果是可用的,此时回调onCaptureProgressed();
@Override
public void onCaptureProgressed(CameraCaptureSession session,
                                CaptureRequest request,
                                CaptureResult partialResult) {
    // Partial results are used only to refresh the face-indicator overlay early.
    // NOTE(review): getTag() would NPE if a request without a tag ever reaches
    // this callback — all requests built in createSession() do set the tag.
    int id = (int) partialResult.getRequest().getTag();
    if (id == getMainCameraId()) {
        Face[] faces = partialResult.get(CaptureResult.STATISTICS_FACES);
        if (BSGC_DEBUG)
            Log.d(BSGC_TAG, "onCaptureProgressed Detected Face size = " + Integer.toString(faces == null ? 0 : faces.length));
        if (faces != null && mSettingsManager.isFDRenderingAtPreview()) {
            if (isBsgcDetecionOn() || isFacialContourOn() || isFacePointOn()) {
                // Include smile/gaze/blink (BSGC) metadata when those features are on.
                updateFaceView(faces, getBsgcInfo(partialResult, faces.length));
            } else {
                updateFaceView(faces, null);
            }
        }
    }
}
人脸检测相关,根据每帧检测到的相关人脸坐标信息更新预览界面人脸标识框。
2、当图像捕捉完全完成时,并且结果已经可用时,此时回调onCaptureCompleted();
@Override
public void onCaptureCompleted(CameraCaptureSession session,
                               CaptureRequest request,
                               TotalCaptureResult result) {
    // The request's tag carries the camera id (set in createSession()).
    int id = (int) result.getRequest().getTag();
    if (id == getMainCameraId()) {
        // Per-frame bookkeeping for the main camera only.
        updateFocusStateChange(result);
        updateAWBCCTAndgains(result);
        updateAECGainAndExposure(result);
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if (BSGC_DEBUG)
            Log.d(BSGC_TAG, "onCaptureCompleted Detected Face size = " + Integer.toString(faces == null ? 0 : faces.length));
        if (faces != null && mSettingsManager.isFDRenderingAtPreview()) {
            if (isBsgcDetecionOn() || isFacialContourOn() || isFacePointOn()) {
                updateFaceView(faces, getBsgcInfo(result, faces.length));
            } else {
                updateFaceView(faces, null);
            }
        }
        updateT2tTrackerView(result);
    }
    if (mPaused) {
        return;
    }
    detectHDRMode(result, id);
    processCaptureResult(result);
    mPostProcessor.onMetaAvailable(result);
    // Optional on-screen stats (AWB/AEC/histogram/BG/BE) driven by a debug setting.
    String stats_visualizer = mSettingsManager.getValue(
            SettingsManager.KEY_STATS_VISUALIZER_VALUE);
    if (stats_visualizer != null) {
        updateStatsView(stats_visualizer, result);
    } else {
        mUI.updateAWBInfoVisibility(View.GONE);
        mUI.updateAECInfoVisibility(View.GONE);
        updateGraghViewVisibility(View.GONE);
        updateBGStatsVisibility(View.GONE);
        updateBEStatsVisibility(View.GONE);
    }
}
1>、判断当前CameraID与此次预览请求时以Camera ID设置的tag是否吻合,如果吻合,则根据返回的预览帧结果更新AF、AWB、AE状态,获取人脸信息坐标数据更新人脸标识框,更新对焦追踪框。
2>、根据预览帧结果更新HDR模式菜单UI:
detectHDRMode(result, id);
如果autoHdr打开,场景模式为自动,且预览帧返回的结果显示也证实处于Hdr模式时,绘制显示Auto Hdr模式菜单UI;反之,不显示:
/**
 * Shows or hides the "Auto HDR" indicator based on the per-frame vendor result
 * "org.codeaurora.qcamera3.stats.is_hdr_scene", the auto-HDR setting, and the
 * auto scene mode ("0").
 */
private void detectHDRMode(CaptureResult result, int id) {
    String sceneMode = mSettingsManager.getValue(SettingsManager.KEY_SCENE_MODE);
    String autoHdr = mSettingsManager.getValue(SettingsManager.KEY_AUTO_HDR);
    Byte hdrScene = result.get(CaptureModule.isHdr);
    if (sceneMode == null || hdrScene == null) return;
    // Auto-HDR is active only when enabled, scene mode is auto ("0"),
    // and the HAL reports an HDR scene.
    final boolean showAutoHdr = autoHdr != null && "enable".equals(autoHdr)
            && "0".equals(sceneMode) && hdrScene == 1;
    mAutoHdrEnable = showAutoHdr;
    mActivity.runOnUiThread(new Runnable() {
        public void run() {
            if (mDrawAutoHDR2 != null) {
                if (showAutoHdr) {
                    mDrawAutoHDR2.setVisibility(View.VISIBLE);
                    mDrawAutoHDR2.AutoHDR();
                } else {
                    mDrawAutoHDR2.setVisibility(View.INVISIBLE);
                }
            }
        }
    });
}
是否处于Hdr模式Tag:
public static CaptureResult.Key<Byte> isHdr =
new CaptureResult.Key<>("org.codeaurora.qcamera3.stats.is_hdr_scene", Byte.class);
3>、继续处理预览帧结果,根据其预览帧更新界面UI等。
故可以看到一般当Camera被成功开启时,就开始根据场景需要配置相应的预览流、ImageReader流或Yuv流去创建Session会话。当Session会话创建成功时,则再根据设置菜单的相关设置项去设置相应Tag,包括闪光灯、AE、AF等,调用标准接口向底层申请打开预览。
基本都是围绕Camera2 CameraCaptureSession中的相关标准接口实现的。相关记录详见Android Camera2之CameraCaptureSession