问题点
背景
记录一下今天分析的一个 CTS 问题
android.app.appops.cts.AppOpsLoggingTest#openCameraWithAttribution
android.app.appops.cts.AppOpsLoggingTest#openCameraWithDefaultAttribution
05-05 14:30:10.923 10317 6286 6306 E TestRunner: ----- begin exception -----
05-05 14:30:10.923 10317 6286 6306 E TestRunner: java.lang.ArrayIndexOutOfBoundsException: length=0; index=0
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at android.app.appops.cts.AppOpsLoggingTest.openCamera(AppOpsLoggingTest.kt:794)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at android.app.appops.cts.AppOpsLoggingTest.openCameraWithDefaultAttribution(AppOpsLoggingTest.kt:853)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at java.lang.reflect.Method.invoke(Native Method)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:61)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:148)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:142)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at java.util.concurrent.FutureTask.run(FutureTask.java:264)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: at java.lang.Thread.run(Thread.java:1012)
05-05 14:30:10.923 10317 6286 6306 E TestRunner: ----- end exception -----
05-05 14:30:10.927 10317 6286 6306 I TestRunner: finished: openCameraWithDefaultAttribution(android.app.appops.cts.AppOpsLoggingTest)
05-05 14:30:10.935 10317 6286 6306 I TestRunner: run finished: 1 tests, 1 failed, 0 ignored
分析
log 里没有给出明确提示，只能通过分析源码来定位问题。
/**
 * End-to-end camera-open test that uses the default (null) attribution tag.
 *
 * The null-attribution case is worth covering separately because its handling
 * in native code is more involved than the explicitly-tagged case.
 */
@Test
fun openCameraWithDefaultAttribution() = openCamera(context)
private fun openCamera(context: Context) {
// End-to-end camera open: grab the first camera, configure a session on its
// first advertised output format/size, run a single preview capture, then
// assert the async CAMERA app-op note generated by the capture.
val cameraManager = context.getSystemService(CameraManager::class.java)
// Completed once the capture produced an image and the device was closed.
val openedCamera = CompletableFuture<CameraDevice>()
// Skip (rather than fail) on devices without any camera.
assumeTrue(cameraManager.cameraIdList.isNotEmpty())
val cameraId = cameraManager!!.cameraIdList[0]
val config = cameraManager!!.getCameraCharacteristics(cameraId)
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
val outputFormat = config!!.outputFormats[0]
// NOTE(review): getOutputSizes() can return an EMPTY array when the HAL
// advertises a format (traced here to 32 == 0x20 == RAW_SENSOR) without any
// configured stream size for it; indexing [0] then throws the
// ArrayIndexOutOfBoundsException (length=0; index=0) seen in the log above.
val outputSize: Size = config!!.getOutputSizes(outputFormat)[0]
val handler = Handler(context.mainLooper)
val cameraDeviceCallback = object : CameraDevice.StateCallback() {
override fun onOpened(cameraDevice: CameraDevice) {
// Camera is open: create an ImageReader target and issue one preview capture.
val imageReader = ImageReader.newInstance(
outputSize.width, outputSize.height, outputFormat, 2)
val builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
builder.addTarget(imageReader.surface)
val captureRequest = builder.build()
val sessionConfiguration = SessionConfiguration(
SessionConfiguration.SESSION_REGULAR,
listOf(OutputConfiguration(imageReader.surface)),
context.mainExecutor,
object : CameraCaptureSession.StateCallback() {
override fun onConfigured(session: CameraCaptureSession) {
session.capture(captureRequest, null, handler)
}
override fun onConfigureFailed(session: CameraCaptureSession) {}
})
// First frame delivered -> close the device and unblock the waiter below.
imageReader.setOnImageAvailableListener({
cameraDevice.close()
openedCamera.complete(cameraDevice)
}, handler)
cameraDevice.createCaptureSession(sessionConfiguration)
}
// (parameter typo "ameraDevice" is present in the original CTS source; kept as-is)
override fun onDisconnected(ameraDevice: CameraDevice) {}
override fun onError(cameraDevice: CameraDevice, i: Int) {}
}
cameraManager!!.openCamera(cameraId, context.mainExecutor, cameraDeviceCallback)
// Wait for the capture round-trip, then verify the async CAMERA op was noted
// with the expected attribution tag and a message containing the camera id.
openedCamera.get(TIMEOUT_MILLIS, MILLISECONDS).close()
eventually {
assertThat(asyncNoted[0].op).isEqualTo(OPSTR_CAMERA)
assertThat(asyncNoted[0].attributionTag).isEqualTo(context.attributionTag)
assertThat(asyncNoted[0].message).contains(cameraId)
}
}
结论是 config.getOutputSizes(outputFormat) 返回了空数组，取下标 [0] 时抛出 ArrayIndexOutOfBoundsException（length=0; index=0），即 outputSize 取不到值。
解决
通过额外的log tracing:
outputFormat = 32 对应Hex值 0x20
// Android public image-format codes (quoted from the platform native source),
// used to decode the traced outputFormat value: 32 decimal == 0x20.
enum class PublicFormat {
UNKNOWN = 0x0,
RGBA_8888 = 0x1,
RGBX_8888 = 0x2,
RGB_888 = 0x3,
RGB_565 = 0x4,
NV16 = 0x10,
NV21 = 0x11,
YUY2 = 0x14,
RGBA_FP16 = 0x16,
RAW_SENSOR = 0x20, // <-- the traced outputFormat (32): RAW_SENSOR
PRIVATE = 0x22,
YUV_420_888 = 0x23,
RAW_PRIVATE = 0x24,
RAW10 = 0x25,
RAW12 = 0x26,
RGBA_1010102 = 0x2b,
JPEG = 0x100,
DEPTH_POINT_CLOUD = 0x101,
RAW_DEPTH = 0x1002, // @hide
YV12 = 0x32315659,
Y8 = 0x20203859,
Y16 = 0x20363159, // @hide
DEPTH16 = 0x44363159,
DEPTH_JPEG = 0x69656963,
HEIC = 0x48454946,
};
解决方法：需要在 camera HAL/驱动中为 RAW_SENSOR(0x20) 格式配置对应的输出流尺寸，使 getOutputSizes() 不再返回空数组。
camera驱动对应的配置
<streamInfo>
<!--Information for a stream data -->
<streamConfiguration>
<!--Virtual Channel of the data
Valid values for virtual channel are: 0, 1, 2 and 3 -->
<vc range="[0,3]">0</vc>
<!--Data type of the stream. Default value is 0x2B (10-bit RAW) -->
<dt>0x2B</dt>
<!--Frame dimension: contains xStart, yStart, width and height -->
<frameDimension>
<xStart>0</xStart>
<yStart>0</yStart>
<width>4208</width>
<height>3120</height>
</frameDimension>
<!--Bit width of the data -->
<bitWidth>10</bitWidth>
<!--Type of the stream
Supported stream types are: BLOB, IMAGE, BAYER_GRBG, BAYER_RGGB, BAYER_Y, YUV_UYVY, YUV_YUYV -->
<type>IMAGE</type>
</streamConfiguration>
</streamInfo>