Lecture 11: cameraserver Process Startup, HIDL CameraDevice Initialization: Fixing Up CameraCharacteristics - Android Camera Native Framework

https://deepinout.com/android-camera-native-framework/cameraserver-process-startup-enumerateproviders-hidl-cameradevice-update-cameracharacteristics.html

1 Fixing up / updating mCameraCharacteristics

HidlProviderInfo::HidlDeviceInfo3::HidlDeviceInfo3(
        const std::string& name,
        const metadata_vendor_id_t tagId,
        const std::string &id, uint16_t minorVersion,
        const CameraResourceCost& resourceCost,
        sp<CameraProviderManager::ProviderInfo> parentProvider,
        const std::vector<std::string>& publicCameraIds,
        sp<hardware::camera::device::V3_2::ICameraDevice> interface) :
        DeviceInfo3(name, tagId, id, minorVersion, resourceCost, parentProvider, publicCameraIds) {

    status_t res = fixupMonochromeTags();
    if (OK != res) {
        ALOGE("%s: Unable to fix up monochrome tags based for older HAL version: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return;
    }
    auto stat = addDynamicDepthTags();
    if (OK != stat) {
        ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
                stat);
    }
    res = deriveHeicTags();
    if (OK != res) {
        ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                __FUNCTION__, strerror(-res), res);
    }

    if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
        status_t status = addDynamicDepthTags(/*maxResolution*/true);
        if (OK != status) {
            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
                    __FUNCTION__, strerror(-status), status);
        }

        status = deriveHeicTags(/*maxResolution*/true);
        if (OK != status) {
            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
        }
    }

    res = addRotateCropTags();
    if (OK != res) {
        ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }
    res = addPreCorrectionActiveArraySize();
    if (OK != res) {
        ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }
    res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
            &mCameraCharacteristics, &mSupportNativeZoomRatio);
    if (OK != res) {
        ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
                __FUNCTION__, strerror(-res), res);
    }

    camera_metadata_entry flashAvailable =
            mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
    if (flashAvailable.count == 1 &&
            flashAvailable.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
        mHasFlashUnit = true;
        // Fix up flash strength tags for devices without these keys.
        res = fixupTorchStrengthTags();
        if (OK != res) {
            ALOGE("%s: Unable to add default ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL and"
                    "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
        }
    } else {
        mHasFlashUnit = false;
    }

    camera_metadata_entry entry =
            mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
    if (entry.count == 1) {
        mTorchDefaultStrengthLevel = entry.data.i32[0];
    } else {
        mTorchDefaultStrengthLevel = 0;
    }
    entry = mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL);
    if (entry.count == 1) {
        mTorchMaximumStrengthLevel = entry.data.i32[0];
    } else {
        mTorchMaximumStrengthLevel = 0;
    }

    mTorchStrengthLevel = 0;

    queryPhysicalCameraIds();

    // Get physical camera characteristics if applicable
    auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
    if (!castResult.isOk()) {
        ALOGV("%s: Unable to convert ICameraDevice instance to version 3.5", __FUNCTION__);
        return;
    }
    sp<device::V3_5::ICameraDevice> interface_3_5 = castResult;
    if (interface_3_5 == nullptr) {
        ALOGE("%s: Converted ICameraDevice instance to nullptr", __FUNCTION__);
        return;
    }

    if (mIsLogicalCamera) {
        for (auto& id : mPhysicalIds) {
            if (std::find(mPublicCameraIds.begin(), mPublicCameraIds.end(), id) !=
                    mPublicCameraIds.end()) {
                continue;
            }

            hardware::hidl_string hidlId(id);
            // Note: 'status' (Status) and 'ret' (hardware::Return<void>) are declared earlier in
            // this constructor, in the getCameraCharacteristics() portion omitted from this excerpt.
            ret = interface_3_5->getPhysicalCameraCharacteristics(hidlId,
                    [&status, &id, this](Status s, device::V3_2::CameraMetadata metadata) {
                status = s;
                if (s == Status::OK) {
                    camera_metadata_t *buffer =
                            reinterpret_cast<camera_metadata_t*>(metadata.data());
                    size_t expectedSize = metadata.size();
                    int res = validate_camera_metadata_structure(buffer, &expectedSize);
                    if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
                        set_camera_metadata_vendor_id(buffer, mProviderTagid);
                        mPhysicalCameraCharacteristics[id] = buffer;
                    } else {
                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
                        status = Status::INTERNAL_ERROR;
                    }
                }
            });

            if (!ret.isOk()) {
                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
                        __FUNCTION__, id.c_str(), id.c_str(), ret.description().c_str());
                return;
            }
            if (status != Status::OK) {
                ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
                        __FUNCTION__, id.c_str(), mId.c_str(),
                        statusToString(status), status);
                return;
            }

            res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
                    &mPhysicalCameraCharacteristics[id], &mSupportNativeZoomRatio);
            if (OK != res) {
                ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
            }
        }
    }

    if (!kEnableLazyHal) {
        // Save HAL reference indefinitely
        mSavedInterface = interface;
    }
}

1.1 fixupMonochromeTags

For a camera device with HAL device version < 3.5 that advertises the MONOCHROME capability (added in HAL 3.3):

  1. Update ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT (present since HAL 3.2) to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO (added in HAL 3.4).
  2. Remove the static keys, request keys, and result keys a monochrome camera does not need (the key-removal step is sketched after the function below).
  3. Update ANDROID_SENSOR_BLACK_LEVEL_PATTERN: for a monochrome camera, all four channels of the 2×2 pattern must carry the same value.

status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupMonochromeTags() {
    status_t res = OK;
    auto& c = mCameraCharacteristics;
    sp<ProviderInfo> parentProvider = mParentProvider.promote();
    if (parentProvider == nullptr) {
        return DEAD_OBJECT;
    }
    IPCTransport ipcTransport = parentProvider->getIPCTransport();
    // Override static metadata for MONOCHROME camera with older device version
    if (ipcTransport == IPCTransport::HIDL &&
            (mVersion.get_major() == 3 && mVersion.get_minor() < 5)) {
        camera_metadata_entry cap = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        for (size_t i = 0; i < cap.count; i++) {
            if (cap.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
                // ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
                uint8_t cfa = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO;
                res = c.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &cfa, 1);
                if (res != OK) {
                    ALOGE("%s: Failed to update COLOR_FILTER_ARRANGEMENT: %s (%d)",
                          __FUNCTION__, strerror(-res), res);
                    return res;
                }

                // ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
                const std::vector<uint32_t> sKeys = {
                        ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
                        ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
                        ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
                        ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
                        ANDROID_SENSOR_COLOR_TRANSFORM1,
                        ANDROID_SENSOR_COLOR_TRANSFORM2,
                        ANDROID_SENSOR_FORWARD_MATRIX1,
                        ANDROID_SENSOR_FORWARD_MATRIX2,
                };
                res = removeAvailableKeys(c, sKeys,
                        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
                if (res != OK) {
                    ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    return res;
                }

                // ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
                const std::vector<uint32_t> reqKeys = {
                        ANDROID_COLOR_CORRECTION_MODE,
                        ANDROID_COLOR_CORRECTION_TRANSFORM,
                        ANDROID_COLOR_CORRECTION_GAINS,
                };
                res = removeAvailableKeys(c, reqKeys, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
                if (res != OK) {
                    ALOGE("%s: Failed to update REQUEST_AVAILABLE_REQUEST_KEYS: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    return res;
                }

                // ANDROID_REQUEST_AVAILABLE_RESULT_KEYS
                const std::vector<uint32_t> resKeys = {
                        ANDROID_SENSOR_GREEN_SPLIT,
                        ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
                        ANDROID_COLOR_CORRECTION_MODE,
                        ANDROID_COLOR_CORRECTION_TRANSFORM,
                        ANDROID_COLOR_CORRECTION_GAINS,
                };
                res = removeAvailableKeys(c, resKeys, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
                if (res != OK) {
                    ALOGE("%s: Failed to update REQUEST_AVAILABLE_RESULT_KEYS: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    return res;
                }

                // ANDROID_SENSOR_BLACK_LEVEL_PATTERN
                camera_metadata_entry blEntry = c.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
                for (size_t j = 1; j < blEntry.count; j++) {
                    blEntry.data.i32[j] = blEntry.data.i32[0];
                }
            }
        }
    }
    return res;
}
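
removeAvailableKeys() is not quoted in this article. Conceptually it reads the current key list (for example ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS), drops the given keys, and writes the filtered list back into the metadata. A minimal standalone sketch of that filtering step, using plain std containers instead of the real CameraMetadata API (the function name and signature here are illustrative, not the AOSP helper):

#include <algorithm>
#include <cstdint>
#include <vector>

// Illustrative stand-in for the key-removal step used by fixupMonochromeTags(): given the
// current contents of an "available keys" list and the keys a monochrome sensor does not need,
// return the filtered list that would then be written back into the metadata.
// This is a sketch, not the AOSP removeAvailableKeys().
std::vector<int32_t> removeKeys(const std::vector<int32_t>& available,
                                const std::vector<uint32_t>& toRemove) {
    std::vector<int32_t> kept;
    kept.reserve(available.size());
    for (int32_t key : available) {
        bool drop = std::find(toRemove.begin(), toRemove.end(),
                              static_cast<uint32_t>(key)) != toRemove.end();
        if (!drop) {
            kept.push_back(key);  // keep every key that is not in the removal set
        }
    }
    return kept;
}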

1.2 addDynamicDepthTags

Tells the app which dynamic-depth output streams are supported.

Dynamic Depth is a JPEG that embeds depth information. The camera framework composites the JPEG and the depth buffer itself, so the HAL does not populate the related static metadata and it has to be added here.
The selection rule: streams whose JPEG and depth sizes match exactly (or have a similar aspect ratio); see the sketch after the function below.

ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS

status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
        bool maxResolution) {
    const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;

    const int32_t scalerSizesTag =
              SessionConfigurationUtils::getAppropriateModeTag(
                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t scalerMinFrameDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
    const int32_t scalerStallDurationsTag =
                 SessionConfigurationUtils::getAppropriateModeTag(
                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);

    const int32_t depthSizesTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t depthStallDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, maxResolution);
    const int32_t depthMinFrameDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, maxResolution);

    const int32_t dynamicDepthSizesTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t dynamicDepthStallDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, maxResolution);
    const int32_t dynamicDepthMinFrameDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                 ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, maxResolution);

    auto& c = mCameraCharacteristics;
    std::vector<std::tuple<size_t, size_t>> supportedBlobSizes, supportedDepthSizes,
            supportedDynamicDepthSizes, internalDepthSizes;
    auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
    if (chTags.count == 0) {
        ALOGE("%s: Supported camera characteristics is empty!", __FUNCTION__);
        return BAD_VALUE;
    }

    bool isDepthExclusivePresent = std::find(chTags.data.i32, chTags.data.i32 + chTags.count,
            depthExclTag) != (chTags.data.i32 + chTags.count);
    bool isDepthSizePresent = std::find(chTags.data.i32, chTags.data.i32 + chTags.count,
            depthSizesTag) != (chTags.data.i32 + chTags.count);
    if (!(isDepthExclusivePresent && isDepthSizePresent)) {
        // No depth support, nothing more to do.
        return OK;
    }

    auto depthExclusiveEntry = c.find(depthExclTag);
    if (depthExclusiveEntry.count > 0) {
        if (depthExclusiveEntry.data.u8[0] != ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE) {
            // Depth support is exclusive, nothing more to do.
            return OK;
        }
    } else {
        ALOGE("%s: Advertised depth exclusive tag but value is not present!", __FUNCTION__);
        return BAD_VALUE;
    }

    getSupportedSizes(c, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
            &supportedBlobSizes);
    getSupportedSizes(c, depthSizesTag, HAL_PIXEL_FORMAT_Y16, &supportedDepthSizes);
    if (supportedBlobSizes.empty() || supportedDepthSizes.empty()) {
        // Nothing to do in this case.
        return OK;
    }

    getSupportedDynamicDepthSizes(supportedBlobSizes, supportedDepthSizes,
            &supportedDynamicDepthSizes, &internalDepthSizes);
    if (supportedDynamicDepthSizes.empty()) {
        // Nothing more to do.
        return OK;
    }

    std::vector<int32_t> dynamicDepthEntries;
    for (const auto& it : supportedDynamicDepthSizes) {
        int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
                static_cast<int32_t> (std::get<1>(it)),
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
        dynamicDepthEntries.insert(dynamicDepthEntries.end(), entry, entry + 4);
    }

    std::vector<int64_t> depthMinDurations, depthStallDurations;
    std::vector<int64_t> blobMinDurations, blobStallDurations;
    std::vector<int64_t> dynamicDepthMinDurations, dynamicDepthStallDurations;

    getSupportedDurations(c, depthMinFrameDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
                          &depthMinDurations);
    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
                          supportedDynamicDepthSizes, &blobMinDurations);
    if (blobMinDurations.empty() || depthMinDurations.empty() ||
            (depthMinDurations.size() != blobMinDurations.size())) {
        ALOGE("%s: Unexpected number of available depth min durations! %zu vs. %zu",
                __FUNCTION__, depthMinDurations.size(), blobMinDurations.size());
        return BAD_VALUE;
    }

    getSupportedDurations(c, depthStallDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
            &depthStallDurations);
    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
            supportedDynamicDepthSizes, &blobStallDurations);
    if (blobStallDurations.empty() || depthStallDurations.empty() ||
            (depthStallDurations.size() != blobStallDurations.size())) {
        ALOGE("%s: Unexpected number of available depth stall durations! %zu vs. %zu",
                __FUNCTION__, depthStallDurations.size(), blobStallDurations.size());
        return BAD_VALUE;
    }

    getSupportedDynamicDepthDurations(depthMinDurations, blobMinDurations,
            &dynamicDepthMinDurations);
    getSupportedDynamicDepthDurations(depthStallDurations, blobStallDurations,
            &dynamicDepthStallDurations);
    if (dynamicDepthMinDurations.empty() || dynamicDepthStallDurations.empty() ||
            (dynamicDepthMinDurations.size() != dynamicDepthStallDurations.size())) {
        ALOGE("%s: Unexpected number of dynamic depth stall/min durations! %zu vs. %zu",
                __FUNCTION__, dynamicDepthMinDurations.size(), dynamicDepthStallDurations.size());
        return BAD_VALUE;
    }

    std::vector<int64_t> dynamicDepthMinDurationEntries;
    auto itDuration = dynamicDepthMinDurations.begin();
    auto itSize = supportedDynamicDepthSizes.begin();
    while (itDuration != dynamicDepthMinDurations.end()) {
        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
        dynamicDepthMinDurationEntries.insert(dynamicDepthMinDurationEntries.end(), entry,
                entry + 4);
        itDuration++; itSize++;
    }

    std::vector<int64_t> dynamicDepthStallDurationEntries;
    itDuration = dynamicDepthStallDurations.begin();
    itSize = supportedDynamicDepthSizes.begin();
    while (itDuration != dynamicDepthStallDurations.end()) {
        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
        dynamicDepthStallDurationEntries.insert(dynamicDepthStallDurationEntries.end(), entry,
                entry + 4);
        itDuration++; itSize++;
    }

    std::vector<int32_t> supportedChTags;
    supportedChTags.reserve(chTags.count + 3);
    supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
            chTags.data.i32 + chTags.count);
    supportedChTags.push_back(dynamicDepthSizesTag);
    supportedChTags.push_back(dynamicDepthMinFrameDurationsTag);
    supportedChTags.push_back(dynamicDepthStallDurationsTag);
    c.update(dynamicDepthSizesTag, dynamicDepthEntries.data(), dynamicDepthEntries.size());
    c.update(dynamicDepthMinFrameDurationsTag, dynamicDepthMinDurationEntries.data(),
            dynamicDepthMinDurationEntries.size());
    c.update(dynamicDepthStallDurationsTag, dynamicDepthStallDurationEntries.data(),
             dynamicDepthStallDurationEntries.size());
    c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
            supportedChTags.size());

    return OK;
}
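
getSupportedDynamicDepthSizes() is also not quoted here. The selection rule described above (a BLOB/JPEG size is advertised as dynamic depth when there is an exactly matching depth size, or one with a close enough aspect ratio) can be sketched standalone as follows; the tolerance value and the names are illustrative, not the AOSP implementation:

#include <cmath>
#include <cstddef>
#include <tuple>
#include <vector>

using Size = std::tuple<size_t, size_t>;  // (width, height)

// Illustrative pairing rule: for every supported JPEG (BLOB) size, look for a depth size that
// matches exactly or has a similar aspect ratio. The matched JPEG size becomes a dynamic-depth
// size; the chosen depth size is kept for the duration lookups. The 0.1 tolerance is an assumed
// value for this sketch.
void matchDynamicDepthSizes(const std::vector<Size>& blobSizes,
                            const std::vector<Size>& depthSizes,
                            std::vector<Size>* dynamicDepthSizes,
                            std::vector<Size>* internalDepthSizes,
                            float aspectRatioTolerance = 0.1f) {
    for (const auto& blob : blobSizes) {
        float blobRatio = static_cast<float>(std::get<0>(blob)) / std::get<1>(blob);
        for (const auto& depth : depthSizes) {
            float depthRatio = static_cast<float>(std::get<0>(depth)) / std::get<1>(depth);
            if (blob == depth || std::fabs(blobRatio - depthRatio) <= aspectRatioTolerance) {
                dynamicDepthSizes->push_back(blob);    // advertised to apps as a dynamic-depth size
                internalDepthSizes->push_back(depth);  // used internally for the depth stream
                break;
            }
        }
    }
}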

1.3 deriveHeicTags

HEIC output is compressed by the camera framework through the media encoder, so the related static metadata is not populated by the HAL and has to be derived here (a simplified sketch of the filtering step follows the function below).

ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS

status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags(bool maxResolution) {
    int32_t scalerStreamSizesTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    int32_t scalerMinFrameDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);

    int32_t heicStreamSizesTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
    int32_t heicMinFrameDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, maxResolution);
    int32_t heicStallDurationsTag =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, maxResolution);

    auto& c = mCameraCharacteristics;

    camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED);
    if (halHeicSupport.count > 1) {
        ALOGE("%s: Invalid entry count %zu for ANDROID_HEIC_INFO_SUPPORTED",
                __FUNCTION__, halHeicSupport.count);
        return BAD_VALUE;
    } else if (halHeicSupport.count == 0 ||
            halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_FALSE) {
        // Camera HAL doesn't support mandatory stream combinations for HEIC.
        return OK;
    }

    camera_metadata_entry maxJpegAppsSegments =
            c.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
    if (maxJpegAppsSegments.count != 1 || maxJpegAppsSegments.data.u8[0] == 0 ||
            maxJpegAppsSegments.data.u8[0] > 16) {
        ALOGE("%s: ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT must be within [1, 16]",
                __FUNCTION__);
        return BAD_VALUE;
    }

    // Populate HEIC output configurations and its related min frame duration
    // and stall duration.
    std::vector<int32_t> heicOutputs;
    std::vector<int64_t> heicDurations;
    std::vector<int64_t> heicStallDurations;

    camera_metadata_entry halStreamConfigs = c.find(scalerStreamSizesTag);
    camera_metadata_entry minFrameDurations = c.find(scalerMinFrameDurationsTag);

    // Only fill in combinations supported by both the camera and the encoder
    status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations,
            halStreamConfigs, minFrameDurations);
    if (res != OK) {
        ALOGE("%s: Failed to fill HEIC stream combinations: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    c.update(heicStreamSizesTag, heicOutputs.data(), heicOutputs.size());
    c.update(heicMinFrameDurationsTag, heicDurations.data(), heicDurations.size());
    c.update(heicStallDurationsTag, heicStallDurations.data(), heicStallDurations.size());

    return OK;
}
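
fillHeicStreamCombinations() is likewise a helper that is not quoted here. As the comment above says, it keeps only the sizes that both the camera (the BLOB scaler configurations) and the platform HEIC/HEVC encoder can handle, and derives the matching durations. A simplified standalone sketch of the filtering idea, where encoderSupports() is a hypothetical stand-in for the real encoder-capability query:

#include <cstdint>
#include <functional>
#include <vector>

// One stream configuration entry is four int32 values (format, width, height, direction),
// the same layout appended to the metadata in addDynamicDepthTags() above.
struct StreamConfig { int32_t format, width, height, direction; };

// Illustrative sketch: walk the camera's BLOB (JPEG) output configurations and keep only the
// sizes an encoder-capability callback accepts; those become the HEIC stream configurations.
// encoderSupports() is a hypothetical stand-in for the real media-capability query.
std::vector<StreamConfig> deriveHeicConfigs(
        const std::vector<StreamConfig>& blobOutputConfigs,
        const std::function<bool(int32_t w, int32_t h)>& encoderSupports) {
    std::vector<StreamConfig> heicConfigs;
    for (const auto& cfg : blobOutputConfigs) {
        if (encoderSupports(cfg.width, cfg.height)) {
            heicConfigs.push_back(cfg);  // same size, re-advertised under the HEIC tags
        }
    }
    return heicConfigs;
}

In the real path the surviving entries are then written under ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS together with the derived min-frame and stall durations, as shown in deriveHeicTags() above.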

1.4 addRotateCropTags

If ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES is not populated, add at least one value: ANDROID_SCALER_ROTATE_AND_CROP_NONE.

status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addRotateCropTags() {
    status_t res = OK;
    auto& c = mCameraCharacteristics;

    auto availableRotateCropEntry = c.find(ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
    if (availableRotateCropEntry.count == 0) {
        uint8_t defaultAvailableRotateCropEntry = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
        res = c.update(ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
                &defaultAvailableRotateCropEntry, 1);
    }
    return res;
}

1.5 addPreCorrectionActiveArraySize

If ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is not populated, copy the value of ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE into it.

overrideZoomRatioTags

If the HAL does not support ANDROID_CONTROL_ZOOM_RATIO_RANGE, construct a zoom-ratio range from ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, write it into ANDROID_CONTROL_ZOOM_RATIO_RANGE, and add the zoom-ratio related static/request/result keys (a simplified sketch of this override follows the function below).

status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
    status_t res = OK;
    auto& c = mCameraCharacteristics;

    auto activeArraySize = c.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    auto preCorrectionActiveArraySize = c.find(
            ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
    if (activeArraySize.count == 4 && preCorrectionActiveArraySize.count == 0) {
        std::vector<int32_t> preCorrectionArray(
                activeArraySize.data.i32, activeArraySize.data.i32+4);
        res = c.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
                preCorrectionArray.data(), 4);
        if (res != OK) {
            ALOGE("%s: Failed to add ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s(%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        return res;
    }

    auto charTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
    bool hasPreCorrectionActiveArraySize = std::find(charTags.data.i32,
            charTags.data.i32 + charTags.count,
            ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE) !=
            (charTags.data.i32 + charTags.count);
    if (!hasPreCorrectionActiveArraySize) {
        std::vector<int32_t> supportedCharTags;
        supportedCharTags.reserve(charTags.count + 1);
        supportedCharTags.insert(supportedCharTags.end(), charTags.data.i32,
                charTags.data.i32 + charTags.count);
        supportedCharTags.push_back(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);

        res = c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedCharTags.data(),
                supportedCharTags.size());
        if (res != OK) {
            ALOGE("%s: Failed to update ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s(%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    return res;
}
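
ZoomRatioMapper::overrideZoomRatioTags() itself is not quoted in this article. The rule described in section 1.5 (if the HAL does not report ANDROID_CONTROL_ZOOM_RATIO_RANGE, synthesize one from ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) can be sketched as follows; the metadata stand-in and tag constants are illustrative, not the real CameraMetadata API:

#include <cstdint>
#include <map>
#include <vector>

// Minimal stand-in for a metadata bag keyed by tag; the real code operates on CameraMetadata.
using FakeMetadata = std::map<uint32_t, std::vector<float>>;

// Hypothetical tag constants, for this sketch only.
constexpr uint32_t kZoomRatioRangeTag = 1;  // stands in for ANDROID_CONTROL_ZOOM_RATIO_RANGE
constexpr uint32_t kMaxDigitalZoomTag = 2;  // stands in for ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM

// Sketch of the override rule: if the HAL already reports a zoom-ratio range it supports native
// zoom ratio; otherwise synthesize [1.0, maxDigitalZoom] and write it into the metadata.
void overrideZoomRatioRange(FakeMetadata& m, bool* supportsNativeZoomRatio) {
    auto range = m.find(kZoomRatioRangeTag);
    if (range != m.end() && range->second.size() == 2) {
        *supportsNativeZoomRatio = true;    // HAL implements zoom ratio itself
        return;
    }
    *supportsNativeZoomRatio = false;       // framework will have to emulate zoom ratio
    auto maxZoom = m.find(kMaxDigitalZoomTag);
    float upper = (maxZoom != m.end() && !maxZoom->second.empty()) ? maxZoom->second[0] : 1.0f;
    m[kZoomRatioRangeTag] = {1.0f, upper};  // synthesized ANDROID_CONTROL_ZOOM_RATIO_RANGE
}

As section 1.5 notes, the real implementation additionally appends the zoom-ratio related keys to the available static/request/result key lists, which is omitted from this sketch.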

1.6 fixupTorchStrengthTags

If ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL is not populated, set it to 1.
If ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL is not populated, set it to 1.

status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupTorchStrengthTags() {
    status_t res = OK;
    auto& c = mCameraCharacteristics;
    auto flashInfoStrengthDefaultLevelEntry = c.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
    if (flashInfoStrengthDefaultLevelEntry.count == 0) {
        int32_t flashInfoStrengthDefaultLevel = 1;
        res = c.update(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
                &flashInfoStrengthDefaultLevel, 1);
        if (res != OK) {
            ALOGE("%s: Failed to update ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL: %s (%d)",
                    __FUNCTION__,strerror(-res), res);
            return res;
        }
    }
    auto flashInfoStrengthMaximumLevelEntry = c.find(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL);
    if (flashInfoStrengthMaximumLevelEntry.count == 0) {
        int32_t flashInfoStrengthMaximumLevel = 1;
        res = c.update(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
                &flashInfoStrengthMaximumLevel, 1);
        if (res != OK) {
            ALOGE("%s: Failed to update ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL: %s (%d)",
                    __FUNCTION__,strerror(-res), res);
            return res;
        }
    }
    return res;
}

1.7 queryPhysicalCameraIds

If the device advertises the LOGICAL_MULTI_CAMERA capability, parse the physical camera IDs out of ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS and store them in mPhysicalIds (a standalone illustration of the NUL-separated ID encoding follows the function below).

void CameraProviderManager::ProviderInfo::DeviceInfo3::queryPhysicalCameraIds() {
    camera_metadata_entry_t entryCap;

    entryCap = mCameraCharacteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
    for (size_t i = 0; i < entryCap.count; ++i) {
        uint8_t capability = entryCap.data.u8[i];
        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
            mIsLogicalCamera = true;
            break;
        }
    }
    if (!mIsLogicalCamera) {
        return;
    }

    camera_metadata_entry_t entryIds = mCameraCharacteristics.find(
            ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS);
    const uint8_t* ids = entryIds.data.u8;
    size_t start = 0;
    for (size_t i = 0; i < entryIds.count; ++i) {
        if (ids[i] == '\0') {
            if (start != i) {
                mPhysicalIds.push_back((const char*)ids+start);
            }
            start = i+1;
        }
    }
}
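
ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS stores the physical IDs as NUL-terminated strings packed back-to-back in a single u8 entry, which is exactly what the loop above splits apart. A tiny standalone illustration of that encoding (the sample payload is made up for the example):

#include <cstddef>
#include <cstdio>
#include <string>
#include <vector>

int main() {
    // Example payload: three physical IDs "2", "3" and "10", each terminated by '\0' and packed
    // back-to-back, exactly as they would appear in the u8 metadata entry.
    const unsigned char ids[] = {'2', '\0', '3', '\0', '1', '0', '\0'};
    std::vector<std::string> physicalIds;
    size_t start = 0;
    for (size_t i = 0; i < sizeof(ids); ++i) {
        if (ids[i] == '\0') {  // end of one ID string
            if (start != i) {
                physicalIds.emplace_back(reinterpret_cast<const char*>(ids) + start);
            }
            start = i + 1;
        }
    }
    for (const auto& id : physicalIds) {
        std::printf("physical id: %s\n", id.c_str());  // prints 2, 3, 10
    }
    return 0;
}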

1.8 Fixing up / updating the physical camera characteristics

getPhysicalCameraCharacteristics
overrideZoomRatioTags

For each physical camera ID of a logical camera that is not also exposed as a public camera ID, the constructor fetches that camera's characteristics over HIDL and applies the same zoom-ratio override as above:

    if (mIsLogicalCamera) {
        for (auto& id : mPhysicalIds) {
            if (std::find(mPublicCameraIds.begin(), mPublicCameraIds.end(), id) !=
                    mPublicCameraIds.end()) {
                continue;
            }

            hardware::hidl_string hidlId(id);
            ret = interface_3_5->getPhysicalCameraCharacteristics(hidlId,
                    [&status, &id, this](Status s, device::V3_2::CameraMetadata metadata) {
                status = s;
                if (s == Status::OK) {
                    camera_metadata_t *buffer =
                            reinterpret_cast<camera_metadata_t*>(metadata.data());
                    size_t expectedSize = metadata.size();
                    int res = validate_camera_metadata_structure(buffer, &expectedSize);
                    if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
                        set_camera_metadata_vendor_id(buffer, mProviderTagid);
                        mPhysicalCameraCharacteristics[id] = buffer;
                    } else {
                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
                        status = Status::INTERNAL_ERROR;
                    }
                }
            });

            if (!ret.isOk()) {
                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
                        __FUNCTION__, id.c_str(), id.c_str(), ret.description().c_str());
                return;
            }
            if (status != Status::OK) {
                ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
                        __FUNCTION__, id.c_str(), mId.c_str(),
                        statusToString(status), status);
                return;
            }

            res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
                    &mPhysicalCameraCharacteristics[id], &mSupportNativeZoomRatio);
            if (OK != res) {
                ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
            }
        }
    }
