Android Camera getSupportedPreviewSizes Failure

Symptom: with some cameras the list of supported resolutions cannot be retrieved; only a single default 480p entry is returned.

Locating the problem: on the application side these values are normally obtained by calling getParameters(), taking the returned Parameters object, and then calling its methods (for example getSupportedPreviewSizes()). So the first thing to trace is the getParameters() API:

    /**
     * Returns the current settings for this Camera service.
     * If modifications are made to the returned Parameters, they must be passed
     * to {@link #setParameters(Camera.Parameters)} to take effect.
     *
     * @see #setParameters(Camera.Parameters)
     */
    public Parameters getParameters() {
        Parameters p = new Parameters();
        String s = native_getParameters();
        p.unflatten(s);
        return p;
    }

This just delegates to native_getParameters(); following it down:

private native final String native_getParameters();

Its JNI implementation is:

static jstring android_hardware_Camera_getParameters(JNIEnv *env, jobject thiz)
{
    ALOGV("getParameters");
    sp<Camera> camera = get_native_camera(env, thiz, NULL);
    if (camera == 0) return 0;

    String8 params8 = camera->getParameters();
    if (params8.isEmpty()) {
        jniThrowRuntimeException(env, "getParameters failed (empty parameters)");
        return 0;
    }
    return env->NewStringUTF(params8.string());
}

From JNI we drop down to the next layer, frameworks/av/camera/Camera.cpp:

String8 Camera::getParameters() const
{
    ALOGV("getParameters");
    String8 params;
    sp <ICamera> c = mCamera;
    if (c != 0) params = mCamera->getParameters();
    return params;
}

Then on to the Binder proxy in frameworks/av/camera/ICamera.cpp:

 // get preview/capture parameters - key/value pairs
    String8 getParameters() const
    {
        ALOGV("getParameters");
        Parcel data, reply;
        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
        remote()->transact(GET_PARAMETERS, data, &reply);
        return reply.readString8();
    }

The server side of this Binder interface is also worth a look; BnCamera::onTransact() dispatches the incoming transactions, including GET_PARAMETERS:

status_t BnCamera::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case DISCONNECT: {
            ALOGV("DISCONNECT");
            CHECK_INTERFACE(ICamera, data, reply);
            disconnect();
            reply->writeNoException();
            return NO_ERROR;
        } break;
        case SET_PREVIEW_TARGET: {
            ALOGV("SET_PREVIEW_TARGET");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IGraphicBufferProducer> st =
                interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
            reply->writeInt32(setPreviewTarget(st));
            return NO_ERROR;
        } break;
        case SET_PREVIEW_CALLBACK_FLAG: {
            ALOGV("SET_PREVIEW_CALLBACK_TYPE");
            CHECK_INTERFACE(ICamera, data, reply);
            int callback_flag = data.readInt32();
            setPreviewCallbackFlag(callback_flag);
            return NO_ERROR;
        } break;
        case SET_PREVIEW_CALLBACK_TARGET: {
            ALOGV("SET_PREVIEW_CALLBACK_TARGET");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IGraphicBufferProducer> cp =
                interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
            reply->writeInt32(setPreviewCallbackTarget(cp));
            return NO_ERROR;
        }
        case START_PREVIEW: {
            ALOGV("START_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(startPreview());
            return NO_ERROR;
        } break;
        case START_RECORDING: {
            ALOGV("START_RECORDING");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(startRecording());
            return NO_ERROR;
        } break;
        case STOP_PREVIEW: {
            ALOGV("STOP_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            stopPreview();
            return NO_ERROR;
        } break;
        case STOP_RECORDING: {
            ALOGV("STOP_RECORDING");
            CHECK_INTERFACE(ICamera, data, reply);
            stopRecording();
            return NO_ERROR;
        } break;
        case RELEASE_RECORDING_FRAME: {
            ALOGV("RELEASE_RECORDING_FRAME");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
            releaseRecordingFrame(mem);
            return NO_ERROR;
        } break;
        case STORE_META_DATA_IN_BUFFERS: {
            ALOGV("STORE_META_DATA_IN_BUFFERS");
            CHECK_INTERFACE(ICamera, data, reply);
            bool enabled = data.readInt32();
            reply->writeInt32(storeMetaDataInBuffers(enabled));
            return NO_ERROR;
        } break;
        case PREVIEW_ENABLED: {
            ALOGV("PREVIEW_ENABLED");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(previewEnabled());
            return NO_ERROR;
        } break;
        case RECORDING_ENABLED: {
            ALOGV("RECORDING_ENABLED");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(recordingEnabled());
            return NO_ERROR;
        } break;
        case AUTO_FOCUS: {
            ALOGV("AUTO_FOCUS");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(autoFocus());
            return NO_ERROR;
        } break;
        case CANCEL_AUTO_FOCUS: {
            ALOGV("CANCEL_AUTO_FOCUS");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(cancelAutoFocus());
            return NO_ERROR;
        } break;
        case TAKE_PICTURE: {
            ALOGV("TAKE_PICTURE");
            CHECK_INTERFACE(ICamera, data, reply);
            int msgType = data.readInt32();
            reply->writeInt32(takePicture(msgType));
            return NO_ERROR;
        } break;
        case SET_PARAMETERS: {
            ALOGV("SET_PARAMETERS");
            CHECK_INTERFACE(ICamera, data, reply);
            String8 params(data.readString8());
            reply->writeInt32(setParameters(params));
            return NO_ERROR;
         } break;
        case GET_PARAMETERS: {
            ALOGV("GET_PARAMETERS");
            CHECK_INTERFACE(ICamera, data, reply);
             reply->writeString8(getParameters());
            return NO_ERROR;
         } break;
        case SEND_COMMAND: {
            ALOGV("SEND_COMMAND");
            CHECK_INTERFACE(ICamera, data, reply);
            int command = data.readInt32();
            int arg1 = data.readInt32();
            int arg2 = data.readInt32();
            reply->writeInt32(sendCommand(command, arg1, arg2));
            return NO_ERROR;
         } break;
        case CONNECT: {
            CHECK_INTERFACE(ICamera, data, reply);
            sp<ICameraClient> cameraClient = interface_cast<ICameraClient>(data.readStrongBinder());
            reply->writeInt32(connect(cameraClient));
            return NO_ERROR;
        } break;
        case LOCK: {
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(lock());
            return NO_ERROR;
        } break;
        case UNLOCK: {
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(unlock());
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}

From here the GET_PARAMETERS transaction is handled in the camera service process by CameraClient, i.e. frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp:

// get preview/capture parameters - key/value pairs
String8 CameraClient::getParameters() const {
    Mutex::Autolock lock(mLock);
    if (checkPidAndHardware() != NO_ERROR) return String8();

    String8 params(mHardware->getParameters().flatten());
    LOG1("getParameters (pid %d) (%s)", getCallingPid(), params.string());
    return params;
}
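
Before going further it is worth noting what this String8 actually contains: the parameters cross the Binder boundary as a single ';'-separated string of key=value pairs, which is exactly what unflatten() parses back into a Parameters object in the Java layer, and the supported preview sizes travel in the preview-size-values entry. The following standalone sketch (with made-up sample values) illustrates the format and the kind of parsing unflatten() performs:

#include <cstdio>
#include <map>
#include <sstream>
#include <string>

// Split a flattened camera parameter string ("key=value;key=value;...")
// into a key/value map -- conceptually what CameraParameters::unflatten()
// does. The sample string below is made up for illustration only.
static std::map<std::string, std::string> unflattenParams(const std::string& flat)
{
    std::map<std::string, std::string> params;
    std::stringstream ss(flat);
    std::string pair;
    while (std::getline(ss, pair, ';')) {
        std::string::size_type eq = pair.find('=');
        if (eq != std::string::npos) {
            params[pair.substr(0, eq)] = pair.substr(eq + 1);
        }
    }
    return params;
}

int main()
{
    const std::string flat =
        "preview-size=640x480;"
        "preview-size-values=640x480,1280x720,1920x1080;"   // KEY_SUPPORTED_PREVIEW_SIZES
        "preview-format=yuv420sp";
    std::map<std::string, std::string> p = unflattenParams(flat);
    // getSupportedPreviewSizes() on the application side ultimately
    // reads and splits this comma-separated list.
    printf("preview-size-values = %s\n", p["preview-size-values"].c_str());
    return 0;
}

So if the preview-size-values entry only ever contains one size, every layer above will faithfully report just that one size; the problem has to be below this point.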

Next we need to determine what mHardware points to. It is initialized as follows:

status_t CameraClient::initialize(camera_module_t *module) {
    int callingPid = getCallingPid();
    status_t res;

    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);

    // Verify ops permissions
    res = startCameraOps();
    if (res != OK) {
        return res;
    }

    char camera_device_name[10];
    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);

    mHardware = new CameraHardwareInterface(camera_device_name);
    res = mHardware->initialize(&module->common);
    if (res != OK) {
        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        mHardware.clear();
        return NO_INIT;
    }

    mHardware->setCallbacks(notifyCallback,
            dataCallback,
            dataCallbackTimestamp,
            (void *)mCameraId);

    // Enable zoom, error, focus, and metadata messages by default
    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);

    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
    return OK;
}

The line mHardware = new CameraHardwareInterface(camera_device_name) leads us to frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h:

/** Return the camera parameters. */
    CameraParameters getParameters() const
    {
        ALOGV("%s(%s)", __FUNCTION__, mName.string());
        CameraParameters parms;
        if (mDevice->ops->get_parameters) {
            char *temp = mDevice->ops->get_parameters(mDevice);
            String8 str_parms(temp);
            if (mDevice->ops->put_parameters)
                mDevice->ops->put_parameters(mDevice, temp);
            else
                free(temp);
            parms.unflatten(str_parms);
        }
        return parms;
    }

This calls through mDevice->ops->get_parameters(). If you are familiar with the camera startup flow, you will know that this function pointer is assigned in the vendor's camera module, inside camera_device_open(). For HiSilicon the relevant file is device/hisilicon/bigfish/hardware/camera/camera_hal/CameraModule.cpp:

static int camera_device_open(const hw_module_t* module, const char* name, hw_device_t** device)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    int ret         = 0;
    int camera_id   = 0;

    camera_device_t* camera_device              = NULL;
    camera_device_ops_t* camera_ops             = NULL;
    android::CameraHal* camera_hal   = NULL;

    if(!name || !device)
    {
        ALOGE("invalid parameter[name=%p, device=%p]", name, device);
        return -EINVAL;
    }

    camera_id  = atoi(name);
    camera_hal = new android::CameraHal(camera_id);
    if(!camera_hal || !camera_hal->mInitOK)
    {
        ALOGE("fail to allocate memory for CameraHal or fail to init CameraHal");
        ret = -ENOMEM;
        goto EXIT;
        //return -EINVAL;
    }

    camera_device   = new camera_device_t;
    camera_ops      = new camera_device_ops_t;
    if(!camera_device || !camera_ops)
    {
        ALOGE("fail to allocate memory for camera_device_t or camera_device_ops_t");
        ret = -ENOMEM;
        goto EXIT;
    }

    memset(camera_device, 0x00, sizeof(*camera_device));
    memset(camera_ops, 0x00, sizeof(*camera_ops));

    camera_device->common.tag                 = HARDWARE_DEVICE_TAG;
    camera_device->common.version             = 0;
    camera_device->common.module              = const_cast<hw_module_t*>(module);
    camera_device->common.close               = camera_device_close;
    camera_device->ops                        = camera_ops;
    camera_device->priv                       = camera_hal;

    camera_ops->set_preview_window            = set_preview_window;
    camera_ops->set_callbacks                 = set_callbacks;
    camera_ops->auto_focus                    = auto_focus;
    camera_ops->enable_msg_type               = enable_msg_type;
    camera_ops->disable_msg_type              = disable_msg_type;
    camera_ops->msg_type_enabled              = msg_type_enabled;
    camera_ops->start_preview                 = start_preview;
    camera_ops->stop_preview                  = stop_preview;
    camera_ops->preview_enabled               = preview_enabled;
    camera_ops->store_meta_data_in_buffers    = store_meta_data_in_buffers;
    camera_ops->start_recording               = start_recording;
    camera_ops->stop_recording                = stop_recording;
    camera_ops->recording_enabled             = recording_enabled;
    camera_ops->release_recording_frame       = release_recording_frame;
    camera_ops->cancel_auto_focus             = cancel_auto_focus;
    camera_ops->take_picture                  = take_picture;
    camera_ops->cancel_picture                = cancel_picture;
    camera_ops->set_parameters                = set_parameters;
    camera_ops->get_parameters                = get_parameters;
    camera_ops->put_parameters                = put_parameters;
    camera_ops->send_command                  = send_command;
    camera_ops->release                       = release;
    camera_ops->dump                          = dump;

    *device                                   = &camera_device->common;

    return 0;

EXIT:
    if(camera_hal)
    {
        delete camera_hal;
        camera_hal = NULL;
    }

    if(camera_device)
    {
        delete camera_device;
        camera_device = NULL;
    }

    if(camera_ops)
    {
        delete camera_ops;
        camera_ops = NULL;
    }

    return -1;
}
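
As an aside, camera_device_open() is not called by the framework directly; it is reached through the standard libhardware path. The camera service loads the vendor module with hw_get_module(), and CameraHardwareInterface::initialize() (which received &module->common above) then invokes the module's open() method with the camera id string as the device name, which is how the camera_device_ops_t table above gets populated. A rough sketch of that flow, with the two steps collapsed into one function and error handling trimmed:

#include <cstdio>
#include <hardware/hardware.h>
#include <hardware/camera.h>

// Simplified sketch of how a camera HAL device gets opened through
// libhardware. In the real code the hw_get_module() call lives in the camera
// service and the open() call in CameraHardwareInterface::initialize();
// they are shown together here only for illustration.
static int open_camera_hal(int cameraId, camera_device_t** outDevice)
{
    const hw_module_t* module = NULL;
    // Loads the vendor's camera.<platform>.so and returns its module struct.
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &module) != 0) {
        printf("failed to load camera HAL module\n");
        return -1;
    }

    char name[10];
    snprintf(name, sizeof(name), "%d", cameraId);

    // For the HiSilicon module this lands in camera_device_open() above,
    // which fills in the camera_device_ops_t table (get_parameters, ...).
    return module->methods->open(module, name,
                                 reinterpret_cast<hw_device_t**>(outDevice));
}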

Returning to the parameter path: the assignment camera_ops->get_parameters = get_parameters points to the get_parameters() function in the same file:

/*
 **************************************************************************
 * FunctionName: get_parameters;
 * Description : NA;
 * Input       : NA;
 * Output      : NA;
 * ReturnValue : NA;
 * Other       : NA;
 **************************************************************************
 */
static char* get_parameters(struct camera_device * dev)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    android::CameraHal* hal = TO_CAMERA_HAL_INTERFACE(dev);
    android::CameraParameters par;
    android::String8 str_params;
    char* param = NULL;

    par = hal->getParameters();
    str_params = par.flatten();
    if(str_params.length() > 0)
    {
        param = (char*)malloc(str_params.length() + 1);
        if(NULL == param)
        {
            ALOGE("fail to allocate memory for CameraHal or fail to init CameraHal");
            return param;
        }
    }
    sprintf(param, "%s", str_params.string());
    return param;

}
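
Note the memory contract implied here: get_parameters() hands the framework a heap buffer, and CameraHardwareInterface::getParameters() shown earlier returns it through put_parameters() (registered above as camera_ops->put_parameters), falling back to free() only if the HAL does not provide one. The HiSilicon put_parameters() is not part of this trace, but a matching implementation is typically nothing more than a free() of the buffer that get_parameters() malloc()'d; a hypothetical sketch:

#include <stdlib.h>
#include <hardware/camera.h>

// Hypothetical counterpart to the get_parameters() above: the framework calls
// put_parameters() to hand back the flattened string once it is done with it,
// so the HAL simply frees what get_parameters() allocated with malloc().
static void put_parameters(struct camera_device* dev, char* params)
{
    (void)dev;   // the buffer carries no per-device state in this sketch
    if (params != NULL)
    {
        free(params);
    }
}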

The call to hal->getParameters() takes us to CameraHal, in device/hisilicon/bigfish/hardware/camera/camera_hal/CameraHal.cpp:

/*
 **************************************************************************
 * FunctionName: CameraHal::getParameters;
 * Description : NA;
 * Input       : NA;
 * Output      : NA;
 * ReturnValue : NA;
 * Other       : NA;
 **************************************************************************
 */
CameraParameters CameraHal::getParameters() const
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    Mutex::Autolock lock(mLock);

    return mExtendedEnv.mParameters;
}

Here getParameters() simply returns the cached mExtendedEnv.mParameters, so we need to find where that member is assigned; whatever is computed at initialization time is exactly what the application will see later. In the same file there is a parameter-initialization function:

/*
 **************************************************************************
 * FunctionName: CameraHal::initDefaultParameters;
 * Description : NA;
 * Input       : NA;
 * Output      : NA;
 * ReturnValue : NA;
 * Other       : NA;
 **************************************************************************
 */
void CameraHal::initDefaultParameters()
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    int ret = 0;
    CameraParameters p;

    mParameterManager->queryCap(p);

    ret = mParameterManager->setParam(p);
    if(ret < 0)
    {
        CAMERA_HAL_LOGE("fail to set parameters");
        return;
    }

    ret = mParameterManager->commitParam();
    if(ret < 0)
    {
        CAMERA_HAL_LOGE("fail to commit parameters");
    }

    mExtendedEnv.mParameters = p;
}

The interesting call in this initialization is mParameterManager->queryCap(p). CameraHal's constructor creates the manager with mParameterManager = new CapabilityManager(&mExtendedEnv), so the next stop is CapabilityManager, in device/hisilicon/bigfish/hardware/camera/capability_manager/CapabilityManager.cpp:

/*
 **************************************************************************
 * FunctionName: CapabilityManager::queryCap;
 * Description : NA;
 * Input       : NA;
 * Output      : NA;
 * ReturnValue : NA;
 * Other       : NA;
 **************************************************************************
 */
int CapabilityManager::queryCap ( CameraParameters& p )
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    uint32_t i = 0;
    for(i=0; i<mParametersObjs.size(); ++i)
    {
        mParametersObjs[i]->queryCapability(p);
    }
    return 0;
}

queryCap() simply asks every object in mParametersObjs to fill in its capabilities via queryCapability(p). There are quite a few of these parameter objects; we only need the one relevant to our problem, namely CameraParameterPreview::queryCapability():

int CameraParameterPreview::queryCapability(CameraParameters& p)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    struct v4l2_frmsizeenum fsize;
    String8 strPreview("");
    char strTmp[64];
    memset(&fsize, 0x00, sizeof(fsize));
    fsize.index         = 0;
    fsize.pixel_format  = DEFAULT_CAMERA_PREVIEW_V4L2_FORMAT;
    fsize.type          = V4L2_FRMSIZE_TYPE_DISCRETE;

    int order[PREVIEW_SIZE_COUNT];
    int supportnum =0;
    bool haveone=false;

    while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize))
    {
        fsize.index++;
        for(unsigned int i=0; i < PREVIEW_SIZE_COUNT; i++)
        {
            if( (fsize.discrete.width == previewSizes[i].width) &&
                (fsize.discrete.height == previewSizes[i].height) )
            {
                order[supportnum++] = i;
                haveone=true;
            }
        }
    }

    SortingArray(order, supportnum);

    for(int i=0; i < supportnum; i++)
    {
        snprintf(strTmp, sizeof(strTmp), "%dx%d", previewSizes[order[i]].width, previewSizes[order[i]].height);
        strPreview += strTmp;
        if(i < supportnum-1){
            strPreview += ",";
        }
    }

    CAMERA_PARAM_LOGI("support preview size = %s", strPreview.string());
    int maxWidth = 0, maxHeight = 0;
    getMaxSize(strPreview, &maxWidth, &maxHeight);

    if(haveone)
    {
        CAMERA_PARAM_LOGI("real support preview size = %s", strPreview.string());
        p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, strPreview);

        CAMERA_PARAM_LOGI("support video size = %s", strPreview.string());
        p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, strPreview);

        snprintf(strTmp, sizeof(strTmp), "%dx%d", maxWidth, maxHeight);
        String8 realMaxPreviewSize(strTmp);
        CAMERA_PARAM_LOGI("set preferred-preview-size-for-video = %s", realMaxPreviewSize.string());
        p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, realMaxPreviewSize);
    }

    struct v4l2_fmtdesc fmt;

    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while (0 == ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmt))
    {
        fmt.index++;
        CAMERA_PARAM_LOGI("Camera support capture format: { pixelformat = '%c%c%c%c', description = '%s' }",
                fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
                (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
                fmt.description);
        if(strcmp((char *)fmt.description, "MJPEG")== 0 || strcmp((char *)fmt.description, "Motion-JPEG")== 0)
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
        }
    }

    //here we must init the preview size
    p.setPreviewSize(maxWidth, maxHeight);

    String8 strPreviewFmt("");

    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420SP;
    strPreviewFmt += ",";
    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420P;

    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, strPreviewFmt);
    p.set(CameraParameters::KEY_PREVIEW_FORMAT, DEFAULT_CAMERA_PREVIEW_FORMAT);
    p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, DEFAULT_CAMERA_VIDEO_FORMAT);

    return 0;
}

Looking at the logs, the only resolution printed here is the default 640x480. Reading the code, the key point is the ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize) loop: this is where the HAL asks the camera, over V4L2, for its supported frame sizes, and the enumeration is done for a single hard-coded pixel format (DEFAULT_CAMERA_PREVIEW_V4L2_FORMAT). On our box it returns nothing beyond the default. Unable to get any further on our own, we turned to the chip vendor, HiSilicon. Their answer was that this camera reports its frame sizes under a different pixel format, and they provided a patch. The modified code is:

int CameraParameterPreview::queryCapability(CameraParameters& p)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);
    struct v4l2_fmtdesc fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    struct v4l2_frmsizeenum fsize;
    String8 strPreviewYUV("");
    String8 strPreviewMJEPG("");

    char strTmp[64];

    bool haveone = false;
    int orderYUV[PREVIEW_SIZE_COUNT];
    int supportYUVnum = 0;
    int orderMJPEG[PREVIEW_SIZE_COUNT];
    int supportMJPEGnum = 0;
    int maxYUVWidth = 0, maxYUVHeight = 0;
    int maxMJPEGWidth = 0, maxMJPEGHeight = 0;
    while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmt))
    {
        fmt.index++;
        CAMERA_PARAM_LOGI("CameraParameterPreview support capture format: { pixelformat = '%c%c%c%c', description = '%s' }",
            fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
            (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
            fmt.description);
        if(strcmp((char *)fmt.description, "MJPEG")== 0 || strcmp((char *)fmt.description, "Motion-JPEG")== 0)
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
        }
        else if (strcmp((char *)fmt.description, "YUV 4:2:2 (YUYV)")== 0 || strcmp((char *)fmt.description, "YUV 4:2:0 (M420)")== 0)
        {
            mPreviewFormat = V4L2_PIX_FMT_YUYV;
        }
        else
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
        }
        memset(strTmp, 0x00, sizeof(strTmp));

        memset(&fsize, 0x00, sizeof(fsize));
        fsize.index         = 0;
        fsize.pixel_format  = mPreviewFormat;
        fsize.type          = V4L2_FRMSIZE_TYPE_DISCRETE;

        if(mPreviewFormat == V4L2_PIX_FMT_YUYV)
        {
            while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize))
            {
                fsize.index++;
                for(unsigned int i=0; i < PREVIEW_SIZE_COUNT; i++)
                {
                    if( (fsize.discrete.width == previewSizes[i].width) &&
                        (fsize.discrete.height == previewSizes[i].height) )
                    {
                        orderYUV[supportYUVnum++] = i;
                        haveone=true;
                    }
                }
            }
            SortingArray(orderYUV, supportYUVnum);
            for(int i=0; i < supportYUVnum; i++)
            {
                snprintf(strTmp, sizeof(strTmp), "%dx%d", previewSizes[orderYUV[i]].width, previewSizes[orderYUV[i]].height);
                strPreviewYUV += strTmp;
                if(i < supportYUVnum-1){
                    strPreviewYUV += ",";
                }
            }
            CAMERA_PARAM_LOGI("support preview yuv size = %s", strPreviewYUV.string());
            getMaxSize(strPreviewYUV, &maxYUVWidth, &maxYUVHeight);
        }
        else
        {
            while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize))
            {
                fsize.index++;
                for(unsigned int i=0; i < PREVIEW_SIZE_COUNT; i++)
                {
                    if( (fsize.discrete.width == previewSizes[i].width) &&
                        (fsize.discrete.height == previewSizes[i].height) )
                    {
                        orderMJPEG[supportMJPEGnum++] = i;
                        haveone=true;
                    }
                }
            }
            SortingArray(orderMJPEG, supportMJPEGnum);
            for(int i=0; i < supportMJPEGnum; i++)
            {
                snprintf(strTmp, sizeof(strTmp), "%dx%d", previewSizes[orderMJPEG[i]].width, previewSizes[orderMJPEG[i]].height);
                strPreviewMJEPG += strTmp;
                if(i < supportMJPEGnum-1){
                    strPreviewMJEPG += ",";
                }
            }

            CAMERA_PARAM_LOGI("support preview mjepg size = %s", strPreviewMJEPG.string());
            getMaxSize(strPreviewMJEPG, &maxMJPEGWidth, &maxMJPEGHeight);
        }
    }

    if(haveone)
    {
        if(maxYUVWidth * maxYUVHeight > maxMJPEGWidth * maxMJPEGHeight)
        {
            mPreviewFormat = V4L2_PIX_FMT_YUYV;
            CAMERA_PARAM_LOGI("real support preview yuv size = %s", strPreviewYUV.string());
            p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, strPreviewYUV);

            CAMERA_PARAM_LOGI("support video yuv size = %s", strPreviewYUV.string());
            p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, strPreviewYUV);

            snprintf(strTmp, sizeof(strTmp), "%dx%d", maxYUVWidth, maxYUVHeight);
            p.setPreviewSize(maxYUVWidth, maxYUVHeight);
        }
        else
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
            CAMERA_PARAM_LOGI("real support preview mjepg size = %s", strPreviewMJEPG.string());
            p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, strPreviewMJEPG);
            CAMERA_PARAM_LOGI("support video mjepg size = %s", strPreviewMJEPG.string());
            p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, strPreviewMJEPG);
            snprintf(strTmp, sizeof(strTmp), "%dx%d", maxMJPEGWidth, maxMJPEGHeight);
            p.setPreviewSize(maxMJPEGWidth, maxMJPEGHeight);
        }
        String8 realMaxPreviewSize(strTmp);
        CAMERA_PARAM_LOGI("set preferred-preview-size-for-video = %s", realMaxPreviewSize.string());
        p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, realMaxPreviewSize);
    }

    //here we must init the preview size


    String8 strPreviewFmt("");

    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420SP;
    strPreviewFmt += ",";
    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420P;

    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, strPreviewFmt);
    p.set(CameraParameters::KEY_PREVIEW_FORMAT, DEFAULT_CAMERA_PREVIEW_FORMAT);
    p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, DEFAULT_CAMERA_VIDEO_FORMAT);

    return 0;
}

The patch first enumerates the pixel formats the camera supports (VIDIOC_ENUM_FMT) and then enumerates the frame sizes for each format (VIDIOC_ENUM_FRAMESIZES), finally choosing the format whose maximum size is larger. With this change the full list of supported preview sizes is reported, and the getSupportedPreviewSizes problem is resolved.
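
To double-check what a given camera actually reports, the same nested enumeration can be run standalone against the V4L2 device node (where v4l-utils is available, `v4l2-ctl --list-formats-ext` gives the same information). A minimal sketch, assuming the camera shows up as /dev/video0:

#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <cstdio>
#include <cstring>

// Enumerate every pixel format the device exposes and, for each format, every
// discrete frame size -- the same VIDIOC_ENUM_FMT / VIDIOC_ENUM_FRAMESIZES
// pattern the patched queryCapability() uses.
int main()
{
    const char* dev = "/dev/video0";   // adjust to the camera's device node
    int fd = open(dev, O_RDWR);
    if (fd < 0) {
        perror("open");
        return 1;
    }

    struct v4l2_fmtdesc fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0) {
        printf("format '%c%c%c%c' (%s):\n",
               fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
               (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
               (const char*)fmt.description);

        struct v4l2_frmsizeenum fsize;
        memset(&fsize, 0, sizeof(fsize));
        fsize.pixel_format = fmt.pixelformat;   // frame sizes are per-format
        while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize) == 0) {
            if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                printf("  %ux%u\n", fsize.discrete.width, fsize.discrete.height);
            }
            fsize.index++;
        }
        fmt.index++;
    }

    close(fd);
    return 0;
}

On the camera from this article, the larger sizes should show up under a pixel format other than the one the original code hard-coded, which is what HiSilicon's explanation and patch account for.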
