android8

#include

#include "Camera.h"

/******************************************************************************\

DECLARATIONS

Not used in any other project source file, so a separate header would be redundant

\******************************************************************************/

extern camera_module_t HAL_MODULE_INFO_SYM;

namespace android {

namespace HalModule {

/* Available cameras */

extern Camera *cams[];

static int getNumberOfCameras();

static int getCameraInfo(int cameraId, struct camera_info *info);

static int setCallbacks(const camera_module_callbacks_t *callbacks);

static void getVendorTagOps(vendor_tag_ops_t* ops);

static int openDevice(const hw_module_t *module, const char *name, hw_device_t **device);

static struct hw_module_methods_t moduleMethods = {

.open = openDevice

};

}; /* namespace HalModule */

}; /* namespace android */

/******************************************************************************\

DEFINITIONS

\******************************************************************************/

camera_module_t HAL_MODULE_INFO_SYM = {

.common = {

.tag                = HARDWARE_MODULE_TAG,

.module_api_version = CAMERA_MODULE_API_VERSION_2_3,

.hal_api_version    = HARDWARE_HAL_API_VERSION,

.id                 = "usbcamera",

.name               = "V4l2 Camera",

.author             = "Antmicro Ltd.",

.methods            = &android::HalModule::moduleMethods,

.dso                = NULL,

.reserved           = {0}

},

.get_number_of_cameras  = android::HalModule::getNumberOfCameras,

.get_camera_info        = android::HalModule::getCameraInfo,

.set_callbacks          = android::HalModule::setCallbacks,

};

namespace android {

namespace HalModule {

static Camera mainCamera;

Camera *cams[] = {

&mainCamera

};

static int getNumberOfCameras() {

return NELEM(cams);

};

static int getCameraInfo(int cameraId, struct camera_info *info) {

cameraId = 0;//cameraId - 1;

if(cameraId < 0 || cameraId >= getNumberOfCameras()) {

return -ENODEV;

}

if(!cams[cameraId]->isValid()) {

return -ENODEV;

}

return cams[cameraId]->cameraInfo(info);

}

static int setCallbacks(const camera_module_callbacks_t * /*callbacks*/) {

ALOGI("%s: lihb setCallbacks", __FUNCTION__);

/* TODO: Implement for hotplug support */

return OK;

}

static int openDevice(const hw_module_t *module, const char *name, hw_device_t **device) {

if (module != &HAL_MODULE_INFO_SYM.common) {

return -EINVAL;

}

if (name == NULL) {

return -EINVAL;

}

errno = 0;

int cameraId = (int)strtol(name, NULL, 10);

cameraId = 0;

if(errno || cameraId < 0 || cameraId >= getNumberOfCameras()) {

return -EINVAL;

}

if(!cams[cameraId]->isValid()) {

*device = NULL;

return -ENODEV;

}

return cams[cameraId]->openDevice(device);

}

}; /* namespace HalModule */

}; /* namespace android */
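To make the wiring above concrete, here is a minimal framework-side sketch (it is not part of the HAL sources, and the helper name openUsbCamera is purely illustrative) of how a caller such as cameraserver would go through HAL_MODULE_INFO_SYM and end up in the code above:

/* Minimal framework-side sketch: NOT part of the HAL sources.
 * The helper name openUsbCamera is illustrative only. */
#include <errno.h>
#include <hardware/camera_common.h>

extern camera_module_t HAL_MODULE_INFO_SYM;   /* exported by halModule.cpp */

static int openUsbCamera(hw_device_t **device) {
    camera_module_t *module = &HAL_MODULE_INFO_SYM;

    /* getNumberOfCameras() returns NELEM(cams), i.e. 1 for this HAL. */
    if (module->get_number_of_cameras() < 1) {
        return -ENODEV;
    }

    /* Index 0 is the only valid slot in cams[], regardless of the
     * system-wide id the framework assigned to this camera. */
    struct camera_info info;
    if (module->get_camera_info(0, &info) != 0) {
        return -ENODEV;
    }

    /* openDevice() parses the name string with strtol() and then forces the
     * result to 0, so cams[0] (mainCamera) is what actually gets opened. */
    return module->common.methods->open(&module->common, "0", device);
}

Everything this sketch touches (get_number_of_cameras, get_camera_info, methods->open) is exactly the set of entry points that halModule.cpp exports above.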

You may have noticed that in getCameraInfo I hard-code cameraId to 0, even though we just said earlier that our USB camera is the second camera in the system and its id should be 1. The id here does not mean the same thing as that earlier id. It is 0 here because this USB camera HAL only carries a single camera: that camera's implementation lives in the Camera *cams[] array, and the array holds exactly one object, so its index is naturally 0. When getCameraInfo or openDevice is handled in halModule.cpp, the call is routed through this array to the Camera object defined by static Camera mainCamera;. This Camera class is also code we wrote ourselves:

//Camera.h

#ifndef CAMERA_H

#define CAMERA_H

#include <utils/Errors.h>

#include <hardware/camera_common.h>

#include <V4l2Device.h>

#include <hardware/camera3.h>

#include <camera/CameraMetadata.h>

#include <utils/Mutex.h>

#include "Workers.h"

#include "ImageConverter.h"

#include "DbgUtils.h"

#include <cutils/ashmem.h>

#include <cutils/log.h>

#include <sys/mman.h>

#include "VGA_YUV422.h"

namespace android {

class Camera: public camera3_device {

public:

Camera();

virtual ~Camera();

bool isValid() { return mValid; }

virtual status_t cameraInfo(struct camera_info *info);

virtual int openDevice(hw_device_t **device);

virtual int closeDevice();

protected:

virtual camera_metadata_t * staticCharacteristics();

virtual int initialize(const camera3_callback_ops_t *callbackOps);

virtual int configureStreams(camera3_stream_configuration_t *streamList);

virtual const camera_metadata_t * constructDefaultRequestSettings(int type);

virtual int registerStreamBuffers(const camera3_stream_buffer_set_t *bufferSet);

virtual int processCaptureRequest(camera3_capture_request_t *request);

/* HELPERS/SUBPROCEDURES */

void notifyShutter(uint32_t frameNumber, uint64_t timestamp);

void processCaptureResult(uint32_t frameNumber, const camera_metadata_t *result, const Vector<camera3_stream_buffer> &buffers);

camera_metadata_t *mStaticCharacteristics;

camera_metadata_t *mDefaultRequestSettings[CAMERA3_TEMPLATE_COUNT];

CameraMetadata mLastRequestSettings;

V4l2Device *mDev;

bool mValid;

const camera3_callback_ops_t *mCallbackOps;

size_t mJpegBufferSize;

private:

ImageConverter mConverter;

Mutex mMutex;

uint8_t* mFrameBuffer;

uint8_t* rszbuffer;

int mProperty_enableTimesTamp = -1;

/* STATIC WRAPPERS */

static int sClose(hw_device_t *device);

static int sInitialize(const struct camera3_device *device, const camera3_callback_ops_t *callback_ops);

static int sConfigureStreams(const struct camera3_device *device, camera3_stream_configuration_t *stream_list);

static int sRegisterStreamBuffers(const struct camera3_device *device, const camera3_stream_buffer_set_t *buffer_set);

static const camera_metadata_t * sConstructDefaultRequestSettings(const struct camera3_device *device, int type);

static int sProcessCaptureRequest(const struct camera3_device *device, camera3_capture_request_t *request);

static void sGetMetadataVendorTagOps(const struct camera3_device *device, vendor_tag_query_ops_t* ops);

static void sDump(const struct camera3_device *device, int fd);

static int sFlush(const struct camera3_device *device);

static void _AddTimesTamp(uint8_t* buffer, int32_t width, int32_t height);

static camera3_device_ops_t sOps;

};

}; /* namespace android */

#endif // CAMERA_H

/*

* Copyright © 2015-2016 Antmicro

*

* Licensed under the Apache License, Version 2.0 (the "License");

* you may not use this file except in compliance with the License.

* You may obtain a copy of the License at

*

*      http://www.apache.org/licenses/LICENSE-2.0

*

* Unless required by applicable law or agreed to in writing, software

* distributed under the License is distributed on an "AS IS" BASIS,

* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

* See the License for the specific language governing permissions and

* limitations under the License.

*/

#define LOG_TAG "usb-Camera"

#define LOG_NDEBUG 0

#include <hardware/camera3.h>

#include <camera/CameraMetadata.h>

#include <utils/misc.h>

#include <utils/Log.h>

#include <hardware/gralloc.h>

#include <ui/Rect.h>

#include <ui/GraphicBufferMapper.h>

#include <ui/Fence.h>

#include <assert.h>

#include “DbgUtils.h”

#include “Camera.h”

#include “ImageConverter.h”

#include “libyuv.h”

//#include <sprd_exvideo.h>

#include <cutils/properties.h>

#include

#include

namespace android {

/**

* \class Camera

*

* Android’s Camera 3 device implementation.

*

* Declaration of camera capabilities, frame request handling, etc. This code

* is what Android framework talks to.

*/

Camera::Camera() :
mStaticCharacteristics(NULL)

, mCallbackOps(NULL)

, mJpegBufferSize(0) {

ALOGI("Camera() start");

DBGUTILS_AUTOLOGCALL(__func__);

for(size_t i = 0; i < NELEM(mDefaultRequestSettings); i++) {

mDefaultRequestSettings[i] = NULL;

}

common.tag      = HARDWARE_DEVICE_TAG;

common.version  = CAMERA_DEVICE_API_VERSION_3_2;//CAMERA_DEVICE_API_VERSION_3_0;

common.module   = &HAL_MODULE_INFO_SYM.common;

common.close    = Camera::sClose;

ops             = &sOps;

priv            = NULL;

mValid = true;

mFrameBuffer = new uint8_t[640*480*4];

rszbuffer = new uint8_t[640*480*4];

mDev = new V4l2Device();

if(!mDev) {

mValid = false;

}

}

Camera::~Camera() {

DBGUTILS_AUTOLOGCALL(__func__);

gWorkers.stop();

mDev->disconnect();

delete[] mFrameBuffer;

delete[] rszbuffer;

delete mDev;

}

status_t Camera::cameraInfo(struct camera_info *info) {

DBGUTILS_AUTOLOGCALL(__func__);

ALOGI("Camera::cameraInfo entry");

ALOGE("Camera::cameraInfo entry");

Mutex::Autolock lock(mMutex);

info->facing = CAMERA_FACING_FRONT;//BACK;//FRONT;

info->orientation = 0;

info->device_version = CAMERA_DEVICE_API_VERSION_3_2;//CAMERA_DEVICE_API_VERSION_3_0;//CAMERA_DEVICE_API_VERSION_3_4;

info->static_camera_characteristics = staticCharacteristics();

return NO_ERROR;

}

int Camera::openDevice(hw_device_t **device) {

ALOGI("%s", __FUNCTION__);

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

char enableTimesTamp[PROPERTY_VALUE_MAX];

char enableAVI[PROPERTY_VALUE_MAX];

mDev->connect();

*device = &common;

gWorkers.start();

return NO_ERROR;

}

int Camera::closeDevice() {

ALOGI("%s", __FUNCTION__);

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

gWorkers.stop();

mDev->disconnect();

return NO_ERROR;

}

camera_metadata_t *Camera::staticCharacteristics() {

if(mStaticCharacteristics)

return mStaticCharacteristics;

CameraMetadata cm;

auto &resolutions = mDev->availableResolutions();

auto &previewResolutions = resolutions;

auto sensorRes = mDev->sensorResolution();

/***********************************\

|* START OF CAMERA CHARACTERISTICS *|

\***********************************/

/* fake, but valid aspect ratio */

const float sensorInfoPhysicalSize[] = {

5.0f,

5.0f * (float)sensorRes.height / (float)sensorRes.width

};

cm.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorInfoPhysicalSize, NELEM(sensorInfoPhysicalSize));

/* fake */

static const float lensInfoAvailableFocalLengths[] = {3.30f};

cm.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, lensInfoAvailableFocalLengths, NELEM(lensInfoAvailableFocalLengths));

static const uint8_t lensFacing = ANDROID_LENS_FACING_FRONT;

cm.update(ANDROID_LENS_FACING, &lensFacing, 1);

const int32_t sensorInfoPixelArraySize[] = {

(int32_t)sensorRes.width,

(int32_t)sensorRes.height

};

cm.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, sensorInfoPixelArraySize, NELEM(sensorInfoPixelArraySize));

const int32_t sensorInfoActiveArraySize[] = {

0,                          0,

(int32_t)sensorRes.width,   (int32_t)sensorRes.height

};

cm.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, sensorInfoActiveArraySize, NELEM(sensorInfoActiveArraySize));

static const int32_t scalerAvailableFormats[] = {

HAL_PIXEL_FORMAT_RGBA_8888, // preview stream

HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, // preview stream

/* Non-preview one, must be last - see following code */

HAL_PIXEL_FORMAT_BLOB // still-capture (JPEG) stream

};

cm.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalerAvailableFormats, NELEM(scalerAvailableFormats));

/* Only for HAL_PIXEL_FORMAT_BLOB */

const size_t mainStreamConfigsCount = resolutions.size();

/* For all other supported pixel formats */

const size_t previewStreamConfigsCount = previewResolutions.size() * (NELEM(scalerAvailableFormats) - 1);

const size_t streamConfigsCount = mainStreamConfigsCount + previewStreamConfigsCount;

int32_t scalerAvailableStreamConfigurations[streamConfigsCount * 4];

int64_t scalerAvailableMinFrameDurations[streamConfigsCount * 4];

int32_t scalerAvailableProcessedSizes[previewResolutions.size() * 2];

int64_t scalerAvailableProcessedMinDurations[previewResolutions.size()];

int32_t scalerAvailableJpegSizes[resolutions.size() * 2];

int64_t scalerAvailableJpegMinDurations[resolutions.size()];

size_t i4 = 0;

size_t i2 = 0;

size_t i1 = 0;

/* Main stream configurations */

for(size_t resId = 0; resId < resolutions.size(); ++resId) {

scalerAvailableStreamConfigurations[i4 + 0] = HAL_PIXEL_FORMAT_BLOB;

scalerAvailableStreamConfigurations[i4 + 1] = (int32_t)resolutions[resId].width;

scalerAvailableStreamConfigurations[i4 + 2] = (int32_t)resolutions[resId].height;

scalerAvailableStreamConfigurations[i4 + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

scalerAvailableMinFrameDurations[i4 + 0] = HAL_PIXEL_FORMAT_BLOB;

scalerAvailableMinFrameDurations[i4 + 1] = (int32_t)resolutions[resId].width;

scalerAvailableMinFrameDurations[i4 + 2] = (int32_t)resolutions[resId].height;

scalerAvailableMinFrameDurations[i4 + 3] = 1000000000 / 30; /* TODO: read from the device */

scalerAvailableJpegSizes[i2 + 0] = (int32_t)resolutions[resId].width;

scalerAvailableJpegSizes[i2 + 1] = (int32_t)resolutions[resId].height;

scalerAvailableJpegMinDurations[i1] = 1000000000 / 30; /* TODO: read from the device */

i4 += 4;

i2 += 2;

i1 += 1;

}

i2 = 0;

i1 = 0;

/* Preview stream configurations */

for(size_t resId = 0; resId < previewResolutions.size(); ++resId) {

for(size_t fmtId = 0; fmtId < NELEM(scalerAvailableFormats) - 1; ++fmtId) {

scalerAvailableStreamConfigurations[i4 + 0] = scalerAvailableFormats[fmtId];

scalerAvailableStreamConfigurations[i4 + 1] = (int32_t)previewResolutions[resId].width;

scalerAvailableStreamConfigurations[i4 + 2] = (int32_t)previewResolutions[resId].height;

scalerAvailableStreamConfigurations[i4 + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

scalerAvailableMinFrameDurations[i4 + 0] = scalerAvailableFormats[fmtId];

scalerAvailableMinFrameDurations[i4 + 1] = (int32_t)previewResolutions[resId].width;

scalerAvailableMinFrameDurations[i4 + 2] = (int32_t)previewResolutions[resId].height;

scalerAvailableMinFrameDurations[i4 + 3] = 1000000000 / 10; /* TODO: read from the device */

i4 += 4;

}

scalerAvailableProcessedSizes[i2 + 0] = (int32_t)previewResolutions[resId].width;

scalerAvailableProcessedSizes[i2 + 1] = (int32_t)previewResolutions[resId].height;

scalerAvailableProcessedMinDurations[i1] = 1000000000 / 10; /* TODO: read from the device */

i2 += 2;

i1 += 1;

}

cm.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, scalerAvailableStreamConfigurations, (size_t)NELEM(scalerAvailableStreamConfigurations));

cm.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, scalerAvailableMinFrameDurations, (size_t)NELEM(scalerAvailableMinFrameDurations));

/* Probably fake */

cm.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, scalerAvailableMinFrameDurations, (size_t)NELEM(scalerAvailableMinFrameDurations));

cm.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, scalerAvailableJpegSizes, (size_t)NELEM(scalerAvailableJpegSizes));

cm.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, scalerAvailableJpegMinDurations, (size_t)NELEM(scalerAvailableJpegMinDurations));

cm.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, scalerAvailableProcessedSizes, (size_t)NELEM(scalerAvailableProcessedSizes));

cm.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, scalerAvailableProcessedMinDurations, (size_t)NELEM(scalerAvailableProcessedMinDurations));

// Add the capabilities set; otherwise the API2 path fails when getStreamConfigurationMap tries to read REQUEST_AVAILABLE_CAPABILITIES.

Vector<uint8_t> available_capabilities;

available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);

available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);

available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);

cm.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,

available_capabilities.array(),

available_capabilities.size());

/* ~8.25 bit/px (https://en.wikipedia.org/wiki/JPEG#Sample_photographs) */

/* Use 9 bit/px, add buffer info struct size, round up to page size */

mJpegBufferSize = sensorRes.width * sensorRes.height * 9 + sizeof(camera3_jpeg_blob);

mJpegBufferSize = (mJpegBufferSize + PAGE_SIZE - 1u) & ~(PAGE_SIZE - 1u);

const int32_t jpegMaxSize = (int32_t)mJpegBufferSize;

cm.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

static const int32_t jpegAvailableThumbnailSizes[] = {

0, 0,

320, 240

};

cm.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, NELEM(jpegAvailableThumbnailSizes));

static const int32_t sensorOrientation = 90;

cm.update(ANDROID_SENSOR_ORIENTATION, &sensorOrientation, 1);

static const uint8_t flashInfoAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;

cm.update(ANDROID_FLASH_INFO_AVAILABLE, &flashInfoAvailable, 1);

static const float scalerAvailableMaxDigitalZoom = 1;

cm.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &scalerAvailableMaxDigitalZoom, 1);

static const uint8_t statisticsFaceDetectModes[] = {

ANDROID_STATISTICS_FACE_DETECT_MODE_OFF

};

cm.update(ANDROID_STATISTICS_FACE_DETECT_MODE, statisticsFaceDetectModes, NELEM(statisticsFaceDetectModes));

static const int32_t statisticsInfoMaxFaceCount = 0;

cm.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &statisticsInfoMaxFaceCount, 1);

static const uint8_t controlAvailableSceneModes[] = {

ANDROID_CONTROL_SCENE_MODE_DISABLED

};

cm.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, controlAvailableSceneModes, NELEM(controlAvailableSceneModes));

static const uint8_t controlAvailableEffects[] = {

ANDROID_CONTROL_EFFECT_MODE_OFF

};

cm.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, controlAvailableEffects, NELEM(controlAvailableEffects));

static const int32_t controlMaxRegions[] = {

0, /* AE */

0, /* AWB */

0  /* AF */

};

cm.update(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, NELEM(controlMaxRegions));

static const uint8_t controlAeAvailableModes[] = {

ANDROID_CONTROL_AE_MODE_OFF

};

cm.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, controlAeAvailableModes, NELEM(controlAeAvailableModes));

static const camera_metadata_rational controlAeCompensationStep = {1, 3};

cm.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, &controlAeCompensationStep, 1);

int32_t controlAeCompensationRange[] = {-9, 9};

cm.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange, NELEM(controlAeCompensationRange));

static const int32_t controlAeAvailableTargetFpsRanges[] = {

10, 20

};

cm.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, controlAeAvailableTargetFpsRanges, NELEM(controlAeAvailableTargetFpsRanges));

static const uint8_t controlAeAvailableAntibandingModes[] = {

ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF

};

cm.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, controlAeAvailableAntibandingModes, NELEM(controlAeAvailableAntibandingModes));

static const uint8_t controlAwbAvailableModes[] = {

ANDROID_CONTROL_AWB_MODE_AUTO,

ANDROID_CONTROL_AWB_MODE_OFF

};

cm.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, controlAwbAvailableModes, NELEM(controlAwbAvailableModes));

static const uint8_t controlAfAvailableModes[] = {

ANDROID_CONTROL_AF_MODE_OFF

};

cm.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, controlAfAvailableModes, NELEM(controlAfAvailableModes));

static const uint8_t controlAvailableVideoStabilizationModes[] = {

ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF

};

cm.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, controlAvailableVideoStabilizationModes, NELEM(controlAvailableVideoStabilizationModes));

const uint8_t infoSupportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;

cm.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &infoSupportedHardwareLevel, 1);

/***********************************\

|*  END OF CAMERA CHARACTERISTICS  *|

\***********************************/

mStaticCharacteristics = cm.release();

return mStaticCharacteristics;

}

int Camera::initialize(const camera3_callback_ops_t *callbackOps) {

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

mCallbackOps = callbackOps;

return NO_ERROR;

}

const camera_metadata_t * Camera::constructDefaultRequestSettings(int type) {

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

/* TODO: validate type */

if(mDefaultRequestSettings[type]) {

return mDefaultRequestSettings[type];

}

CameraMetadata cm;

static const int32_t requestId = 0;

cm.update(ANDROID_REQUEST_ID, &requestId, 1);

static const float lensFocusDistance = 0.0f;

cm.update(ANDROID_LENS_FOCUS_DISTANCE, &lensFocusDistance, 1);

auto sensorSize = mDev->sensorResolution();

const int32_t scalerCropRegion[] = {

0,                          0,

(int32_t)sensorSize.width,  (int32_t)sensorSize.height

};

cm.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, NELEM(scalerCropRegion));

static const int32_t jpegThumbnailSize[] = {

0, 0

};

cm.update(ANDROID_JPEG_THUMBNAIL_SIZE, jpegThumbnailSize, NELEM(jpegThumbnailSize));

static const uint8_t jpegThumbnailQuality = 50;

cm.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegThumbnailQuality, 1);

static const double jpegGpsCoordinates[] = {

0, 0

};

cm.update(ANDROID_JPEG_GPS_COORDINATES, jpegGpsCoordinates, NELEM(jpegGpsCoordinates));

static const uint8_t jpegGpsProcessingMethod[32] = "None";

cm.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, jpegGpsProcessingMethod, NELEM(jpegGpsProcessingMethod));

static const int64_t jpegGpsTimestamp = 0;

cm.update(ANDROID_JPEG_GPS_TIMESTAMP, &jpegGpsTimestamp, 1);

static const int32_t jpegOrientation = 0;

cm.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

/** android.stats */

static const uint8_t statisticsFaceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;

cm.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &statisticsFaceDetectMode, 1);

static const uint8_t statisticsHistogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;

cm.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &statisticsHistogramMode, 1);

static const uint8_t statisticsSharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;

cm.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &statisticsSharpnessMapMode, 1);

uint8_t controlCaptureIntent = 0;

switch (type) {

case CAMERA3_TEMPLATE_PREVIEW:          controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;             break;

case CAMERA3_TEMPLATE_STILL_CAPTURE:    controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;       break;

case CAMERA3_TEMPLATE_VIDEO_RECORD:     controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;        break;

case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:   controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;      break;

case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;    break;

default:                                controlCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;              break;

}

cm.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlCaptureIntent, 1);

static const uint8_t controlMode = ANDROID_CONTROL_MODE_OFF;

cm.update(ANDROID_CONTROL_MODE, &controlMode, 1);

static const uint8_t controlEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;

cm.update(ANDROID_CONTROL_EFFECT_MODE, &controlEffectMode, 1);

static const uint8_t controlSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;

cm.update(ANDROID_CONTROL_SCENE_MODE, &controlSceneMode, 1);

static const uint8_t controlAeMode = ANDROID_CONTROL_AE_MODE_OFF;

cm.update(ANDROID_CONTROL_AE_MODE, &controlAeMode, 1);

static const uint8_t controlAeLock = ANDROID_CONTROL_AE_LOCK_OFF;

cm.update(ANDROID_CONTROL_AE_LOCK, &controlAeLock, 1);

static const int32_t controlAeRegions[] = {

0,                          0,

(int32_t)sensorSize.width,  (int32_t)sensorSize.height,

1000

};

cm.update(ANDROID_CONTROL_AE_REGIONS, controlAeRegions, NELEM(controlAeRegions));

cm.update(ANDROID_CONTROL_AWB_REGIONS, controlAeRegions, NELEM(controlAeRegions));

cm.update(ANDROID_CONTROL_AF_REGIONS, controlAeRegions, NELEM(controlAeRegions));

static const int32_t controlAeExposureCompensation = 0;

cm.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &controlAeExposureCompensation, 1);

static const int32_t controlAeTargetFpsRange[] = {

10, 20

};

cm.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, controlAeTargetFpsRange, NELEM(controlAeTargetFpsRange));

static const uint8_t controlAeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

cm.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &controlAeAntibandingMode, 1);

static const uint8_t controlAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;

cm.update(ANDROID_CONTROL_AWB_MODE, &controlAwbMode, 1);

static const uint8_t controlAwbLock = ANDROID_CONTROL_AWB_LOCK_OFF;

cm.update(ANDROID_CONTROL_AWB_LOCK, &controlAwbLock, 1);

uint8_t controlAfMode = ANDROID_CONTROL_AF_MODE_OFF;

cm.update(ANDROID_CONTROL_AF_MODE, &controlAfMode, 1);

static const uint8_t controlAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;

cm.update(ANDROID_CONTROL_AE_STATE, &controlAeState, 1);

static const uint8_t controlAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

cm.update(ANDROID_CONTROL_AF_STATE, &controlAfState, 1);

static const uint8_t controlAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

cm.update(ANDROID_CONTROL_AWB_STATE, &controlAwbState, 1);

static const uint8_t controlVideoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;

cm.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &controlVideoStabilizationMode, 1);

static const int32_t controlAePrecaptureId = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;

cm.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, &controlAePrecaptureId, 1);

static const int32_t controlAfTriggerId = 0;

cm.update(ANDROID_CONTROL_AF_TRIGGER_ID, &controlAfTriggerId, 1);

mDefaultRequestSettings[type] = cm.release();

return mDefaultRequestSettings[type];

}

int Camera::configureStreams(camera3_stream_configuration_t *streamList) {

DBGUTILS_AUTOLOGCALL(__func__);

Mutex::Autolock lock(mMutex);

ALOGI("configureStreams");

/* TODO: sanity checks */

ALOGI("+--------------------------------------------------------------------------------");

ALOGI("| STREAMS FROM FRAMEWORK");

ALOGI("+--------------------------------------------------------------------------------");

for(size_t i = 0; i < streamList->num_streams; ++i) {

camera3_stream_t *newStream = streamList->streams[i];

ALOGI("| p=%p  fmt=0x%.2x  type=%u  usage=0x%.8x  size=%4ux%-4u  buf_no=%u",

newStream,

newStream->format,

newStream->stream_type,

newStream->usage,

newStream->width,

newStream->height,

newStream->max_buffers);

}

ALOGI("+--------------------------------------------------------------------------------");

/* TODO: do we need input stream? */

camera3_stream_t *inStream = NULL;

unsigned width = 0;

unsigned height = 0;

for(size_t i = 0; i < streamList->num_streams; ++i) {

camera3_stream_t *newStream = streamList->streams[i];

/* TODO: validate: null */

if(newStream->stream_type == CAMERA3_STREAM_INPUT || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {

if(inStream) {

ALOGI("Only one input/bidirectional stream allowed (previous is %p, this %p)", inStream, newStream);

return BAD_VALUE;

}

inStream = newStream;

}

/* TODO: validate format */

if(newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {

newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;

}

/* TODO: support ZSL */

if(newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {

ALOGI("ZSL STREAM FOUND! It is not supported for now.");

ALOGI("    Disable it by placing following line in /system/build.prop:");

ALOGI("    camera.disable_zsl_mode=1");

return BAD_VALUE;

}

switch(newStream->stream_type) {

case CAMERA3_STREAM_OUTPUT:         newStream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN;                                break;

case CAMERA3_STREAM_INPUT:          newStream->usage = GRALLOC_USAGE_SW_READ_OFTEN;                                 break;

case CAMERA3_STREAM_BIDIRECTIONAL:  newStream->usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN;  break;

}

newStream->max_buffers = 1; /* TODO: support larger queue */

if(newStream->width * newStream->height > width * height) {

width = newStream->width;

height = newStream->height;

}

/* TODO: store stream pointers somewhere and configure only new ones */

}

if(mDev->isNeedsetResolution(width, height))

{

if(mDev->isStreaming())

{

if (!mDev->setStreaming(false))

{

ALOGI("Could not stop streaming");

return NO_INIT;

}

}

if (!mDev->setResolution(width, height))

{

ALOGI("Could not set resolution");

return NO_INIT;

}
