Camera应用部分:
packages/apps/Camera/src/com/android/camera/Camera.java
Camera本地框架:
frameworks/base/core/java/android/hardware/Camera.java
Camera服务部分:
frameworks/base/services/camera/libcameraservice,这部分需要camera的硬件抽象层来实现上述的本地框架,会被编译成库libcameraservice.so
Camera硬件接口层
frameworks/base/include/camera/CameraHardwareInterface.h
Camera HAL部分:
device/samsung/proprietary/libcamera/

篇幅主要以take picture为例
点击(此处)折叠或打开
- 首先从上层讲解下来
- packages/apps/Camera/src/com/android/camera/Camera.java
- private void capture()
{
- mCaptureOnlyData =
null;
- // See android.hardware.Camera.Parameters.setRotation
for
- mParameters.setRotation(rotation);
- …….
- mCameraDevice.setParameters(mParameters);
- //app 应用注册takepicture回调函数
- mCameraDevice.takePicture(mShutterCallback,
mRawPictureCallback,
- mPostViewPictureCallback, new JpegPictureCallback(loc));
- mPreviewing =
false;
- }
- 下面是关于四个回调函数的实现
- private final
class ShutterCallback
- implements android.hardware.Camera.ShutterCallback
{
- public void onShutter()
{
- mShutterCallbackTime = System.currentTimeMillis();
- mShutterLag = mShutterCallbackTime
- mCaptureStartTime;
- Log.v(TAG,
"mShutterLag = " + mShutterLag
+ "ms");
- clearFocusState();
- }
- }
- private final
class RawPictureCallback implements PictureCallback
{
- public void onPictureTaken(
- byte [] rawData, android.hardware.Camera camera)
{
- mRawPictureCallbackTime = System.currentTimeMillis();
- Log.v(TAG,
"mShutterToRawCallbackTime = "
- +
(mRawPictureCallbackTime - mShutterCallbackTime)
+ "ms");
- }
- }
- private final class PostViewPictureCallback implements PictureCallback {
- public void onPictureTaken(
- byte [] data, android.hardware.Camera camera)
{
- mPostViewPictureCallbackTime = System.currentTimeMillis();
- Log.v(TAG,
"mShutterToPostViewCallbackTime = "
- +
(mPostViewPictureCallbackTime - mShutterCallbackTime)
- +
"ms");
- }
- }
- private final
class JpegPictureCallback implements PictureCallback
{
- Location mLocation;
- public void onPictureTaken(
- final byte [] jpegData, final android.hardware.Camera camera)
{
- if
(mPausing)
{
- return;
- }
- mJpegPictureCallbackTime = System.currentTimeMillis();
-
- …………………..
- //这个函数等下做详解
- mImageCapture.storeImage(jpegData, camera, mLocation);
- }
- }
- 2、上层注册好回调函数之后,进入frameworks/base/core/java/android/hardware/Camera.java,由takePicture()保存这些回调函数并调用native方法
- public final void takePicture(ShutterCallback shutter, PictureCallback raw,
- PictureCallback postview, PictureCallback jpeg)
{
- mShutterCallback = shutter;
- mRawImageCallback = raw;
- mPostviewCallback = postview;
- mJpegCallback = jpeg;
- native_takePicture();
- }
- frameworks/base/services/camera/libcameraservice
- // take a picture
- image is returned
in callback
- status_t CameraService::Client::takePicture()
{
- LOG1("takePicture (pid %d)", getCallingPid());
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result
!= NO_ERROR) return result;
- //用于判断指定的msg所对应的callback是否可以回调
- enableMsgType(CAMERA_MSG_SHUTTER
|
- CAMERA_MSG_POSTVIEW_FRAME |
- CAMERA_MSG_RAW_IMAGE |
- CAMERA_MSG_COMPRESSED_IMAGE);
- return mHardware->takePicture();//从这里就进入hal实现了,即上层提供操作设备驱动的接口
- }
- 3、HAL层的实现,接口被定义在CameraHardwareInterface.h文件中,所以HAL层要做的就是实现其接口
- 其实写到这里,还是有很多不了解,如service中stub的概念,IPC的机制如何实现,功底还是太浅了,有空专门研究下这块
- 下面就仔细看下hal如何实现takepicture操作,同时向上层传递data
- status_t CameraHardwareSec::takePicture()
- {
- stopPreview();
- Mutex::Autolock lock(mStateLock);
- if (mCaptureInProgress)
{
- LOGE("%s : capture already in progress", __func__);
- return INVALID_OPERATION;
- }
- //启动PictureThread线程
- if (mPictureThread->run("CameraPictureThread",
PRIORITY_DEFAULT)
!= NO_ERROR)
{
- LOGE("%s : couldn't run picture thread", __func__);
- return INVALID_OPERATION;
- }
- mCaptureInProgress =
true;
- return NO_ERROR;
- }
- int CameraHardwareSec::pictureThread()
- {
- mRawHeap = new MemoryHeapBase(picture_size);
- if (mRawHeap->getHeapID()
< 0)
{
- LOGE("ERR(%s): Raw heap creation fail", __func__);
- mRawHeap.clear();
- return UNKNOWN_ERROR;
- }
- sp<MemoryHeapBase> JpegHeap;
- sp<MemoryHeapBase> PostviewHeap;
- sp<MemoryHeapBase> ThumbnailHeap;
- sp<MemoryBase> buffer
= new MemoryBase(mRawHeap, 0, picture_size
+ 8);
- //主要是将raw data照片原始数据通过encode成jpeg的格式
- if ((mMsgEnabled
& CAMERA_MSG_RAW_IMAGE)&&(mSecCamera->getJpegStreamPossible()
!=
true))
{
- LOG_TIME_DEFINE(1)
- LOG_TIME_START(1)
- JpegHeap = new MemoryHeapBase(jpeg_heap_size);
- PostviewHeap = new MemoryHeapBase(picture_size);
- ThumbnailHeap = new MemoryHeapBase(thumb_size);
- // Modified the shutter sound timing
for Jpeg capture
- mSecCamera->startSnapshot();
- if (mMsgEnabled
& CAMERA_MSG_SHUTTER)
{
- //回调shuttcallback的接口
- mNotifyCb(CAMERA_MSG_SHUTTER, 0,
0, mCallbackCookie);
- }
- if(mSecCamera->getJpegStreamPossible()
!=
true){
- if
(mSecCamera->getSnapshotAndJpeg((unsigned char*)PostviewHeap->base(),
- (unsigned char*)JpegHeap->base(),
&jpeg_size)
< 0)
{
- LOGE("ERR(%s):Fail on SecCamera->getSnapshotAndJpeg()", __func__);
- mStateLock.lock();
- mCaptureInProgress =
false;
- mStateLock.unlock();
- return UNKNOWN_ERROR;
- }
- }
- }
- int JpegImageSize
= 10000, JpegExifSize;
- bool isLSISensor =
false;
- JpegImageSize = static_cast<int>(jpeg_size);
- CropScaleYUY2((char
*)PostviewHeap->base(),
picture_width, picture_height, 0, 0,
- (char
*)ThumbnailHeap->base(), thumb_width,
thumb_height);
- memcpy(mRawHeap->base(),PostviewHeap->base(),
picture_size);
- }
- memcpy(static_cast<unsigned char*>(mPreviewHeap->base())
+ offset +
(previewWidth*previewHeight
* 3 / 2),
- overlay_header, mSizeOfADDRS);
- ret = mOverlay->queueBuffer((void*)(static_cast<unsigned
char *>(mPreviewHeap->base())
+ offset +
- (previewWidth
* previewHeight * 3
/ 2)));
- if ((mMsgEnabled
& CAMERA_MSG_COMPRESSED_IMAGE)
&&(mSecCamera->getJpegStreamPossible()
!=
true))
{
- sp<MemoryHeapBase> ExifHeap
= new MemoryHeapBase(EXIF_FILE_SIZE
+ picture_size);
- if (mMsgEnabled
& CAMERA_MSG_RAW_IMAGE)
- mDataCb(CAMERA_MSG_RAW_IMAGE, buffer, mCallbackCookie);
- JpegExifSize = mSecCamera->getExif((unsigned char
*)ExifHeap->base(),
- (unsigned char
*)ThumbnailHeap->base());
- LOGW("JpegExifSize=%d", JpegExifSize);
- unsigned char *ExifStart
= (unsigned char
*)JpegHeap->base()
+ 2;
- unsigned char *ImageStart
= ExifStart + JpegExifSize;
- memmove(ImageStart, ExifStart, JpegImageSize
- 2);
- memcpy(ExifStart, ExifHeap->base(),
JpegExifSize);
- sp<MemoryBase> mem
= new MemoryBase(JpegHeap, 0, JpegImageSize
+ JpegExifSize);
- //压缩格式照片消息
- mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mem,
mCallbackCookie);
- }
- }
- 现在我们来看下在hal定义的三个回调函数
- typedef void (*notify_callback)(int32_t msgType,
//用于处理一些通知的消息,如快门
- int32_t ext1,
- int32_t ext2,
- void* user);
- typedef void (*data_callback)(int32_t msgType,
// 返回通过camera得到的raw data
- const sp<IMemory>& dataPtr,
- void* user);
- // 返回通过camera得到的raw data并且携带时间戳
- typedef void (*data_callback_timestamp)(nsecs_t timestamp,
- int32_t msgType,
- const sp<IMemory>& dataPtr,
- void* user);
- 接口如下:
- /**
Set the notification
and data callbacks */
- virtual void setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- data_callback_timestamp data_cb_timestamp,
- void* user)
= 0;
- 现在又返回到cameraService是如何处理hal发过来的消息
- void CameraService::Client::notifyCallback(int32_t
msgType, int32_t ext1,
- int32_t ext2, void* user)
{
- LOG2("notifyCallback(%d)", msgType);
- sp<Client> client
= getClientFromCookie(user);
- if (client
== 0) return;
- if (!client->lockIfMessageWanted(msgType))
return;
- switch (msgType)
{//接收到的Hal层消息
- case CAMERA_MSG_SHUTTER:
- // ext1
is the dimension of the yuv picture.
- client->handleShutter((image_rect_type
*)ext1);
- break;
- default:
- client->handleGenericNotify(msgType,
ext1, ext2);
- break;
- }
- }
- void CameraService::Client::dataCallback(int32_t
msgType,
- const sp<IMemory>& dataPtr, void* user)
{
- LOG2("dataCallback(%d)", msgType);
- sp<Client> client
= getClientFromCookie(user);
- if (client
== 0) return;
- if (!client->lockIfMessageWanted(msgType))
return;
- if (dataPtr
== 0)
{
- LOGE("Null data returned in data callback");
- client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
- return;
- }
- switch (msgType)
{
- case CAMERA_MSG_PREVIEW_FRAME:
- client->handlePreviewData(dataPtr);
- break;
- case CAMERA_MSG_POSTVIEW_FRAME:
- client->handlePostview(dataPtr);
- break;
- case CAMERA_MSG_RAW_IMAGE:
- client->handleRawPicture(dataPtr);
- break;
- case CAMERA_MSG_COMPRESSED_IMAGE:
- client->handleCompressedPicture(dataPtr);
- break;
- default:
- client->handleGenericData(msgType, dataPtr);
- break;
- }
- }
- // picture callback
- compressed picture ready
- void CameraService::Client::handleCompressedPicture(const
sp<IMemory>& mem)
{
- disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
- sp<ICameraClient> c
= mCameraClient;
- mLock.unlock();
- if (c
!= 0)
{
- //回调函数
- c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
mem);
- }
- }
- 回到frameworks/base/core/java/android/hardware/Camera.java
处理消息
- @Override
- public void handleMessage(Message msg)
{
- switch(msg.what)
{
- case CAMERA_MSG_SHUTTER:
- if
(mShutterCallback !=
null)
{
- mShutterCallback.onShutter();
- }
- return;
- ………………
- case CAMERA_MSG_COMPRESSED_IMAGE:
- if
(mJpegCallback !=
null)
{
- //现在总算回到app时候注册的几个回调函数了
- mJpegCallback.onPictureTaken((byte[])msg.obj,
mCamera);
- }
- return;
- default:
- Log.e(TAG,
"Unknown message type "
+ msg.what);
- return;
- }
- }
- //这里就是存储数据的地方了,这里采用file的形式
- private int storeImage(byte[] data,
Location loc)
{
- try {
- long dateTaken = System.currentTimeMillis();
- String title
= createName(dateTaken);
- String filename
= title + ".jpg";
- int[] degree
= new int[1];
- mLastContentUri = ImageManager.addImage(
- mContentResolver,
- title,
- dateTaken,
- loc,
// location from gps/network
- ImageManager.CAMERA_IMAGE_BUCKET_NAME, filename,
- null, data,
- degree);
- return degree[0];
- } catch
(Exception ex)
{
- Log.e(TAG,
"Exception while compressing image.", ex);
- return 0;
- }
- }
本文详细解析了Android手机相机拍照的整个流程,从应用程序层、框架层、服务层到硬件抽象层,涵盖点击拍照到图片存储的具体过程。重点介绍了不同层间的交互方式及数据传递机制。

被折叠的 条评论
为什么被折叠?



