Android Camera Development Explained, Part 2: Implementing Touch-to-Focus

In the previous article we implemented the camera's basic features. Today we add touch-to-focus: when the user touches the screen, a focus indicator is shown and the camera then autofocuses. (Strictly speaking, this does not focus on the exact spot the user taps; the official documentation has an example of per-region focusing, but for simplicity we just trigger a plain autofocus here. A sketch of the per-region approach is included at the end of this article.)

We continue with yesterday's example. Create a new interface called CameraFocusListener containing two methods:

public interface CameraFocusListener {
	public void onFocusBegin(float x,float y);
	public void onFocusEnd();
}

Then have CameraSurfaceView implement this listener.
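For clarity, the class declaration then looks like this (the full class, which later also adds OnTouchListener, is shown further down):

public class CameraSurfaceView extends SurfaceView implements CameraFocusListener {
	// existing code from the previous article, plus the stub methods shown below
}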

 

Since we are implementing focus, we need a graphical indicator. Here I simply draw a circle in Photoshop and save it as focus_indicator.png in the drawable folder (the file name must be lowercase so it can be referenced as R.drawable.focus_indicator).
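If you would rather not ship a PNG, the ring can also be built in code with GradientDrawable. This is just an optional sketch (not part of the tutorial's resources); indicatorSizePx is a size you pick yourself, and it would replace the BitmapFactory/setImageBitmap lines shown later:

// import android.graphics.Color;
// import android.graphics.drawable.GradientDrawable;
GradientDrawable ring = new GradientDrawable();
ring.setShape(GradientDrawable.OVAL);
ring.setColor(Color.TRANSPARENT);               // hollow center
ring.setStroke(4, Color.WHITE);                 // thin white outline
ring.setSize(indicatorSizePx, indicatorSizePx); // intrinsic size of the indicator
imageView.setImageDrawable(ring);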

 

Add the following member variables and setter methods to CameraSurfaceView:

private ImageView mIVIndicator;
	private CameraFragment mCameraFragment;

	public void setCameraFragment(CameraFragment cameraFragment) {
		this.mCameraFragment = cameraFragment;
	}

	public void setIVIndicator(ImageView IVIndicator) {
		this.mIVIndicator = IVIndicator;
	}
	
	@Override
	public void onFocusBegin(float x,float y) {
		// TODO Auto-generated method stub

	}

	@Override
	public void onFocusEnd() {
		// TODO Auto-generated method stub

	}

	public CameraSurfaceView(Context context) {
		super(context);

	}

Modify CameraFragment: add a focusListener field and update onCreateView so the preview and the indicator ImageView are stacked inside a RelativeLayout:

private CameraFocusListener focusListener;

	@Override
	public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {

		preview = new CameraSurfaceView(getActivity());
		preview.getHolder().addCallback(this);
		focusListener=preview;
		
		ImageView imageView=new ImageView(getActivity());
		Bitmap bitmap=BitmapFactory.decodeResource(getResources(), R.drawable.focus_indicator);
		imageView.setImageBitmap(bitmap);
		imageView.setAlpha(0.0f);
		preview.setIVIndicator(imageView);
		preview.setCameraFragment(this);
		
		RelativeLayout layout = new RelativeLayout(getActivity());
		layout.addView(preview);
		layout.addView(imageView);

		return layout;

	}
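Note that addView without layout parameters gives the indicator a wrap_content size matching the bitmap. If you want a predictable indicator size regardless of bitmap density scaling, an optional variant (indicatorSizePx is a hypothetical constant you choose yourself) replaces the plain layout.addView(imageView) call:

RelativeLayout.LayoutParams lp =
		new RelativeLayout.LayoutParams(indicatorSizePx, indicatorSizePx);
layout.addView(imageView, lp);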

CameraFragment exposes a startFocus method for CameraSurfaceView to call:

public void startFocus(float x,float y){
		focusListener.onFocusBegin(x,y);
		mCamera.autoFocus(this);
	}
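If you tap rapidly, or before the camera has been opened, autoFocus can be called at a bad moment. A slightly hardened variant (a defensive sketch, not required for this tutorial) guards against a null camera and cancels any focus run still in flight:

public void startFocus(float x, float y) {
	if (mCamera == null) {
		return; // camera not opened yet, or already released
	}
	focusListener.onFocusBegin(x, y);
	mCamera.cancelAutoFocus(); // drop any autofocus still in progress
	mCamera.autoFocus(this);
}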

Now have CameraFragment implement AutoFocusCallback:

public class CameraFragment extends Fragment implements SurfaceHolder.Callback, Camera.PictureCallback,AutoFocusCallback
…
…
@Override
	public void onAutoFocus(boolean success, Camera camera) {
		focusListener.onFocusEnd();
	}
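onAutoFocus also reports whether focusing succeeded, which the listener above simply ignores. If you wanted the indicator to react to the result (for example, tint it red on failure), one possible extension is to pass the flag through. This is purely a sketch and changes the CameraFocusListener signature defined earlier:

// Hypothetical variant of the interface:
// public interface CameraFocusListener {
// 	public void onFocusBegin(float x, float y);
// 	public void onFocusEnd(boolean success);
// }

@Override
public void onAutoFocus(boolean success, Camera camera) {
	Log.d("CameraFragment", "autofocus success=" + success);
	focusListener.onFocusEnd(success);
}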

Finally, implement the CameraFocusListener methods in CameraSurfaceView, and also implement OnTouchListener, because the touch events arrive on this SurfaceView; remember to register the view as its own touch listener.

The complete code of this class is as follows:

package com.example.cameratutorial;

import android.content.Context;

import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.widget.ImageView;
import android.view.View.OnTouchListener;
/**
 * @author CTGU小龙同学 2014-6-21
 */
public class CameraSurfaceView extends SurfaceView implements CameraFocusListener,OnTouchListener {
	private static final String TAG = "CameraSurfaceView";
	// Use a 4:3 aspect ratio
	public static double RATIO = 3.0 / 4.0;
	private ImageView mIVIndicator;
	private CameraFragment mCameraFragment;

	public void setCameraFragment(CameraFragment cameraFragment) {
		this.mCameraFragment = cameraFragment;
	}

	public void setIVIndicator(ImageView IVIndicator) {
		this.mIVIndicator = IVIndicator;
		setOnTouchListener(this);
	}
	/**
	 * Show the focus indicator when focusing begins.
	 */
	@Override
	public void onFocusBegin(float x,float y) {
		mIVIndicator.setX(x-mIVIndicator.getWidth()/2);
		mIVIndicator.setY(y-mIVIndicator.getHeight()/2);
		mIVIndicator.setAlpha(1.0f);		

	}
	/**
	 * Hide the indicator when focusing ends.
	 */
	@Override
	public void onFocusEnd() {
		mIVIndicator.setAlpha(0.0f);
	}
	
	@Override
	public boolean onTouch(View v, MotionEvent event) {
		switch (event.getAction()) {
		case MotionEvent.ACTION_DOWN:
			mCameraFragment.startFocus(event.getX(), event.getY());
			break;

		default:
			break;
		}
		return false;
	}

	public CameraSurfaceView(Context context) {
		super(context);

	}

	@Override
	protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {

		int height = MeasureSpec.getSize(heightMeasureSpec);
		int width = MeasureSpec.getSize(widthMeasureSpec);
		Log.d("Measured", "before: width=" + width + " height=" + height);

		// Derive the long side from the short side so the view keeps the 4:3 ratio
		if (width < height) {
			height = (int) (width / RATIO);
		} else {
			width = (int) (height / RATIO);
		}

		Log.d("Measured", "after: width=" + width + " height=" + height);
		setMeasuredDimension(width, height);
	}

	
	

	

}


The complete code for CameraFragment is as follows:

package com.example.cameratutorial;

import java.io.*;
import java.util.*;
import android.app.Activity;
import android.app.Fragment;
import android.graphics.*;

import android.graphics.Bitmap.CompressFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.*;


import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.Toast;

/**
 * @author CTGU小龙同学 2014-6-21
 */
public class CameraFragment extends Fragment implements SurfaceHolder.Callback, Camera.PictureCallback,AutoFocusCallback {
	private Camera mCamera;
	// SurfaceHolder of the camera preview
	private SurfaceHolder mSurfaceHolder;
	private CameraSurfaceView preview;
	private int mFrontCameraId = -1;
	private int mBackCameraId = -1;
	
	private CameraFocusListener focusListener;

	@Override
	public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {

		preview = new CameraSurfaceView(getActivity());
		preview.getHolder().addCallback(this);
		focusListener=preview;
		
		ImageView imageView=new ImageView(getActivity());
		Bitmap bitmap=BitmapFactory.decodeResource(getResources(), R.drawable.focus_indicator);
		imageView.setImageBitmap(bitmap);
		imageView.setAlpha(0.0f);
		preview.setIVIndicator(imageView);
		preview.setCameraFragment(this);
		
		RelativeLayout layout = new RelativeLayout(getActivity());
		layout.addView(preview);
		layout.addView(imageView);

		return layout;

	}
	
	
	public void startFocus(float x,float y){
		focusListener.onFocusBegin(x,y);
		mCamera.autoFocus(this);
	}
	
	
	@Override
	public void onAutoFocus(boolean success, Camera camera) {
		focusListener.onFocusEnd();
		
	}

	@Override
	public void onAttach(Activity activity) {
		super.onAttach(activity);
		findAvailableCameras();

	}

	@Override
	public void onResume() {

		super.onResume();
		Log.d("camera", "mFrontCameraId" + mFrontCameraId);
		Log.d("camera", "mbackCameraId" + mBackCameraId);
		if (mBackCameraId != -1) {

			mCamera = Camera.open(mBackCameraId);

		} else {
			Toast.makeText(getActivity(), "failed to open camera", Toast.LENGTH_SHORT).show();
		}

	}

	@Override
	public void onPause() {
		super.onPause();

		mCamera.stopPreview();
		mCamera.release();
	}

	/**
	 * Find the available cameras and record the front and back camera IDs.
	 */
	private void findAvailableCameras() {

		Camera.CameraInfo info = new CameraInfo();
		int numCamera = Camera.getNumberOfCameras();
		for (int i = 0; i < numCamera; i++) {
			Camera.getCameraInfo(i, info);
			// Found the front-facing camera
			if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
				mFrontCameraId = i;
			}
			// Found the back-facing camera
			if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
				mBackCameraId = i;
			}

		}

	}

	/**
	 * Called when the camera has captured a picture.
	 */
	@Override
	public void onPictureTaken(byte[] data, Camera camera) {
		final Bitmap bitmap;

		final String path;
		try {
			// /storage/emulated/0/Pictures/XXX.jpg

			path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath() + "/" + new Date().toLocaleString() + ".jpg";
			Log.d("Path", path);
			bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
			camera.stopPreview();

			final int displayOrientation = getCorrectOrientation();
			new Thread(new Runnable() {

				@Override
				public void run() {
					FileOutputStream fos;
					Matrix matrix = new Matrix();
					matrix.postRotate(displayOrientation);
					Bitmap rotaBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, false);

					try {
						fos = new FileOutputStream(path);
						rotaBitmap.compress(CompressFormat.JPEG, 100, fos);
						fos.close();
					} catch (Exception e) {
						// TODO Auto-generated catch block
						e.printStackTrace();
					}

				}
			}).start();

		} catch (Exception e) {
			e.printStackTrace();
		}
		camera.startPreview();
	}

	/**
	 * Make the preview and the captured photo match the correct orientation.<br/>
	 * The preview is landscape by default, so a portrait app needs to rotate it by 90 degrees.<br/>
	 * For example, in a 1280*960 frame, 1280 is the width when the device is landscape<br/>
	 * but becomes the height when the device is held in portrait.<br/>
	 * This code comes from the official API docs; it makes the captured photo and the preview
	 * match the device's current orientation (which may be portrait or landscape).
	 */
	private int getCorrectOrientation() {
		android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
		android.hardware.Camera.getCameraInfo(mBackCameraId, info);
		int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
		int degrees = 0;
		switch (rotation) {
		case Surface.ROTATION_0:
			degrees = 0;
			break;
		case Surface.ROTATION_90:
			degrees = 90;
			break;
		case Surface.ROTATION_180:
			degrees = 180;
			break;
		case Surface.ROTATION_270:
			degrees = 270;
			break;
		}

		int result;
		if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
			result = (info.orientation + degrees) % 360;
			result = (360 - result) % 360; // compensate the mirror
		} else { // back-facing
			result = (info.orientation - degrees + 360) % 360;
		}
		Log.d("orientationResult", result + "");
		return result;
	}

	public void takePicture() {
		mCamera.takePicture(null, null, this);
	}

	@Override
	public void surfaceCreated(SurfaceHolder holder) {
		mSurfaceHolder = holder;

		startPreView();

	}

	private void startPreView() {
		try {
			mCamera.setPreviewDisplay(mSurfaceHolder);
			setPreviewSize();
			setDisplayOrientation();
			mCamera.startPreview();
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}

	}

	private void setDisplayOrientation() {
		int displayOrientation = getCorrectOrientation();

		mCamera.setDisplayOrientation(displayOrientation);
	}

	/**
	 * Set the preview size using a 4:3 aspect ratio.
	 */
	private void setPreviewSize() {
		Camera.Parameters params = mCamera.getParameters();
		List<Size> sizes = params.getSupportedPreviewSizes();
		for (Size size : sizes) {
			Log.d("previewSize", "width:" + size.width + " height " + size.height);
		}
		for (Size size : sizes) {
			if (size.width * 3 == size.height * 4) {
				params.setPreviewSize(size.width, size.height);
				Log.d("previewSize", "SET width:" + size.width + " height " + size.height);
				break;
			}
		}

		// Remember to write the parameters back to the Camera
		mCamera.setParameters(params);

	}

	private void setPictureSize() {
		Camera.Parameters params = mCamera.getParameters();
		List<Size> sizes = params.getSupportedPictureSizes();
		for (Size size : sizes) {
			Log.d("picSize", "width:" + size.width + " height " + size.height);
		}
		for (Size size : sizes) {
			if (size.width * 3 == size.height * 4) {
				params.setPictureSize(size.width, size.height);
				break;
			}
		}

		// Write the parameters back, otherwise the picture size is not applied
		mCamera.setParameters(params);
	}

	@Override
	public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

	}

	@Override
	public void surfaceDestroyed(SurfaceHolder holder) {
		mCamera.release();
	}

}

 

Nothing else needs to be modified.

The full example project can be downloaded here: http://download.csdn.net/detail/longyubo007/7543741
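As mentioned in the introduction, the official documentation also shows how to focus on the region the user actually touched instead of running a plain autofocus. For reference, a rough sketch of that approach with the Camera.Area API is shown below. It would live in CameraFragment (which owns mCamera), and the coordinate mapping is deliberately simplified: it ignores display rotation and the preview's aspect cropping, so treat it as a starting point rather than a drop-in implementation.

// Rough tap-to-focus sketch. Camera.Area rectangles use a fixed
// -1000..1000 coordinate space laid over the preview frame.
public void startFocusAt(float x, float y, int previewWidth, int previewHeight) {
	Camera.Parameters params = mCamera.getParameters();
	if (params.getMaxNumFocusAreas() > 0) {
		// Map the touch point into the -1000..1000 space (no rotation handling here)
		int cx = (int) (x / previewWidth * 2000) - 1000;
		int cy = (int) (y / previewHeight * 2000) - 1000;
		Rect area = new Rect(Math.max(cx - 100, -1000), Math.max(cy - 100, -1000),
				Math.min(cx + 100, 1000), Math.min(cy + 100, 1000));
		params.setFocusAreas(Arrays.asList(new Camera.Area(area, 1000)));
		params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
		mCamera.setParameters(params);
	}
	focusListener.onFocusBegin(x, y);
	mCamera.autoFocus(this);
}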

