package com.android.example.cameraappxjava;
import android.Manifest;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.provider.MediaStore;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.widget.Button;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.android.example.cameraappxjava.util.CameraGLRenderer;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Fixed version: Camera2 + GLSurfaceView custom YUV preview demo.
 */
public class MainActivity2 extends AppCompatActivity implements CameraGLRenderer.SurfaceSizeCallback {
private static final String TAG = "camera2api_fixed";
private static final int REQUEST_CAMERA_PERMISSION = 100;
private static final long MIN_CLICK_INTERVAL = 1000;
// GLSurfaceView members
private GLSurfaceView glSurfaceView;
private CameraGLRenderer cameraGLRenderer;
private int glSurfaceWidth = 0;
private int glSurfaceHeight = 0;
// Camera2 core components
private Button captureButton;
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSession;
private CaptureRequest.Builder captureRequestBuilder;
private String cameraId;
private Handler backgroundHandler;
private HandlerThread backgroundThread;
private CameraManager cameraManager;
private volatile boolean isCapturing = false;
private long lastClickTime = 0;
// ImageReaders: preview (YUV) + still capture (JPEG)
private ImageReader previewImageReader;
private ImageReader captureImageReader;
private Size previewSize; // preview size matched to the GLSurfaceView
private Size captureSize; // still-capture size
// Photo saving
private ContentResolver contentResolver;
private ContentValues mediaValues;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Log.d(TAG, "onCreate ——————————————————————");
// 1. Initialize view components
glSurfaceView = findViewById(R.id.glsurfaceView);
captureButton = findViewById(R.id.btnCapture);
contentResolver = getContentResolver();
mediaValues = new ContentValues();
mediaValues.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
mediaValues.put(MediaStore.Images.Media.RELATIVE_PATH, Environment.DIRECTORY_PICTURES);
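// Note (assumption about the project's minSdk, not stated in the original code):
// MediaStore RELATIVE_PATH requires API 29+; on older devices this save path would need
// WRITE_EXTERNAL_STORAGE and a different insert strategy.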
// 2. Initialize the GL renderer (registers the size callback)
initGLRenderer();
// 3. Capture button click listener (debounced)
captureButton.setOnClickListener(v -> {
long currentTime = SystemClock.elapsedRealtime();
if (currentTime - lastClickTime > MIN_CLICK_INTERVAL) {
lastClickTime = currentTime;
takePicture();
} else {
Log.d(TAG, "点击过快,已忽略");
}
});
// 4. Initialize the CameraManager up front (avoids repeated lookups)
cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
}
/**
 * Initialize the GLSurfaceView and the custom renderer (core setup).
 */
private void initGLRenderer() {
// Use OpenGL ES 2.0
glSurfaceView.setEGLContextClientVersion(2);
// Create the renderer and register the size callback (listens for the GLSurfaceView's actual size)
cameraGLRenderer = new CameraGLRenderer(this);
glSurfaceView.setRenderer(cameraGLRenderer);
// Render on demand (redraw only when a new frame arrives, saving power)
glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
/**
 * Initialize the camera configuration (preview/capture ImageReaders).
 * Must be called after the GLSurfaceView size is known (to avoid size mismatches).
 */
private void initCamera() {
if (glSurfaceWidth == 0 || glSurfaceHeight == 0 || backgroundHandler == null) {
Log.w(TAG, "initCamera: 条件不满足(GL尺寸未确定/后台线程未启动)");
Log.w(TAG, "glSurfaceWidth:"+glSurfaceWidth+"glSurfaceHeight:"+glSurfaceHeight+"backgroundHandler:"+backgroundHandler);
return;
}
Log.d(TAG, "initCamera: 开始初始化,GL尺寸=" + glSurfaceWidth + "x" + glSurfaceHeight);
try {
// 1. Pick a camera ID (index 0 is typically the back camera, but the ordering is not guaranteed by the API)
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraIds.length == 0) {
Log.e(TAG, "No camera device available");
return;
}
cameraId = cameraIds[0]; // prefer the back camera
// 2. Query the camera's supported stream configurations (preview/capture sizes)
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (configMap == null) {
Log.e(TAG, "StreamConfigurationMap is null; the camera exposes no stream configurations");
return;
}
// 3. Set up the preview ImageReader (YUV_420_888, matched to the GLSurfaceView size)
Size[] yuvSizes = configMap.getOutputSizes(ImageFormat.YUV_420_888);
previewSize = chooseOptimalSize(yuvSizes, glSurfaceWidth, glSurfaceHeight);
if (previewSize == null) {
Log.e(TAG, "No suitable preview size found");
return;
}
// Close any previous ImageReader (avoid leaks)
if (previewImageReader != null) {
previewImageReader.close();
}
// Create the preview ImageReader (4 buffers, to reduce dropped frames)
previewImageReader = ImageReader.newInstance(
previewSize.getWidth(),
previewSize.getHeight(),
ImageFormat.YUV_420_888,
4
);
// YUV frame callback (runs on the camera background thread so the UI thread is not blocked)
previewImageReader.setOnImageAvailableListener(reader -> {
Image image = reader.acquireLatestImage();
if (image == null) return;
try {
if (cameraGLRenderer != null) {
// Hand the YUV frame to the renderer (which takes ownership) and request a redraw
cameraGLRenderer.setYUVData(image);
glSurfaceView.requestRender();
} else {
image.close();
}
} catch (Exception e) {
Log.e(TAG, "预览帧处理失败: " + e.getMessage(), e);
}
}, backgroundHandler);
// 4. Set up the still-capture ImageReader (JPEG, largest supported size)
Size[] jpegSizes = configMap.getOutputSizes(ImageFormat.JPEG);
if (jpegSizes.length == 0) {
Log.e(TAG, "Camera does not support JPEG capture");
return;
}
// Pick the largest capture size
captureSize = Collections.max(Arrays.asList(jpegSizes), new CompareSizesByArea());
// Close any previous ImageReader
if (captureImageReader != null) {
captureImageReader.close();
}
// Create the still-capture ImageReader
captureImageReader = ImageReader.newInstance(
captureSize.getWidth(),
captureSize.getHeight(),
ImageFormat.JPEG,
1 // one shot at a time, a single buffer is enough
);
Log.d(TAG, "initCamera: done, preview size=" + previewSize + ", capture size=" + captureSize);
} catch (CameraAccessException e) {
Log.e(TAG, "Camera access exception: " + e.getMessage(), e);
} catch (SecurityException e) {
Log.e(TAG, "Camera permission exception: " + e.getMessage(), e);
}
}
/**
 * Open the camera (requires the camera permission to have been granted).
 */
private void openCamera() {
if (cameraId == null || previewImageReader == null || backgroundHandler == null) {
Log.w(TAG, "openCamera: preconditions not met, retrying in 1000ms if possible");
// Only schedule a retry when the background thread is running (otherwise postDelayed would NPE)
if (backgroundHandler != null) {
backgroundHandler.postDelayed(this::openCamera, 1000);
}
return;
}
Log.d(TAG, "openCamera: attempting to open camera, ID=" + cameraId);
try {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
// Open the camera (pass the state callback and the background Handler)
cameraManager.openCamera(cameraId, cameraStateCallback, backgroundHandler);
} else {
Log.w(TAG, "Camera permission not granted, cannot open");
}
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to open camera: " + e.getMessage(), e);
}
}
/**
 * Create the camera capture session (binds the preview and capture Surfaces).
 */
private void createCaptureSession() {
if (cameraDevice == null || previewImageReader == null || captureImageReader == null) {
Log.e(TAG, "createCaptureSession: 核心组件为空(相机/ImageReader)");
return;
}
try {
// 1. 获取预览和拍照的Surface
Surface previewSurface = previewImageReader.getSurface();
Surface captureSurface = captureImageReader.getSurface();
List<Surface> outputSurfaces = new ArrayList<>();
outputSurfaces.add(previewSurface); // 预览Surface(YUV输出)
outputSurfaces.add(captureSurface); // 拍照Surface(JPEG输出)
// 2. 创建CaptureSession(配置输出Surface)
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
Log.i(TAG, "CaptureSession配置成功");
cameraCaptureSession = session;
// 配置预览请求(持续输出YUV帧到预览Surface)
try {
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(previewSurface); // 预览目标:YUV ImageReader
// 开启自动对焦和自动曝光(预览必备)
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// 下发持续预览请求
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
Log.i(TAG, "预览请求已下发,开始预览");
} catch (CameraAccessException e) {
Log.e(TAG, "配置预览请求失败: " + e.getMessage(), e);
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "CaptureSession配置失败");
runOnUiThread(() -> Toast.makeText(MainActivity2.this, "相机预览配置失败", Toast.LENGTH_SHORT).show());
}
}, backgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "创建CaptureSession异常: " + e.getMessage(), e);
}
}
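// Note: createCaptureSession(List<Surface>, StateCallback, Handler) is deprecated as of API 30;
// on newer targets the SessionConfiguration-based overload is the recommended replacement (kept simple here).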
/**
 * Still-capture flow (stop preview → submit capture request → resume preview).
 */
private void takePicture() {
if (cameraDevice == null || cameraCaptureSession == null || captureImageReader == null) {
Log.w(TAG, "takePicture: 核心组件未就绪,无法拍照");
runOnUiThread(() -> Toast.makeText(this, "相机未就绪", Toast.LENGTH_SHORT).show());
return;
}
Log.i(TAG, "takePicture: 开始拍照流程");
isCapturing = true;
try {
// 1. Build the capture request (JPEG, output to the capture ImageReader)
CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(captureImageReader.getSurface());
// Capture parameters (auto-focus, auto-exposure, JPEG rotation)
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Correct the JPEG rotation to match the display orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getJpegOrientation(rotation));
// 2. Register the capture ImageReader callback (saves the photo)
captureImageReader.setOnImageAvailableListener(reader -> {
try (Image image = reader.acquireLatestImage()) {
if (image == null) {
Log.w(TAG, "Capture Image is null, nothing to save");
return;
}
// Extract the JPEG bytes (planes[0] holds the JPEG data)
Image.Plane[] planes = image.getPlanes();
ByteBuffer jpegBuffer = planes[0].getBuffer();
byte[] jpegData = new byte[jpegBuffer.remaining()];
jpegBuffer.get(jpegData);
// Save the photo to the gallery
savePhotoToGallery(jpegData);
} catch (Exception e) {
Log.e(TAG, "Failed to save photo: " + e.getMessage(), e);
runOnUiThread(() -> Toast.makeText(MainActivity2.this, "Failed to save photo", Toast.LENGTH_SHORT).show());
} finally {
isCapturing = false;
// Resume the preview (re-submit the repeating request)
if (cameraCaptureSession != null && captureRequestBuilder != null) {
try {
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
Log.i(TAG, "Capture finished, preview resumed");
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to resume preview: " + e.getMessage(), e);
}
}
}
}, backgroundHandler);
// 3. Stop the repeating preview, then submit the single capture request
cameraCaptureSession.stopRepeating();
cameraCaptureSession.capture(captureBuilder.build(), new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
Log.e(TAG, "拍照失败,原因: " + failure.getReason());
isCapturing = false;
runOnUiThread(() -> Toast.makeText(MainActivity2.this, "拍照失败", Toast.LENGTH_SHORT).show());
// 恢复预览
if (cameraCaptureSession != null && captureRequestBuilder != null) {
try {
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "恢复预览失败: " + e.getMessage(), e);
}
}
}
}, backgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "拍照流程异常: " + e.getMessage(), e);
isCapturing = false;
}
}
/**
 * Save the photo to the system gallery (MediaStore).
 */
private void savePhotoToGallery(byte[] jpegData) {
if (jpegData == null || jpegData.length == 0) {
Log.w(TAG, "JPEG数据为空,无法保存");
return;
}
try {
// 1. 生成唯一文件名(时间戳)
String fileName = "Camera2_" + System.currentTimeMillis() + ".jpg";
mediaValues.put(MediaStore.Images.Media.DISPLAY_NAME, fileName);
// 2. 插入到媒体库(获取Uri)
Uri imageUri = contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, mediaValues);
if (imageUri == null) {
Log.e(TAG, "插入媒体库失败,无法获取Uri");
return;
}
// 3. 写入文件(通过ContentResolver避免存储权限问题)
try (FileOutputStream outputStream = (FileOutputStream) contentResolver.openOutputStream(imageUri)) {
outputStream.write(jpegData);
outputStream.flush();
Log.i(TAG, "照片保存成功,路径: " + imageUri);
// 通知媒体库扫描(刷新相册)
sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, imageUri));
// 显示成功提示
runOnUiThread(() -> Toast.makeText(this, "照片已保存到相册", Toast.LENGTH_SHORT).show());
}
} catch (IOException e) {
Log.e(TAG, "写入照片文件失败: " + e.getMessage(), e);
}
}
/**
 * Compute the JPEG rotation that matches the current display orientation (back camera).
 */
private int getJpegOrientation(int screenRotation) {
try {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
Integer sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
if (sensorOrientation == null) return 0;
// Combine the sensor orientation with the display rotation (back-facing camera formula);
// screenRotation is a Surface.ROTATION_* constant, so multiply by 90 to get degrees
return (sensorOrientation - screenRotation * 90 + 360) % 360;
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to read sensor orientation: " + e.getMessage(), e);
return 0;
}
}
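// Worked example for the back-camera formula above (illustrative values, not from the original code):
// sensorOrientation = 90 with the device in natural portrait (Surface.ROTATION_0) gives (90 - 0 + 360) % 360 = 90.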
/**
 * Choose the optimal size (closest to the view's aspect ratio, to avoid stretching).
 */
private Size chooseOptimalSize(Size[] sizes, int viewWidth, int viewHeight) {
if (sizes == null || sizes.length == 0) return null;
List<Size> candidateSizes = new ArrayList<>();
float viewRatio = (float) viewWidth / viewHeight;
// Filter: aspect ratio close to the view's (tolerance ≤ 0.1) and no larger than the view
for (Size size : sizes) {
float sizeRatio = (float) size.getWidth() / size.getHeight();
if (Math.abs(sizeRatio - viewRatio) <= 0.1 && size.getWidth() <= viewWidth && size.getHeight() <= viewHeight) {
candidateSizes.add(size);
}
}
// Candidates found: pick the largest (best quality)
if (!candidateSizes.isEmpty()) {
return Collections.max(candidateSizes, new CompareSizesByArea());
}
// No candidate matched: fall back to the largest supported size
return Collections.max(Arrays.asList(sizes), new CompareSizesByArea());
}
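// Note (assumption): camera output sizes are reported in sensor (landscape) orientation, while a full-screen
// GLSurfaceView on a phone is usually portrait, so the width/height filter above may find no candidates and
// fall back to the largest size; comparing against swapped view dimensions is a common refinement.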
/**
 * Start the camera background thread (handles camera and ImageReader callbacks).
 */
private void startBackgroundThread() {
if (backgroundThread == null) {
backgroundThread = new HandlerThread("Camera2_Background");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
Log.d(TAG, "后台线程已启动");
}
}
/**
 * Stop the camera background thread (avoid leaks).
 */
private void stopBackgroundThread() {
if (backgroundThread != null) {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
Log.d(TAG, "后台线程已停止");
} catch (InterruptedException e) {
Log.e(TAG, "停止后台线程异常: " + e.getMessage(), e);
}
}
}
/**
 * Release camera resources (avoid leaks).
 */
private void releaseCameraResources() {
Log.d(TAG, "releaseCameraResources: 开始释放");
// 停止预览请求
if (cameraCaptureSession != null) {
try {
cameraCaptureSession.stopRepeating();
} catch (CameraAccessException e) {
Log.e(TAG, "停止预览失败: " + e.getMessage(), e);
}
cameraCaptureSession.close();
cameraCaptureSession = null;
}
// Close the camera device
if (cameraDevice != null) {
cameraDevice.close();
cameraDevice = null;
}
// Close the ImageReaders
if (previewImageReader != null) {
previewImageReader.close();
previewImageReader = null;
}
if (captureImageReader != null) {
captureImageReader.close();
captureImageReader = null;
}
Log.d(TAG, "releaseCameraResources: 完成释放");
}
// ---------------------- Lifecycle methods ----------------------
@Override
protected void onResume() {
super.onResume();
Log.d(TAG, "onResume ——————————————————————");
// Resume the GLSurfaceView (required; restarts the GL render thread)
glSurfaceView.onResume();
// Start the background thread
startBackgroundThread();
// Check the camera permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
Log.i(TAG, "Requesting camera permission");
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
return;
}
// Initialize the camera (openCamera is triggered once the GL surface size is known)
initCamera();
}
@Override
protected void onPause() {
super.onPause();
Log.d(TAG, "onPause ——————————————————————");
// Pause the GLSurfaceView (pauses the GL render thread)
glSurfaceView.onPause();
// Stop the preview and release camera resources before stopping the background thread;
// releasing unconditionally is safe even if a capture is still in flight, and a delayed
// release posted to backgroundHandler would be dropped once that thread quits below.
if (isCapturing) {
Log.w(TAG, "onPause during capture; releasing camera anyway");
}
releaseCameraResources();
// Stop the background thread
stopBackgroundThread();
}
@Override
protected void onDestroy() {
super.onDestroy();
Log.d(TAG, "onDestroy ——————————————————————");
// Release renderer resources (ideally dispatched to the GL thread; see CameraGLRenderer.release())
if (cameraGLRenderer != null) {
cameraGLRenderer.release();
}
// Null out references (helps GC)
glSurfaceView = null;
cameraGLRenderer = null;
captureButton = null;
cameraManager = null;
contentResolver = null;
mediaValues = null;
}
// ---------------------- Permission callback ----------------------
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Log.i(TAG, "相机权限已授予");
// 初始化相机并打开
if (glSurfaceWidth == 0 || glSurfaceHeight == 0 || backgroundHandler == null) {
Log.w(TAG, "相机权限授权后 initCamera: 条件不满足(GL尺寸未确定/后台线程未启动)");
Log.w(TAG, "glSurfaceWidth:"+glSurfaceWidth+"glSurfaceHeight:"+glSurfaceHeight+"backgroundHandler:"+backgroundHandler);
}
initCamera();
} else {
Log.w(TAG, "相机权限被拒绝,无法预览");
runOnUiThread(() -> {
Toast.makeText(this, "需要相机权限才能使用", Toast.LENGTH_SHORT).show();
finish(); // 无权限则退出
});
}
}
}
// ---------------------- CameraDevice state callback ----------------------
private final CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.i(TAG, "相机已打开,ID=" + camera.getId());
cameraDevice = camera;
// 相机打开后,创建预览会话
createCaptureSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "相机已断开连接");
camera.close();
cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "相机错误,代码=" + error);
camera.close();
cameraDevice = null;
runOnUiThread(() -> Toast.makeText(MainActivity2.this, "相机初始化错误", Toast.LENGTH_SHORT).show());
}
};
// ---------------------- GL surface size callback (from CameraGLRenderer) ----------------------
@Override
public void onSurfaceSizeChanged(int width, int height) {
Log.d(TAG, "onSurfaceSizeChanged: GL尺寸=" + width + "x" + height);
// 更新GLSurface尺寸
glSurfaceWidth = width;
glSurfaceHeight = height;
// 重新初始化相机并打开
initCamera();
openCamera();
}
// ---------------------- Size comparator ----------------------
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// Compare by area (use long to avoid overflow)
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
}
package com.android.example.cameraappxjava.util;
import android.graphics.ImageFormat;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
 * Fixed version: custom GL renderer for YUV_420_888 frames.
 */
public class CameraGLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "CameraGLRenderer";
private static final int TEXTURE_COUNT = 3; // Y, U, and V textures
// ---------------------- OpenGL ES 2.0 core configuration ----------------------
// Vertex shader (full-screen quad)
private static final String VERTEX_SHADER =
"attribute vec4 vPosition;\n" +
"attribute vec2 vTexCoord;\n" +
"varying vec2 texCoord;\n" +
"void main() {\n" +
" gl_Position = vPosition;\n" +
" texCoord = vTexCoord;\n" +
"}";
// Fragment shader (YUV → RGB conversion)
private static final String FRAGMENT_SHADER =
"precision mediump float;\n" +
"varying vec2 texCoord;\n" +
"uniform sampler2D yTex;\n" +
"uniform sampler2D uTex;\n" +
"uniform sampler2D vTex;\n" +
"void main() {\n" +
" // YUV转RGB(BT.601标准)\n" +
" float y = texture2D(yTex, texCoord).r;\n" +
" float u = texture2D(uTex, texCoord).r - 0.5;\n" +
" float v = texture2D(vTex, texCoord).r - 0.5;\n" +
" float r = y + 1.402 * v;\n" +
" float g = y - 0.34414 * u - 0.71414 * v;\n" +
" float b = y + 1.772 * u;\n" +
" // 颜色范围限制(0.0~1.0)\n" +
" r = clamp(r, 0.0, 1.0);\n" +
" g = clamp(g, 0.0, 1.0);\n" +
" b = clamp(b, 0.0, 1.0);\n" +
" gl_FragColor = vec4(r, g, b, 1.0);\n" +
"}";
// Full-screen vertex coordinates (order: top-left → bottom-left → top-right → bottom-right)
private static final float[] VERTEX_COORDS = {
-1.0f, 1.0f, 0.0f, // top-left
-1.0f, -1.0f, 0.0f, // bottom-left
1.0f, 1.0f, 0.0f, // top-right
1.0f, -1.0f, 0.0f // bottom-right
};
// Texture coordinates (portrait adaptation: fixes the flipped image; entries correspond to the vertices above)
private static final float[] TEX_COORDS = {
0.0f, 1.0f, // maps to vertex 1 (top-left)
1.0f, 1.0f, // maps to vertex 2 (bottom-left)
0.0f, 0.0f, // maps to vertex 3 (top-right)
1.0f, 0.0f // maps to vertex 4 (bottom-right)
};
// ---------------------- Runtime state ----------------------
private final SurfaceSizeCallback sizeCallback; // GL size callback
private int shaderProgram; // shader program handle
private int[] textureIds = new int[TEXTURE_COUNT]; // Y/U/V texture handles
private FloatBuffer vertexBuffer; // vertex coordinate buffer
private FloatBuffer texCoordBuffer; // texture coordinate buffer
// YUV data (guarded by yuvLock)
private final Object yuvLock = new Object();
private Image pendingImage; // YUV image waiting to be processed
private byte[] yData, uData, vData; // extracted Y/U/V planes
private int yuvWidth, yuvHeight; // YUV image size
// Texture size bookkeeping (avoids recreating textures every frame)
private int yTexWidth = 0, yTexHeight = 0;
private int uvTexWidth = 0, uvTexHeight = 0;
private ByteBuffer yBuffer;
private ByteBuffer uBuffer;
private ByteBuffer vBuffer;
private boolean hasNewFrame = false; // new-frame flag
// ---------------------- Constructor (takes the size callback) ----------------------
public CameraGLRenderer(SurfaceSizeCallback callback) {
this.sizeCallback = callback;
}
// ---------------------- Public API ----------------------
/**
 * Hand a YUV Image to the renderer (called from the Camera2 ImageReader callback).
 * The renderer takes ownership of the Image and closes it.
 */
public void setYUVData(Image image) {
Log.d(TAG, "acquire image: " + image + " @ " + System.identityHashCode(image));
if (image == null || image.getFormat() != ImageFormat.YUV_420_888) {
Log.w(TAG, "无效Image:格式非YUV_420_888或为空");
if (image != null) image.close();
return;
}
synchronized (yuvLock) {
// 关闭未处理的旧图像(避免内存泄漏)
if (pendingImage != null) {
pendingImage.close(); // 关闭旧图像
Log.d(TAG, "关闭未处理的旧Image");
}
// 仅当无待处理帧时才更新
if (pendingImage == null) {
pendingImage = image;
hasNewFrame = true;
} else {
image.close(); // 直接丢弃过载帧
}
Log.e(TAG, "调用setYUVData,pendingImage:" + pendingImage);
}
}
/**
 * Release renderer resources (call when the Activity is destroyed).
 */
public void release() {
synchronized (yuvLock) {
// Close any pending image
if (pendingImage != null) {
pendingImage.close();
pendingImage = null;
}
// Clear the YUV data
yData = uData = vData = null;
yuvWidth = yuvHeight = 0;
}
// GL object deletion is only valid on the GL thread; callers should dispatch release()
// via GLSurfaceView.queueEvent(...), otherwise these two calls run without a current GL context.
GLES20.glDeleteTextures(TEXTURE_COUNT, textureIds, 0);
GLES20.glDeleteProgram(shaderProgram);
Log.d(TAG, "Renderer resources released");
}
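// Usage sketch (assumes the Activity still holds a valid glSurfaceView reference; not from the original code):
// glSurfaceView.queueEvent(() -> cameraGLRenderer.release());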
// ---------------------- OpenGL lifecycle callbacks ----------------------
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.d(TAG, "onSurfaceCreated:初始化OpenGL");
// 初始化OpenGL状态
GLES20.glDisable(GLES20.GL_BLEND); // 关闭混合(避免透明)
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // 背景黑色
// 初始化坐标缓冲区(native内存,避免GC)
vertexBuffer = createFloatBuffer(VERTEX_COORDS);
texCoordBuffer = createFloatBuffer(TEX_COORDS);
// 编译着色器程序
shaderProgram = compileShaderProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (shaderProgram == 0) {
Log.e(TAG, "着色器程序创建失败,预览不可用");
return;
}
// 创建Y/U/V三个纹理
GLES20.glGenTextures(TEXTURE_COUNT, textureIds, 0);
initTexture(textureIds[0]); // Y纹理
initTexture(textureIds[1]); // U纹理
initTexture(textureIds[2]); // V纹理
// 检查OpenGL错误
int glError = GLES20.glGetError();
if (glError != GLES20.GL_NO_ERROR) {
Log.e(TAG, "onSurfaceCreated OpenGL错误: " + glError);
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.d(TAG, "onSurfaceChanged:GL尺寸=" + width + "x" + height);
// 设置视口(全屏显示)
GLES20.glViewport(0, 0, width, height);
// 通知Activity更新相机预览尺寸
if (sizeCallback != null) {
sizeCallback.onSurfaceSizeChanged(width, height);
}
// 重置纹理尺寸记录(避免尺寸变化导致纹理不匹配)
yTexWidth = yTexHeight = uvTexWidth = uvTexHeight = 0;
}
@Override
public void onDrawFrame(GL10 gl) {
Log.e(TAG, "调用着色器onDrawFrame");
Log.d(TAG, "PendingImage: " + (pendingImage != null)
+ " | YUV尺寸: " + yuvWidth + "x" + yuvHeight
+ " | 纹理尺寸: Y=" + yTexWidth + "x" + yTexHeight
+ " UV=" + uvTexWidth + "x" + uvTexHeight);
// 1. Consume any pending YUV frame
boolean hasNewData = processPendingYUV();
if (!hasNewData) {
// No new data: just clear to the black background
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
return;
}
// 2. Make sure the shader program and textures are valid
if (shaderProgram == 0 || textureIds == null) {
Log.e(TAG, "Shader program or textures invalid, skipping render");
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
return;
}
// 3. Clear the previous frame
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// 4. Use the shader program
GLES20.glUseProgram(shaderProgram);
// 5. Upload the Y/U/V texture data
uploadTexture(textureIds[0], yData, yuvWidth, yuvHeight, true); // Y texture
uploadTexture(textureIds[1], uData, uvTexWidth, uvTexHeight, false); // U texture
uploadTexture(textureIds[2], vData, uvTexWidth, uvTexHeight, false); // V texture
// 6. Bind the textures to the shader samplers
bindTexturesToSamplers();
// 7. Pass the vertex and texture coordinates
passVertexAndTexCoord();
// 8. Draw (triangle strip: 4 vertices → 2 triangles → full screen)
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_COORDS.length / 3);
// 9. Disable the vertex attribute arrays (avoid leaking state into later draws)
int vPositionLoc = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
int vTexCoordLoc = GLES20.glGetAttribLocation(shaderProgram, "vTexCoord");
GLES20.glDisableVertexAttribArray(vPositionLoc);
GLES20.glDisableVertexAttribArray(vTexCoordLoc);
// Check for rendering errors
int glError = GLES20.glGetError();
if (glError != GLES20.GL_NO_ERROR) {
Log.e(TAG, "onDrawFrame OpenGL错误: " + glError);
}
}
// ---------------------- OpenGL helpers ----------------------
/**
 * Create a FloatBuffer in native memory (avoids Java-heap copies).
 */
private FloatBuffer createFloatBuffer(float[] data) {
if (data == null || data.length == 0) return null;
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(data.length * 4); // 4 bytes per float
byteBuffer.order(ByteOrder.nativeOrder()); // match the native byte order
FloatBuffer floatBuffer = byteBuffer.asFloatBuffer();
floatBuffer.put(data);
floatBuffer.position(0); // rewind for reading
return floatBuffer;
}
/**
 * Compile and link the shader program (vertex + fragment).
 */
private int compileShaderProgram(String vertexCode, String fragmentCode) {
// 1. Compile the vertex shader
int vertexShader = compileSingleShader(GLES20.GL_VERTEX_SHADER, vertexCode);
if (vertexShader == 0) return 0;
// 2. Compile the fragment shader
int fragmentShader = compileSingleShader(GLES20.GL_FRAGMENT_SHADER, fragmentCode);
if (fragmentShader == 0) {
GLES20.glDeleteShader(vertexShader); // clean up the vertex shader already created
return 0;
}
// 3. Link the program
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
// 4. Check the link status
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
String errorLog = GLES20.glGetProgramInfoLog(program);
Log.e(TAG, "着色器程序链接失败: " + errorLog);
GLES20.glDeleteProgram(program);
program = 0;
}
// 5. Delete the intermediate shader objects (no longer needed once linked)
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
return program;
}
/**
 * Compile a single shader (vertex or fragment).
 */
private int compileSingleShader(int shaderType, String shaderCode) {
int shader = GLES20.glCreateShader(shaderType);
if (shader == 0) {
Log.e(TAG, "创建着色器失败,类型: " + (shaderType == GLES20.GL_VERTEX_SHADER ? "顶点" : "片段"));
return 0;
}
// Load the shader source and compile
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
// Check the compile status
int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] != GLES20.GL_TRUE) {
String errorLog = GLES20.glGetShaderInfoLog(shader);
Log.e(TAG, (shaderType == GLES20.GL_VERTEX_SHADER ? "Vertex" : "Fragment") + " shader compilation failed: " + errorLog);
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
/**
 * Initialize texture parameters (shared by the Y/U/V textures).
 */
private void initTexture(int textureId) {
if (textureId == 0) return;
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
// Linear filtering (smoother image)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
// Clamp to edge (avoids artifacts at the borders)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
/**
 * Consume the pending YUV Image (extracts the Y/U/V planes, honoring row and pixel strides).
 */
private boolean processPendingYUV() {
Image image;
synchronized (yuvLock) {
if (pendingImage == null) {
return false; // no new data
}
// Take the pending image and release the lock quickly
image = pendingImage;
pendingImage = null;
}
Log.d(TAG, "processPendingYUV: image=" + image);
try {
// 1. Image dimensions
yuvWidth = image.getWidth();
yuvHeight = image.getHeight();
Image.Plane[] planes = image.getPlanes();
if (planes.length < 3) {
Log.e(TAG, "Fewer than 3 YUV planes, cannot extract data");
return false;
}
// 2. Extract Y (plane 0: pixelStride is 1, rowStride may include padding)
Image.Plane yPlane = planes[0];
int yRowStride = yPlane.getRowStride();
int yPixelStride = yPlane.getPixelStride();
yBuffer = yPlane.getBuffer();
yData = extractPlaneData(yBuffer, yRowStride, yPixelStride, yuvWidth, yuvHeight);
if (yData == null || yData.length != yuvWidth * yuvHeight) {
Log.e(TAG, "Y extraction failed, length mismatch: " + (yData != null ? yData.length : 0) + " vs " + (yuvWidth * yuvHeight));
return false;
}
// 3. Extract U/V (plane 1: U, plane 2: V; may be planar or interleaved)
Image.Plane uPlane = planes[1];
Image.Plane vPlane = planes[2];
int uvRowStride = uPlane.getRowStride();
int uvPixelStride = uPlane.getPixelStride();
int uvWidth = yuvWidth / 2; // in YUV 4:2:0 the chroma planes are half the Y size
int uvHeight = yuvHeight / 2;
uvTexWidth = uvWidth;
uvTexHeight = uvHeight;
// Handle semi-planar (interleaved UV, pixelStride == 2) vs planar (separate U/V, pixelStride == 1)
if (uvPixelStride == 2) {
ByteBuffer uvBuffer = uPlane.getBuffer();
int uvBufferSize = uvBuffer.remaining();
uData = new byte[uvWidth * uvHeight];
vData = new byte[uvWidth * uvHeight];
uvBuffer.rewind(); // read from the start
// Copy row by row for efficiency
byte[] rowBuffer = new byte[uvRowStride];
for (int row = 0; row < uvHeight; row++) {
int rowStart = row * uvRowStride;
if (rowStart >= uvBufferSize) break;
int bytesToRead = Math.min(uvRowStride, uvBufferSize - rowStart);
uvBuffer.position(rowStart);
uvBuffer.get(rowBuffer, 0, bytesToRead);
// Pull U and V out of the interleaved row
for (int col = 0; col < uvWidth; col++) {
int offset = col * 2; // 2 bytes per chroma sample
if (offset >= bytesToRead) break; // guard against running past the row
vData[row * uvWidth + col] = rowBuffer[offset + 1];
uData[row * uvWidth + col] = rowBuffer[offset];
}
}
} else {
// Planar layout (separate U/V planes, e.g. I420-like): read each plane independently
uBuffer = uPlane.getBuffer();
vBuffer = vPlane.getBuffer();
uData = extractPlaneData(uBuffer, uvRowStride, uvPixelStride, uvWidth, uvHeight);
vData = extractPlaneData(vBuffer, uvRowStride, uvPixelStride, uvWidth, uvHeight);
}
// 4. Validate the U/V data lengths
if (uData == null || vData == null || uData.length != uvWidth * uvHeight || vData.length != uvWidth * uvHeight) {
Log.e(TAG, "U/V extraction failed, length mismatch");
return false;
}
hasNewFrame = false; // reset the flag once the frame has been consumed
Log.d(TAG, "YUV frame processed: " + yuvWidth + "x" + yuvHeight + ", Y length=" + yData.length + ", U/V length=" + uData.length);
return true;
} catch (Exception e) {
Log.e(TAG, "处理YUV数据异常: " + e.getMessage(), e);
return false;
} finally {
// The Image must always be closed, or the ImageReader runs out of buffers
if (image != null) {
image.close();
}
}
}
/**
 * Extract one plane's data (honors rowStride and pixelStride, skipping padding bytes).
 */
private byte[] extractPlaneData(ByteBuffer buffer, int rowStride, int pixelStride, int width, int height) {
if (buffer == null || rowStride <= 0 || pixelStride <= 0 || width <= 0 || height <= 0) {
Log.w(TAG, "提取平面数据参数无效");
return null;
}
byte[] data = new byte[width * height];
int dataIdx = 0;
// Read row by row (skipping the padding implied by rowStride)
for (int row = 0; row < height; row++) {
// Start of this row in the buffer
int bufferRowStart = row * rowStride;
// Read the row's valid pixels (width samples, pixelStride bytes apart)
for (int col = 0; col < width; col++) {
int bufferPos = bufferRowStart + col * pixelStride;
data[dataIdx++] = buffer.get(bufferPos);
}
}
return data;
}
/**
 * Upload data to a texture (allocated on first use, then only the pixels are updated).
 */
private void uploadTexture(int textureId, byte[] data, int width, int height, boolean isYTexture) {
if (textureId == 0 || data == null || width <= 0 || height <= 0) {
Log.w(TAG, "上传纹理参数无效");
return;
}
// 绑定纹理
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
// 设置像素对齐(YUV数据是1字节对齐,默认是4字节,必须修改)
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
// 检查纹理是否已创建(尺寸匹配则更新,否则重新创建)
boolean textureCreated = false;
if (isYTexture) {
textureCreated = (yTexWidth == width && yTexHeight == height);
} else {
textureCreated = (uvTexWidth == width && uvTexHeight == height);
}
ByteBuffer dataBuffer = ByteBuffer.wrap(data);
if (!textureCreated) {
// First allocation (GL_LUMINANCE: single-channel 8-bit data)
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D, 0,
GLES20.GL_LUMINANCE,
width, height, 0,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
dataBuffer
);
// Record the texture size
if (isYTexture) {
yTexWidth = width;
yTexHeight = height;
} else {
uvTexWidth = width;
uvTexHeight = height;
}
Log.d(TAG, "创建纹理: " + (isYTexture ? "Y" : "UV") + ",尺寸=" + width + "x" + height);
} else {
// 纹理已存在,更新数据(只更新像素,不重新创建纹理)
GLES20.glTexSubImage2D(
GLES20.GL_TEXTURE_2D, 0,
0, 0, // offset (x, y)
width, height,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
dataBuffer
);
}
// Unbind the texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
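// Note: GL_LUMINANCE works for single-channel uploads on OpenGL ES 2.0; if this renderer were moved to an
// ES 3.0 core context, GL_R8/GL_RED textures (sampling the red channel in the shader) would be the usual alternative.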
/**
 * Bind the Y/U/V textures to the shader samplers (yTex/uTex/vTex).
 */
private void bindTexturesToSamplers() {
// Bind the Y texture to TEXTURE0 (sampler yTex)
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[0]);
int yTexLoc = GLES20.glGetUniformLocation(shaderProgram, "yTex");
GLES20.glUniform1i(yTexLoc, 0);
// Bind the U texture to TEXTURE1 (sampler uTex)
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[1]);
int uTexLoc = GLES20.glGetUniformLocation(shaderProgram, "uTex");
GLES20.glUniform1i(uTexLoc, 1);
// Bind the V texture to TEXTURE2 (sampler vTex)
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIds[2]);
int vTexLoc = GLES20.glGetUniformLocation(shaderProgram, "vTex");
GLES20.glUniform1i(vTexLoc, 2);
// Verify the sampler locations
if (yTexLoc == -1 || uTexLoc == -1 || vTexLoc == -1) {
Log.e(TAG, "Invalid shader sampler locations: y=" + yTexLoc + ", u=" + uTexLoc + ", v=" + vTexLoc);
}
}
/**
 * Pass the vertex and texture coordinates to the shader.
 */
private void passVertexAndTexCoord() {
// Vertex positions (vPosition)
int vPositionLoc = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
GLES20.glEnableVertexAttribArray(vPositionLoc);
GLES20.glVertexAttribPointer(
vPositionLoc,
3, // 3 components per vertex (x, y, z)
GLES20.GL_FLOAT,
false, // not normalized
3 * 4, // stride: 3 floats of 4 bytes each
vertexBuffer
);
// Texture coordinates (vTexCoord)
int vTexCoordLoc = GLES20.glGetAttribLocation(shaderProgram, "vTexCoord");
GLES20.glEnableVertexAttribArray(vTexCoordLoc);
GLES20.glVertexAttribPointer(
vTexCoordLoc,
2, // 2 components per texture coordinate (s, t)
GLES20.GL_FLOAT,
false,
2 * 4, // stride: 2 floats of 4 bytes each
texCoordBuffer
);
// Verify the attribute locations
if (vPositionLoc == -1 || vTexCoordLoc == -1) {
Log.e(TAG, "Invalid shader attribute locations: vPosition=" + vPositionLoc + ", vTexCoord=" + vTexCoordLoc);
}
}
// ---------------------- GL surface size callback interface ----------------------
public interface SurfaceSizeCallback {
void onSurfaceSizeChanged(int width, int height);
}
}