public class MainActivity extends AppCompatActivity {
private static final String TAG = "camera2api";
private static final int REQUEST_CAMERA_PERMISSION = 100;
private GLSurfaceView glSurfaceView;
private Button captureButton;
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSession;
private CaptureRequest.Builder captureRequestBuilder;
private String cameraId;
private Handler backgroundHandler;
private boolean isSessionClosed;
private HandlerThread backgroundThread;
private ImageReader imageReader;
private boolean isTextureAvailable = false; // 跟踪Surface状态
private CameraManager manager;
private volatile boolean isCapturing = false;
private StreamConfigurationMap map;
private long lastClickTime = 0;
private static final long MIN_CLICK_INTERVAL = 1000; // 最小点击间隔1秒
private File file;
private ContentResolver resolver;
private ContentValues values;
private Uri imageUri;
private FilterRendererNoFilter cameraRengerer;
private Surface previewSurface;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    Log.d(TAG, "onCreate ——————————————————————");
    glSurfaceView = findViewById(R.id.glsurfaceView);
    captureButton = findViewById(R.id.btnCapture);
    // 1. Set up the GLSurfaceView and its renderer (OpenGL ES 2.0).
    glSurfaceView.setEGLContextClientVersion(2);
    cameraRengerer = new FilterRendererNoFilter(this);
    glSurfaceView.setRenderer(cameraRengerer);
    // Initialize camera configuration (camera id, stream map, MediaStore values).
    initCamera();
    // Capture-button listener, debounced so rapid taps cannot fire
    // takePicture() while a previous capture is still in flight.
    captureButton.setOnClickListener(v -> {
        long currentTime = SystemClock.elapsedRealtime();
        if (currentTime - lastClickTime > MIN_CLICK_INTERVAL) {
            lastClickTime = currentTime;
            takePicture();
        } else {
            Log.d(TAG, "点击过快,已忽略");
        }
    });
}
// === Called by the renderer once its SurfaceTexture/Surface exists ===
// Stores the preview Surface and opens the camera (or requests permission).
public void onGLSurfaceReady(Surface surface) {
    Log.d(TAG, "GLSurface准备就绪");
    previewSurface = surface;
    // FIX: mark the surface as available. This flag was previously only set
    // by the unused TextureView listener, so in the GLSurfaceView path it
    // stayed false and openCamera() re-posted itself forever — the preview
    // never started.
    isTextureAvailable = true;
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        openCamera();
    } else {
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
    }
}
@Override
protected void onResume() {
    Log.d(TAG, "onResume —————————————————————— ");
    Log.d(TAG, Log.getStackTraceString(new Throwable()));
    super.onResume();
    // checkSelfPermission returns PERMISSION_GRANTED when the app already
    // holds CAMERA, PERMISSION_DENIED otherwise.
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        Log.i(TAG, "没有相机权限——>开始请求相机权限");
        // Request CAMERA; the asynchronous result is delivered to
        // onRequestPermissionsResult() tagged with REQUEST_CAMERA_PERMISSION.
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
    }
    // Note: the permission request above is asynchronous; execution continues.
    // Ensure the background camera thread/handler exists for later camera calls.
    startBackgroundThread();
}
@Override
protected void onPause() {
    super.onPause();
    Log.d(TAG, "onPause ——————————————————————");
    if (!isCapturing && cameraCaptureSession != null) {
        // Not capturing: just pause the repeating preview request. The session
        // and device stay open so onResume can restart cheaply.
        try {
            cameraCaptureSession.stopRepeating();
            Log.d(TAG, "onPause: 暂停预览重复请求(核心资源未释放)");
        } catch (CameraAccessException e) {
            Log.e(TAG, "onPause: 停止预览失败", e);
        }
    } else if (isCapturing) {
        // FIX: this branch was previously nested INSIDE the `!isCapturing`
        // block above, making it unreachable dead code. While a still capture
        // is in flight, retry the pause after a short delay instead of
        // interrupting the capture.
        Log.w(TAG, "onPause: 拍照中,暂不处理预览暂停");
        new Handler().postDelayed(() -> {
            if (!isCapturing && cameraCaptureSession != null) {
                try {
                    cameraCaptureSession.stopRepeating();
                    Log.d(TAG, "onPause: 拍照完成后,暂停预览");
                } catch (CameraAccessException e) {
                    Log.e(TAG, "onPause: 延迟停止预览失败", e);
                }
            }
        }, 1000);
    }
}
@Override
protected void onStop() {
    super.onStop();
    Log.d(TAG, "onStop: Activity 完全不可见(如按Home键),释放核心资源");
    // 1. If the Activity is being destroyed, let onDestroy release everything
    //    (avoids double-close of the same resources).
    if (isFinishing()) {
        Log.d(TAG, "onStop: Activity 正在销毁,核心资源由 onDestroy 处理");
        return;
    }
    // 2. Release the heavyweight camera resources (session, device, reader)
    //    but keep the background thread so onResume can recover quickly.
    //    NOTE(review): nothing in the visible onResume path re-opens the
    //    camera after this runs, so the preview may not come back after
    //    Home + return — confirm.
    if (cameraCaptureSession != null) {
        cameraCaptureSession.close();
        cameraCaptureSession = null;
        Log.d(TAG, "onStop: 关闭 CameraCaptureSession");
    }
    if (cameraDevice != null) {
        cameraDevice.close();
        cameraDevice = null;
        Log.d(TAG, "onStop: 关闭 CameraDevice");
    }
    if (imageReader != null) {
        imageReader.close();
        imageReader = null;
        Log.d(TAG, "onStop: 关闭 ImageReader");
    }
    // Flag checked by checkTakePicture(); never reset to false in visible code.
    isSessionClosed = true;
}
@Override
protected void onDestroy() {
    super.onDestroy();
    Log.d(TAG, "onDestroy: Activity 彻底销毁,释放所有资源");
    // 1. Release camera resources (guards against leaks if onStop was skipped).
    if (cameraCaptureSession != null) {
        cameraCaptureSession.close();
        cameraCaptureSession = null;
    }
    if (cameraDevice != null) {
        cameraDevice.close();
        cameraDevice = null;
    }
    if (imageReader != null) {
        imageReader.close();
        imageReader = null;
    }
    // 2. Stop the background HandlerThread (final teardown, avoid thread leak).
    stopBackgroundThread();
    // 3. Drop remaining references to help GC.
    glSurfaceView = null;
    captureButton = null;
    manager = null;
    resolver = null;
    values = null;
    imageUri = null;
    backgroundHandler = null;
    backgroundThread = null;
    Log.d(TAG, "onDestroy: 所有资源释放完成");
}
// TextureView surface listener.
// NOTE(review): the layout uses a GLSurfaceView, and this listener is never
// registered anywhere in this file — confirm whether it is dead code left
// over from a TextureView-based implementation.
private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
    // Fired once the SurfaceTexture has been initialized.
    @Override
    public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
        isTextureAvailable = true; // mark the surface usable for openCamera()
        Log.d(TAG,"Surface就绪,触发相机初始化");
        if (ActivityCompat.checkSelfPermission(MainActivity.this,Manifest.permission.CAMERA)==PackageManager.PERMISSION_GRANTED){
            openCamera();
        }
    }
    @Override
    public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
        Log.d(TAG, "onSurfaceTextureSizeChanged: " + width + "x" + height);
    }
    @Override
    public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
        Log.d(TAG, "onSurfaceTextureDestroyed");
        isTextureAvailable = false;
        // Stop the repeating preview so the camera no longer targets a dead surface.
        if (cameraCaptureSession!=null){
            try {
                cameraCaptureSession.stopRepeating();
            } catch (CameraAccessException e) {
                Log.e(TAG,"停止预览失败",e);
            }
        }
        return true; // true: let the system release the SurfaceTexture
    }
    @Override
    public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {
        // New frame delivered — nothing to do here.
    }
};
// One-time camera setup: picks the first camera id, caches its stream
// configuration map, and pre-builds the MediaStore values used when saving.
private void initCamera() {
    Log.d(TAG, "initCamera: 初始化相机配置");
    try {
        // 1. CameraManager + stream configuration map for the first camera.
        manager = (CameraManager) getSystemService(CAMERA_SERVICE);
        cameraId = manager.getCameraIdList()[0];
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            Log.e(TAG, "错误: StreamConfigurationMap为空!!");
        }
        // 2. MediaStore row template for saved pictures.
        //    NOTE(review): DISPLAY_NAME is fixed at init time, so every saved
        //    picture reuses the same timestamped name — confirm intent.
        resolver = getContentResolver();
        values = new ContentValues();
        values.put(MediaStore.Images.Media.DISPLAY_NAME, "pic_" + System.currentTimeMillis() + ".jpg");
        values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
        values.put(MediaStore.Images.Media.RELATIVE_PATH, Environment.DIRECTORY_PICTURES);
    } catch (CameraAccessException e) {
        Log.e(TAG, "相机访问异常: " + e.getMessage());
    } catch (NullPointerException e) {
        // NOTE(review): catching NPE hides real bugs (e.g. an empty camera
        // id list would throw ArrayIndexOutOfBoundsException, not NPE).
        Log.e(TAG, "NPE: " + e.getMessage());
    }
}
// Picks the largest size (by area) whose aspect ratio is within 0.1 of the
// view's and that fits inside width x height; falls back to choices[0].
private Size chooseOptimalSize(Size[] choices, int width, int height) {
    final float targetRatio = (float) width / height;
    List<Size> candidates = new ArrayList<>();
    for (Size candidate : choices) {
        boolean fits = candidate.getWidth() <= width && candidate.getHeight() <= height;
        float candidateRatio = (float) candidate.getWidth() / candidate.getHeight();
        if (fits && Math.abs(candidateRatio - targetRatio) <= 0.1) {
            candidates.add(candidate);
        }
    }
    if (candidates.isEmpty()) {
        Log.w(TAG, "未找到完美匹配尺寸,使用默认");
        return choices[0];
    }
    return Collections.max(candidates, new CompareSizesByArea());
}
// Orders sizes by total pixel area; long math avoids int overflow.
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        long areaLhs = (long) lhs.getWidth() * lhs.getHeight();
        long areaRhs = (long) rhs.getWidth() * rhs.getHeight();
        return Long.compare(areaLhs, areaRhs);
    }
}
// Opens the camera device once the preview surface exists; retries shortly
// if the surface is not ready yet.
private void openCamera() {
    if (!isTextureAvailable) {
        Log.w(TAG, "Surface不可用,延迟打开相机,100ms后重试");
        // FIX: the log promises a 100 ms retry but the code waited 10 000 ms.
        // Also guard backgroundHandler — it is null before
        // startBackgroundThread() runs and would NPE here.
        if (backgroundHandler != null) {
            backgroundHandler.postDelayed(this::openCamera, 100);
        }
        return;
    }
    Log.d(TAG, "openCamera: 尝试打开相机");
    try {
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
            Log.i(TAG, "1.打开相机: " + cameraId);
            manager.openCamera(cameraId, stateCallback, backgroundHandler);
        } else {
            Log.w(TAG, "相机权限未授予");
        }
    } catch (CameraAccessException e) {
        Log.e(TAG, "打开相机失败: " + e.getMessage());
    } catch (SecurityException e) {
        Log.e(TAG, "安全异常: " + e.getMessage());
    }
}
// Camera device lifecycle callback: stores the device on open and starts the
// preview; cleans up on disconnect/error.
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera) {
        Log.i(TAG, "相机已打开");
        cameraDevice = camera;
        Log.i(TAG, "2.1 开始配置预览流");
        createCameraPreviewSession();
    }
    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        Log.w(TAG, "相机断开连接");
        // FIX: close the callback's own camera (the field may be stale/null)
        // and null the field so later code does not touch a dead device.
        camera.close();
        cameraDevice = null;
    }
    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        Log.e(TAG, "相机错误: " + error);
        // Same as above: operate on the callback parameter, then clear the field.
        camera.close();
        cameraDevice = null;
    }
};
// 2.1 Configure the preview stream: builds a capture session with two
// outputs (GL preview surface + JPEG ImageReader) and starts the repeating
// preview request.
private void createCameraPreviewSession() {
    if (cameraDevice == null || !isTextureAvailable) { // double check
        Log.e(TAG, "创建预览会话失败: 相机或Surface不可用");
        return;
    }
    try {
        // 1.1 Pick a preview size matching the view's bounds/aspect ratio.
        Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), glSurfaceView.getWidth(), glSurfaceView.getHeight());
        Log.i(TAG, "选择预览尺寸: " + previewSize.getWidth() + "x" + previewSize.getHeight());
        // 1.2 Wrap the renderer's SurfaceTexture as the preview Surface.
        //     NOTE(review): getSurfaceTexture() is null until the GL thread
        //     has run onSurfaceCreated — confirm ordering. Also a new Surface
        //     is created on EVERY call (e.g. after each capture); consider
        //     reusing one instance.
        SurfaceTexture previewTexture = cameraRengerer.getSurfaceTexture();
        previewTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        previewSurface = new Surface(previewTexture);
        // 2. Configure the still-capture output (JPEG ImageReader).
        Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
        Size captureSize = chooseOptimalSize(jpegSizes);
        Log.i(TAG, "使用拍照尺寸: " + captureSize.getWidth() + "x" + captureSize.getHeight());
        // Recreate the reader only when the capture width changed.
        if (imageReader == null || imageReader.getWidth() != captureSize.getWidth()) {
            if (imageReader != null) imageReader.close();
            imageReader = ImageReader.newInstance(
                    captureSize.getWidth(),
                    captureSize.getHeight(),
                    ImageFormat.JPEG,
                    2 // buffer count
            );
        }
        // 3. Dual output surfaces: preview + capture.
        List<Surface> outputSurfaces = new ArrayList<>(2);
        outputSurfaces.add(previewSurface);
        outputSurfaces.add(imageReader.getSurface());
        // Create the session; callbacks run on the background handler.
        cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession session) {
                Log.i(TAG, "2.2 预览会话配置成功");
                cameraCaptureSession = session;
                try {
                    // Issue the repeating preview request with continuous AF
                    // and auto-exposure + auto-flash.
                    captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    captureRequestBuilder.addTarget(previewSurface);
                    captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                    captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                    Log.i(TAG, "3.开始下发预览请求");
                    cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "设置预览请求失败: " + e.getMessage());
                }
            }
            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                // NOTE(review): this runs on the background thread; Toast
                // from a non-UI thread may misbehave — confirm.
                Log.e(TAG, "预览会话配置失败");
                Toast.makeText(MainActivity.this, "配置失败", Toast.LENGTH_SHORT).show();
            }
        }, backgroundHandler);
    } catch (CameraAccessException e) {
        Log.e(TAG, "创建预览会话异常: " + e.getMessage());
    }
}
// Capture-size selection: prefers the widest ~16:9 size, then the widest
// size overall, then a 1080p fallback for an empty input.
private Size chooseOptimalSize(Size[] choices) {
    final double targetRatio = 16.0 / 9.0;
    Size best = null;
    // Pass 1: widest size whose aspect ratio is within 0.1 of 16:9.
    for (Size candidate : choices) {
        double ratio = (double) candidate.getWidth() / candidate.getHeight();
        boolean ratioOk = Math.abs(ratio - targetRatio) <= 0.1;
        if (ratioOk && (best == null || candidate.getWidth() > best.getWidth())) {
            best = candidate;
        }
    }
    // Pass 2: no 16:9 match — take the widest size overall.
    if (best == null) {
        for (Size candidate : choices) {
            if (best == null || candidate.getWidth() > best.getWidth()) {
                best = candidate;
            }
        }
    }
    // Pass 3: empty input — hard-coded 1080p default.
    return best != null ? best : new Size(1920, 1080);
}
// Persists JPEG bytes through MediaStore.
// FIX: the original inserted a MediaStore row but then wrote the bytes to a
// raw File path — under scoped storage (Android 10+) the direct write fails
// and the MediaStore row stays empty. Write through the resolver's output
// stream for the inserted row instead. The File parameter is kept for the
// existing callers/logging.
private void saveImage(byte[] bytes, File file) {
    Log.d(TAG, "保存图像: " + file.getAbsolutePath());
    imageUri = resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
    if (imageUri != null) {
        try (java.io.OutputStream output = resolver.openOutputStream(imageUri)) {
            if (output != null) {
                output.write(bytes);
                Log.i(TAG, "图像保存成功, 大小: " + bytes.length + " bytes");
            } else {
                Log.e(TAG, "保存文件失败: 无法打开输出流");
            }
        } catch (IOException e) {
            Log.e(TAG, "保存文件失败: " + e.getMessage());
        }
    }
}
// Invoked after the user responds to the runtime permission dialog.
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                       @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    Log.d(TAG, "权限请求结果: " + requestCode);
    if (requestCode == REQUEST_CAMERA_PERMISSION) {
        // FIX: grantResults can be empty when the request is interrupted;
        // indexing [0] unconditionally could throw
        // ArrayIndexOutOfBoundsException. Empty is treated as denied here.
        if (grantResults.length == 0 || grantResults[0] == PackageManager.PERMISSION_DENIED) {
            Log.w(TAG, "用户拒绝相机权限");
            Toast.makeText(this, "需要相机权限", Toast.LENGTH_SHORT).show();
            finish();
        } else {
            Log.i(TAG, "用户授予相机权限");
            // FIX: actually open the camera now that permission is granted —
            // the openCamera() attempt deferred from onGLSurfaceReady was
            // otherwise lost and the preview never started.
            if (previewSurface != null) {
                openCamera();
            }
        }
    }
}
// Lazily spins up the shared background HandlerThread used for camera work.
private void startBackgroundThread() {
    if (backgroundThread != null) {
        return; // already running
    }
    backgroundThread = new HandlerThread("CameraBackground");
    backgroundThread.start();
    backgroundHandler = new Handler(backgroundThread.getLooper());
    Log.d(TAG, "后台线程启动");
}
// Stops the background HandlerThread and drops its references.
private void stopBackgroundThread() {
    if (backgroundThread != null) {
        Log.d(TAG, "停止后台线程");
        backgroundThread.quitSafely();
        try {
            backgroundThread.join();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
            Log.e(TAG, "停止线程失败: " + e.getMessage());
        } finally {
            // FIX: always clear the references — previously they were only
            // nulled on the success path, leaking a dead handler/thread pair
            // after an interrupt.
            backgroundThread = null;
            backgroundHandler = null;
        }
    }
}
// Closes the capture session (device/reader handling lives in onStop/onDestroy).
private void closeCamera() {
    Log.d(TAG, "关闭相机资源");
    // FIX: abortCaptures was called without a null check on the session, and
    // a CameraAccessException was rethrown as RuntimeException — crashing the
    // app during teardown. Log and continue instead.
    if (isCapturing && cameraCaptureSession != null) {
        Log.w(TAG, "正在拍照中,等待完成或取消...");
        try {
            cameraCaptureSession.abortCaptures(); // cancel in-flight captures
        } catch (CameraAccessException e) {
            Log.e(TAG, "closeCamera: abortCaptures失败", e);
        }
    }
    if (cameraCaptureSession != null) {
        cameraCaptureSession.close();
        cameraCaptureSession = null;
    }
    // NOTE(review): cameraDevice is intentionally not closed here (matches
    // original behavior; onStop/onDestroy own it) — confirm.
    isSessionClosed = true;
}
// Validates everything takePicture() depends on; returns false to abort.
private boolean checkTakePicture() {
    if (cameraDevice == null) {
        Log.w(TAG, "拍照失败: 相机未初始化");
        return false;
    }
    if (cameraCaptureSession == null) {
        Log.e(TAG, "拍照错误: CameraCaptureSession为空");
        return false;
    }
    if (backgroundHandler == null) {
        Log.e(TAG, "拍照错误: backgroundHandler未初始化");
        startBackgroundThread(); // recover for the next attempt
        return false;
    }
    if (isSessionClosed) {
        Log.e(TAG, "当前会话已关闭");
        // FIX: previously this only logged and still returned true, so
        // takePicture() went on to use a closed session and threw
        // IllegalStateException.
        // TODO(review): isSessionClosed is never reset to false when a new
        // session is configured — reset it in onConfigured.
        return false;
    }
    return true;
}
// Full still-capture flow: validate state, build the STILL_CAPTURE request,
// save the JPEG from the ImageReader, then restore the preview.
private void takePicture() {
    Log.i(TAG, "4.开始拍照流程——————————");
    try {
        // 1. Preconditions: device, session and handler must all be ready.
        boolean checkFlag = checkTakePicture();
        if (!checkFlag) {
            Log.i(TAG, "拍照流程————检查未通过!退出拍照!");
            return;
        }
        // 2. Build the still-capture request targeting the ImageReader.
        CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(imageReader.getSurface());
        // 3. Request parameters: full auto 3A.
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        // FIX: JPEG_ORIENTATION expects degrees (0/90/180/270), but
        // Display.getRotation() returns the Surface.ROTATION_0..ROTATION_270
        // constants (values 0..3). Convert the constant to degrees.
        // TODO(review): a fully correct value also folds in
        // CameraCharacteristics.SENSOR_ORIENTATION — verify on device.
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, rotation * 90);
        // 4. Save the JPEG once the frame lands in the ImageReader.
        imageReader.setOnImageAvailableListener(reader -> {
            Log.d(TAG, "图像数据可用");
            try (Image image = reader.acquireLatestImage()) {
                if (image != null) {
                    file = new File(
                            Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                            "pic_" + System.currentTimeMillis() + ".jpg"
                    );
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.remaining()];
                    buffer.get(bytes);
                    saveImage(bytes, file);
                    // Ask the media scanner to index the file into the gallery.
                    Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                    mediaScanIntent.setData(Uri.fromFile(file));
                    sendBroadcast(mediaScanIntent);
                }
            } catch (Exception e) {
                Log.e(TAG, "保存图像错误: " + e.getMessage());
            }
        }, backgroundHandler);
        // 5. Capture callback: restore preview and report the result.
        CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                           @NonNull CaptureRequest request,
                                           @NonNull TotalCaptureResult result) {
                isCapturing = false;
                super.onCaptureCompleted(session, request, result);
                Log.i(TAG, "拍照完成,保存至: " + file.getAbsolutePath());
                runOnUiThread(() ->
                        Toast.makeText(MainActivity.this, "保存至: " + file, Toast.LENGTH_SHORT).show()
                );
                createCameraPreviewSession(); // resume preview
            }
            @Override
            public void onCaptureFailed(@NonNull CameraCaptureSession session,
                                        @NonNull CaptureRequest request,
                                        @NonNull CaptureFailure failure) {
                super.onCaptureFailed(session, request, failure);
                // FIX: clear the flag on failure too — otherwise isCapturing
                // stayed true forever and blocked onPause/closeCamera logic.
                isCapturing = false;
                Log.e(TAG, "拍照失败: " + failure.getReason());
            }
        };
        // 6. Stop preview, play the shutter animation, then fire the capture.
        Log.d(TAG, "停止预览");
        cameraCaptureSession.stopRepeating();
        Log.d(TAG, "播放黑闪动画");
        flash_mode();
        Log.d(TAG, "4.下发拍照");
        isCapturing = true;
        cameraCaptureSession.capture(captureBuilder.build(), captureCallback, backgroundHandler);
    } catch (CameraAccessException | IllegalStateException | SecurityException e) {
        // FIX: also reset the flag when the capture call itself threw.
        isCapturing = false;
        Log.e(TAG, "拍照过程异常: " + e.getClass().getSimpleName(), e);
    }
}
// Plays a brief "shutter flash": fades a black overlay in over 100 ms, then
// hides it again.
private void flash_mode() {
    final View flashView = findViewById(R.id.flashview);
    flashView.setVisibility(View.VISIBLE);
    // Alpha 0 -> 1 simulates the blink.
    AlphaAnimation shutterBlink = new AlphaAnimation(0.0f, 1.0f);
    shutterBlink.setDuration(100);
    shutterBlink.setAnimationListener(new Animation.AnimationListener() {
        @Override
        public void onAnimationStart(Animation animation) {
            // no-op
        }
        @Override
        public void onAnimationEnd(Animation animation) {
            // Hide the overlay once the blink finishes.
            flashView.setVisibility(View.INVISIBLE);
        }
        @Override
        public void onAnimationRepeat(Animation animation) {
            // no-op
        }
    });
    flashView.startAnimation(shutterBlink);
}
}
package com.android.example.cameraappxjava.util;
import android.content.Context;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.graphics.SurfaceTexture;
import androidx.appcompat.app.AppCompatActivity;
import com.android.example.cameraappxjava.R;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class FilterRendererNoFilter implements GLSurfaceView.Renderer {
    // FIX: this renderer previously extended AppCompatActivity and called
    // findViewById(...) on itself. The instance is created with `new` and
    // never goes through the Activity lifecycle, so that lookup crashed at
    // runtime. It is now a plain renderer; views are resolved through the
    // host activity supplied as the constructor Context.
    private final Context mContext;
    private SurfaceTexture mSurfaceTexture; // bridges camera frames into the GPU
    private int mCameraTextureId;           // GPU texture holding the camera frame
    private int mProgram;                   // linked vertex + fragment program
    private GLSurfaceView mGLSurfaceView;   // host view, cached for requestRender()
    // Full-screen quad vertices: bottom-left, bottom-right, top-left, top-right.
    private final float[] mVertexCoords = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f};
    // Texture coordinates, vertically flipped to undo the camera's inverted frame.
    private final float[] mTexCoords = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f};
    private final FloatBuffer mVertexBuffer;   // GPU-readable vertex coords
    private final FloatBuffer mTexCoordBuffer; // GPU-readable texture coords

    public FilterRendererNoFilter(Context context) {
        mContext = context;
        // Move the coordinate arrays into direct, native-order buffers.
        mVertexBuffer = ByteBuffer.allocateDirect(mVertexCoords.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(mVertexCoords);
        mVertexBuffer.position(0);
        mTexCoordBuffer = ByteBuffer.allocateDirect(mTexCoords.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(mTexCoords);
        mTexCoordBuffer.position(0);
    }

    /** One-time GL setup: camera texture, SurfaceTexture and shader program. */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // 1. Allocate the external-OES texture the camera will write into.
        mCameraTextureId = createCameraTexture();
        // 2. Bind it to a SurfaceTexture so camera frames flow into the GPU.
        mSurfaceTexture = new SurfaceTexture(mCameraTextureId);
        // FIX: look the view up on the HOST activity, not on `this`.
        mGLSurfaceView = ((AppCompatActivity) mContext).findViewById(R.id.glsurfaceView);
        // Request a redraw whenever the camera delivers a new frame.
        mSurfaceTexture.setOnFrameAvailableListener(surfaceTexture -> {
            if (mGLSurfaceView != null) {
                mGLSurfaceView.requestRender();
            }
        });
        // 3. Load, compile and link the no-filter shader pair.
        String vertexShaderCode = loadShaderFromRaw(R.raw.vertex_shader);
        String fragmentShaderCode = loadShaderFromRaw(R.raw.fragment_shader_no_filter);
        mProgram = createOpenGLProgram(vertexShaderCode, fragmentShaderCode);
    }

    /** Per-frame draw: pull the newest camera frame and render the quad. */
    @Override
    public void onDrawFrame(GL10 gl) {
        // Clear to black so no stale frame shows through.
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        // Latch the most recent camera frame into the GL texture.
        mSurfaceTexture.updateTexImage();
        GLES20.glUseProgram(mProgram);
        // Vertex positions.
        int vPositionLoc = GLES20.glGetAttribLocation(mProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(vPositionLoc);
        GLES20.glVertexAttribPointer(vPositionLoc, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
        // Texture coordinates.
        int vTexCoordLoc = GLES20.glGetAttribLocation(mProgram, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vTexCoordLoc);
        GLES20.glVertexAttribPointer(vTexCoordLoc, 2, GLES20.GL_FLOAT, false, 0, mTexCoordBuffer);
        // Bind the camera texture on unit 0 and point the sampler at it.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mCameraTextureId);
        int sTextureLoc = GLES20.glGetUniformLocation(mProgram, "sTexture");
        GLES20.glUniform1i(sTextureLoc, 0);
        // Two triangles (strip) covering the whole viewport.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(vPositionLoc);
        GLES20.glDisableVertexAttribArray(vTexCoordLoc);
    }

    /** Viewport tracks the view size (e.g. on rotation). */
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    /** Allocates an external-OES texture configured for camera input. */
    private int createCameraTexture() {
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        int textureId = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
        // Linear filtering and edge clamping avoid blur/black-border artifacts.
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        // Unbind so later GL calls cannot pollute this texture.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        return textureId;
    }

    /** Reads a raw shader resource as UTF-8 text. */
    private String loadShaderFromRaw(int rawId) {
        // FIX: use try-with-resources so the stream closes on error, and fail
        // loudly instead of returning "" — an empty shader string only fails
        // later in compileShader with a cryptic message.
        try (InputStream is = mContext.getResources().openRawResource(rawId)) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            byte[] buffer = new byte[1024];
            int len;
            while ((len = is.read(buffer)) != -1) {
                baos.write(buffer, 0, len);
            }
            return baos.toString("UTF-8");
        } catch (Exception e) {
            throw new RuntimeException("加载着色器失败:" + e.getMessage(), e);
        }
    }

    /** Compiles both shaders and links them into a GL program. */
    private int createOpenGLProgram(String vertexShaderCode, String fragmentShaderCode) {
        int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] == 0) {
            String errorLog = GLES20.glGetProgramInfoLog(program);
            // FIX: free the invalid program (previously leaked on failure).
            GLES20.glDeleteProgram(program);
            throw new RuntimeException("OpenGL程序链接失败:" + errorLog);
        }
        return program;
    }

    /** Compiles one vertex or fragment shader. */
    private int compileShader(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] == 0) {
            String errorLog = GLES20.glGetShaderInfoLog(shader);
            // FIX: free the invalid shader (previously leaked on failure).
            GLES20.glDeleteShader(shader);
            throw new RuntimeException("着色器编译失败:" + errorLog);
        }
        return shader;
    }

    /**
     * Exposes the SurfaceTexture so the Activity can bind the camera to it.
     * Null until the GL thread has run onSurfaceCreated.
     */
    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }
}
package com.android.example.cameraappxjava.util;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.Surface;
import androidx.appcompat.app.AppCompatActivity;
import com.android.example.cameraappxjava.MainActivity;
import com.android.example.cameraappxjava.R;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class CameraRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "camera2api";
    // FIX: this class previously extended AppCompatActivity while being
    // instantiated with `new`, so calling findViewById() on `this` crashed
    // (the Activity lifecycle never ran). It also dereferenced a `context`
    // field that was never assigned (always null) when calling back into
    // MainActivity — an NPE on every surface creation. Both now go through
    // the constructor-supplied mContext.
    // 1. Core state
    private final Context mContext;
    private SurfaceTexture mSurfaceTexture; // bridge: Camera2 frames -> GPU texture
    private int mCameraTextureId;           // GPU texture holding the camera frame
    private int mProgramNoFilter;           // pass-through shader program
    private int mProgramBlackWhite;         // black-and-white shader program
    private int mCurrentProgram;            // program used for the current frame
    private boolean mIsBlackWhite = false;  // filter toggle
    // 2. Full-screen quad: bottom-left, bottom-right, top-left, top-right.
    private final float[] mVertexCoords = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f};
    // Texture coords, vertically flipped to undo the camera's inverted frame.
    private final float[] mTexCoords = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f};
    private final FloatBuffer mVertexBuffer;   // GPU-readable vertex coords
    private final FloatBuffer mTexCoordBuffer; // GPU-readable texture coords
    private Surface surface; // Surface wrapped around mSurfaceTexture for Camera2

    // 3. Constructor: prepare direct, native-order coordinate buffers.
    public CameraRenderer(Context context) {
        mContext = context;
        mVertexBuffer = ByteBuffer.allocateDirect(mVertexCoords.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(mVertexCoords);
        mVertexBuffer.position(0);
        mTexCoordBuffer = ByteBuffer.allocateDirect(mTexCoords.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(mTexCoords);
        mTexCoordBuffer.position(0);
    }

    /** Callback used to hand the created SurfaceTexture to the host. */
    public interface SurfaceTextureListener {
        void onSurfaceTextureCreated(SurfaceTexture surfaceTexture);
    }
    private SurfaceTextureListener mListener;
    public void setSurfaceTextureListener(SurfaceTextureListener listener) {
        mListener = listener;
    }

    /** One-time GL setup (GL thread): texture, SurfaceTexture, shader programs. */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // 1. Allocate the external-OES texture the camera will write into.
        mCameraTextureId = createCameraTexture();
        // 2. Bind it to a SurfaceTexture so Camera2 frames flow into the GPU.
        mSurfaceTexture = new SurfaceTexture(mCameraTextureId);
        // FIX: resolve the view through the HOST activity, not through `this`.
        // NOTE(review): the id is texture_view while MainActivity uses
        // glsurfaceView — confirm which layout this renderer belongs to.
        GLSurfaceView glView = ((AppCompatActivity) mContext).findViewById(R.id.texture_view);
        // A frame listener is required, otherwise onDrawFrame never triggers.
        mSurfaceTexture.setOnFrameAvailableListener(st -> {
            if (glView != null) {
                glView.requestRender();
            }
        });
        // Notify whoever registered for the SurfaceTexture.
        if (mListener != null) {
            mListener.onSurfaceTextureCreated(mSurfaceTexture);
        } else {
            Log.w(TAG, "SurfaceTexture回调未注册!");
        }
        // 3. Create the Camera2 preview Surface and hand it to the Activity.
        surface = new Surface(mSurfaceTexture);
        // FIX: was `((MainActivity) context)` with `context` never assigned.
        if (mContext instanceof MainActivity) {
            ((MainActivity) mContext).onGLSurfaceReady(surface); // key callback
        }
        // 4. Compile and link both shader programs (no-filter + black/white).
        String vertexShader = loadShaderFromRaw(R.raw.vertex_shader);
        String fragNoFilter = loadShaderFromRaw(R.raw.frag_shader_no_filter);
        String fragBlackWhite = loadShaderFromRaw(R.raw.frag_shader_black_white);
        mProgramNoFilter = createOpenGLProgram(vertexShader, fragNoFilter);
        mProgramBlackWhite = createOpenGLProgram(vertexShader, fragBlackWhite);
        // Default to the pass-through program.
        mCurrentProgram = mProgramNoFilter;
    }

    /** Per-frame render (GL thread): latch newest frame, draw with the active filter. */
    @Override
    public void onDrawFrame(GL10 gl) {
        // Clear to black so no stale frame shows through.
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        // Pull the most recent camera frame into the GL texture.
        mSurfaceTexture.updateTexImage();
        // Select the program for this frame based on the filter toggle.
        mCurrentProgram = mIsBlackWhite ? mProgramBlackWhite : mProgramNoFilter;
        GLES20.glUseProgram(mCurrentProgram);
        // Vertex positions: 2 floats per vertex, tightly packed.
        int vPositionLoc = GLES20.glGetAttribLocation(mCurrentProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(vPositionLoc);
        GLES20.glVertexAttribPointer(vPositionLoc, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
        // Texture coordinates.
        int vTexCoordLoc = GLES20.glGetAttribLocation(mCurrentProgram, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vTexCoordLoc);
        GLES20.glVertexAttribPointer(vTexCoordLoc, 2, GLES20.GL_FLOAT, false, 0, mTexCoordBuffer);
        // Bind the camera texture on unit 0 and point the sampler at it.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mCameraTextureId);
        int sTextureLoc = GLES20.glGetUniformLocation(mCurrentProgram, "sTexture");
        GLES20.glUniform1i(sTextureLoc, 0);
        // Two triangles (strip) covering the whole viewport.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        // Disable attributes to avoid leaking GL state.
        GLES20.glDisableVertexAttribArray(vPositionLoc);
        GLES20.glDisableVertexAttribArray(vTexCoordLoc);
        // Any CPU-side Canvas overlay (e.g. a framing rectangle) must be drawn
        // from the UI-thread surface callbacks, not here.
    }

    /** Viewport tracks the view size (e.g. on rotation). */
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    /** Allocates an external-OES texture configured for camera input. */
    private int createCameraTexture() {
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        int textureId = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
        // Linear min/mag filtering keeps scaling smooth.
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Clamp to edge so borders do not repeat or show black fringes.
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        // Unbind so later GL calls cannot pollute this texture.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        return textureId;
    }

    /** Reads a raw shader resource as UTF-8 text. */
    private String loadShaderFromRaw(int rawId) {
        // FIX: InputStream.available() is only an estimate and a single read()
        // may return fewer bytes than requested — read in a loop instead, and
        // close the stream via try-with-resources.
        try (InputStream is = mContext.getResources().openRawResource(rawId)) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            byte[] chunk = new byte[1024];
            int len;
            while ((len = is.read(chunk)) != -1) {
                baos.write(chunk, 0, len);
            }
            return baos.toString("UTF-8");
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("加载着色器失败:" + e.getMessage());
        }
    }

    /** Compiles both shaders and links them into a GL program. */
    private int createOpenGLProgram(String vertexShaderCode, String fragmentShaderCode) {
        int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] == 0) {
            String errorLog = GLES20.glGetProgramInfoLog(program);
            GLES20.glDeleteProgram(program); // drop the invalid program
            throw new RuntimeException("OpenGL程序链接失败:" + errorLog);
        }
        // FIX: the shader objects are no longer needed once linked — delete
        // them so the GL driver can reclaim them (previously leaked).
        GLES20.glDeleteShader(vertexShader);
        GLES20.glDeleteShader(fragmentShader);
        return program;
    }

    /** Compiles one vertex or fragment shader. */
    private int compileShader(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] == 0) {
            String errorLog = GLES20.glGetShaderInfoLog(shader);
            GLES20.glDeleteShader(shader); // drop the invalid shader
            throw new RuntimeException("着色器编译失败:" + errorLog);
        }
        return shader;
    }

    /** Flips the black/white filter; takes effect on the next drawn frame. */
    public void toggleFilter() {
        mIsBlackWhite = !mIsBlackWhite;
    }

    /**
     * Exposes the SurfaceTexture so Camera2 can bind it as an output target.
     * Null until the GL thread has run onSurfaceCreated.
     */
    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }
}
// NOTE(review): stray pasted chat text ("it still has a problem — please take
// a look") converted to a comment so the file can compile. The concrete
// problems found: the renderer classes extended AppCompatActivity and called
// findViewById() on themselves (crash), CameraRenderer dereferenced a
// never-assigned `context` field (NPE), and MainActivity never set
// isTextureAvailable in the GLSurfaceView path, so openCamera() retried
// forever and the preview never started. See the FIX comments above.