1、设置 Capture（创建 Metal 纹理缓存并启动相机采集会话）
// Creates the Metal texture cache used to wrap camera pixel buffers as
// MTLTextures, uploads the YCbCr->RGB conversion constants, and starts an
// AVCaptureSession that delivers 420v (bi-planar, video-range) frames to
// this object on a private serial queue.
- (void)_setupCapture
{
    // Zero-copy bridge from CVPixelBuffer planes to MTLTextures.
    CVReturn cacheStatus = CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_textureCache);
    if (cacheStatus != kCVReturnSuccess)
    {
        NSLog(@"CVMetalTextureCacheCreate failed: %d", (int)cacheStatus);
        return;
    }

    // BT.601 video-range YCbCr -> RGB (column-major, as Metal expects).
    ColorConversion colorConversion = {
        .matrix = {
            .columns[0] = { 1.164, 1.164, 1.164, },   // Y contribution
            .columns[1] = { 0.000, -0.392, 2.017, },  // Cb contribution
            .columns[2] = { 1.596, -0.813, 0.000, },  // Cr contribution
        },
        // Video-range luma starts at 16/255; chroma planes are centered at 0.5.
        .offset = { -(16.0/255.0), -0.5, -0.5 },
    };
    _colorConversionBuffer = [_device newBufferWithBytes:&colorConversion
                                                  length:sizeof(colorConversion)
                                                 options:MTLResourceOptionCPUCacheModeDefault];

    _captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (_captureDevice == nil)
    {
        return; // no camera (e.g. simulator)
    }

    _captureSession = [[AVCaptureSession alloc] init];

    // FIX: the original passed error:nil and never checked the input; a failed
    // initialization (camera in use, permission denied) would crash addInput:.
    NSError *inputError = nil;
    AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice
                                                                         error:&inputError];
    if (input == nil || ![_captureSession canAddInput:input])
    {
        NSLog(@"Unable to add capture input: %@", inputError);
        return;
    }
    [_captureSession addInput:input];

    _captureQueue = dispatch_queue_create("captureQueue", DISPATCH_QUEUE_SERIAL);

    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // 420v: plane 0 = Y, plane 1 = interleaved CbCr — matches the two-texture
    // shader path that samples textureY / textureCbCr separately.
    videoOutput.videoSettings = @{
        (NSString*)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
    };
    [videoOutput setSampleBufferDelegate:self queue:_captureQueue];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    [_captureSession startRunning];
}
2、遵循 AVCaptureVideoDataOutputSampleBufferDelegate 协议，并实现其回调方法：
// Wraps one plane of the camera pixel buffer as an MTLTexture via the
// texture cache. Returns nil if the cache cannot create the texture.
// NOTE(review): the MTLTexture from CVMetalTextureGetTexture is retained by
// ARC, but the backing IOSurface is only guaranteed valid while the frame's
// pixel buffer lives — confirm the renderer consumes it promptly.
- (id<MTLTexture>)_textureFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
                               planeIndex:(size_t)planeIndex
                              pixelFormat:(MTLPixelFormat)pixelFormat
{
    size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);
    size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
    CVMetalTextureRef texture = NULL;
    CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, _textureCache, pixelBuffer, NULL, pixelFormat, width, height, planeIndex, &texture);
    id<MTLTexture> result = nil;
    if (status == kCVReturnSuccess)
    {
        result = CVMetalTextureGetTexture(texture);
        CFRelease(texture);
    }
    return result;
}

// AVCaptureVideoDataOutputSampleBufferDelegate callback (runs on _captureQueue).
// Extracts the luma and chroma planes of each 420v frame as Metal textures
// and publishes the pair to the renderer on the main queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL)
    {
        // FIX: the original used the buffer unchecked; samples without an
        // image buffer would have been passed to CoreVideo as NULL.
        return;
    }
    // Plane 0: Y (one 8-bit channel). Plane 1: interleaved CbCr (two channels).
    id<MTLTexture> textureY = [self _textureFromPixelBuffer:pixelBuffer
                                                 planeIndex:0
                                                pixelFormat:MTLPixelFormatR8Unorm];
    id<MTLTexture> textureCbCr = [self _textureFromPixelBuffer:pixelBuffer
                                                    planeIndex:1
                                                   pixelFormat:MTLPixelFormatRG8Unorm];
    if (textureY != nil && textureCbCr != nil)
    {
        // Assign both textures in one main-queue hop so the renderer never
        // pairs a Y plane from one frame with chroma from another.
        dispatch_async(dispatch_get_main_queue(), ^{
            _textureY = textureY;
            _textureCbCr = textureCbCr;
        });
    }
}
3、Shader 中使用（Metal 片元着色器示例）：
// Samples the bi-planar camera textures, converts YCbCr -> RGB with the
// matrix/offset supplied in colorConversion, and dims the result to 85%.
fragment float4 fragment_Glass1(ProjectedVertex vert [[stage_in]],
                                texture2d<float, access::sample> textureY [[ texture(0) ]],
                                texture2d<float, access::sample> textureCbCr [[ texture(1) ]],
                                constant ColorConversion1 &colorConversion [[ buffer(0) ]])
{
    constexpr sampler s(address::repeat, filter::linear);
    // FIX: `ycbcr` was assigned without a declaration, which does not compile
    // in MSL. Reconstruct the YCbCr triple from the luma and chroma planes.
    float3 ycbcr = float3(textureY.sample(s, vert.texCoords.xy).r,
                          textureCbCr.sample(s, vert.texCoords.xy).rg);
    float3 rgb = colorConversion.matrix * (ycbcr + colorConversion.offset);
    return float4(rgb, 1.0) * 0.85;
}