// 对静态图像进行模糊处理
- (UIImage *)blurImage:(CGImageRef)srcImg {
    // Create a CIContext. Disabling color management (working color space =
    // [NSNull null]) skips color matching and improves performance.
    NSDictionary *options = @{ kCIContextWorkingColorSpace : [NSNull null] };
    CIContext *context = [CIContext contextWithOptions:options];
    CIImage *inputImage = [[CIImage alloc] initWithCGImage:srcImg];
    // CIGaussianBlur is the only blur filter available on iOS (requires
    // iOS 6.0+). inputRadius: default 10, maximum 100.
    CIFilter *blurFilter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [blurFilter setValue:inputImage forKey:kCIInputImageKey];
    // FIX: the original string literal ended with a smart quote (@"inputRadius”),
    // which does not compile. Use the framework constant instead of a raw string.
    [blurFilter setValue:@10.0f forKey:kCIInputRadiusKey];
    CIImage *blurredImage = [blurFilter valueForKey:kCIOutputImageKey];
    CGImageRef resultCGImage = [context createCGImage:blurredImage fromRect:[blurredImage extent]];
    UIImage *resultImage = [UIImage imageWithCGImage:resultCGImage];
    // FIX: release the CGImage we created above. The original released an
    // undefined variable `mDestImg`, leaking the createCGImage: result.
    CGImageRelease(resultCGImage);
    [inputImage release]; // MRC: balances the alloc/init above
    return resultImage;
}
// Blur the live camera feed.
// Camera capture is implemented with AVFoundation; this class adopts the
// AVCaptureVideoDataOutputSampleBufferDelegate protocol, so this method is
// called once per captured video frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
// Get the pixel buffer backing this frame.
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address before reading the pixel memory; unlocked at the end.
CVPixelBufferLockBaseAddress(imageBuffer, 0);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Cache the color space across frames to avoid a create/release per call.
// NOTE(review): the cached CGColorSpaceRef is intentionally never released
// (it lives for the lifetime of the process).
static CGColorSpaceRef colorSpace = nil;
if (colorSpace == nil) {
colorSpace = CGColorSpaceCreateDeviceRGB();
}
// Wrap the raw pixel data in a bitmap context. The byte-order/alpha flags
// assume the capture output delivers kCVPixelFormatType_32BGRA frames —
// TODO confirm against the AVCaptureVideoDataOutput videoSettings.
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Blur the captured frame.
UIImage *resultUIImage = [self blurImage:quartzImage];
// Update some UIImageView's image here (hide the camera preview layer and
// overlay a UIImageView that displays the filtered frames).
// NOTE(review): this delegate fires on the capture queue — the UIImageView
// update must be dispatched to the main thread.
...
CGImageRelease(quartzImage);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}
参考:
1. Core Image Programming Guide
https://developer.apple.com/library/ios/documentation/GraphicsImaging/Conceptual/CoreImaging/ci_performance/ci_performance.html#//apple_ref/doc/uid/TP30001185-CH10-SW2
2. Core Image Filter Reference
https://developer.apple.com/library/ios/documentation/GraphicsImaging/Reference/CoreImageFilterReference/Reference/reference.html#//apple_ref/doc/uid/TP40004346
- (UIImage *)blurImage:(CGImageRef)srcImg {
    // Create a CIContext. Disabling color management (working color space =
    // [NSNull null]) skips color matching and improves performance.
    NSDictionary *options = @{ kCIContextWorkingColorSpace : [NSNull null] };
    CIContext *context = [CIContext contextWithOptions:options];
    CIImage *inputImage = [[CIImage alloc] initWithCGImage:srcImg];
    // CIGaussianBlur is the only blur filter available on iOS (requires
    // iOS 6.0+). inputRadius: default 10, maximum 100.
    CIFilter *blurFilter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [blurFilter setValue:inputImage forKey:kCIInputImageKey];
    // FIX: the original string literal ended with a smart quote (@"inputRadius”),
    // which does not compile. Use the framework constant instead of a raw string.
    [blurFilter setValue:@10.0f forKey:kCIInputRadiusKey];
    CIImage *blurredImage = [blurFilter valueForKey:kCIOutputImageKey];
    CGImageRef resultCGImage = [context createCGImage:blurredImage fromRect:[blurredImage extent]];
    UIImage *resultImage = [UIImage imageWithCGImage:resultCGImage];
    // FIX: release the CGImage we created above. The original released an
    // undefined variable `mDestImg`, leaking the createCGImage: result.
    CGImageRelease(resultCGImage);
    [inputImage release]; // MRC: balances the alloc/init above
    return resultImage;
}
// Blur the live camera feed.
// Camera capture is implemented with AVFoundation; this class adopts the
// AVCaptureVideoDataOutputSampleBufferDelegate protocol, so this method is
// called once per captured video frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
// Get the pixel buffer backing this frame.
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address before reading the pixel memory; unlocked at the end.
CVPixelBufferLockBaseAddress(imageBuffer, 0);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Cache the color space across frames to avoid a create/release per call.
// NOTE(review): the cached CGColorSpaceRef is intentionally never released
// (it lives for the lifetime of the process).
static CGColorSpaceRef colorSpace = nil;
if (colorSpace == nil) {
colorSpace = CGColorSpaceCreateDeviceRGB();
}
// Wrap the raw pixel data in a bitmap context. The byte-order/alpha flags
// assume the capture output delivers kCVPixelFormatType_32BGRA frames —
// TODO confirm against the AVCaptureVideoDataOutput videoSettings.
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Blur the captured frame.
UIImage *resultUIImage = [self blurImage:quartzImage];
// Update some UIImageView's image here (hide the camera preview layer and
// overlay a UIImageView that displays the filtered frames).
// NOTE(review): this delegate fires on the capture queue — the UIImageView
// update must be dispatched to the main thread.
...
CGImageRelease(quartzImage);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}
参考:
1. Core Image Programming Guide
https://developer.apple.com/library/ios/documentation/GraphicsImaging/Conceptual/CoreImaging/ci_performance/ci_performance.html#//apple_ref/doc/uid/TP30001185-CH10-SW2
2. Core Image Filter Reference
https://developer.apple.com/library/ios/documentation/GraphicsImaging/Reference/CoreImageFilterReference/Reference/reference.html#//apple_ref/doc/uid/TP40004346