Adding Animated Watermarks to Video on iOS


1. Overview

  • This article walks through several ways to add an animated watermark to a video, with implementation code for each.

  • Compositing with AVFoundation + CoreAnimation

  • Lottie is built on CoreAnimation under the hood, so we can also composite with AVFoundation + Lottie

  • We can likewise use frame-sequence or GIF assets to drive a CAKeyframeAnimation; here we walk through an AVFoundation + GIF composite

  • Using GPUImageUIElement to blend frame-sequence assets onto the target video

  • Using GPUImage to blend a watermark video onto the target video

  • If you have questions, or any comments or suggestions on what follows, you can leave a comment at the end of the article, message me on Weibo at 阳眼的熊1993, email me at hu-yangyang@qq.com, or visit my GitHub.

  • A Demo link is given at the end of the article.

2. Processed-Video Previews (GIF)

(Previews: original video, CoreAnimation, Lottie, GIF, GPUImageType1, GPUImageType2)

3. Compositing with AVFoundation + CoreAnimation

#pragma mark CoreAnimation
+ (void)addWaterMarkTypeWithCorAnimationAndInputVideoURL:(NSURL*)InputURL WithCompletionHandler:(void (^)(NSURL* outPutURL, int code))handler{
    NSDictionary *opts = [NSDictionary dictionaryWithObject:@(YES) forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVAsset *videoAsset = [AVURLAsset URLAssetWithURL:InputURL options:opts];
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *errorVideo = nil;
    AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
    CMTime endTime = assetVideoTrack.asset.duration;
    BOOL bl = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetVideoTrack.asset.duration)
                                  ofTrack:assetVideoTrack
                                   atTime:kCMTimeZero error:&errorVideo];
    videoTrack.preferredTransform = assetVideoTrack.preferredTransform;
    NSLog(@"errorVideo:%ld inserted:%d", (long)errorVideo.code, bl);
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *outPutFileName = [formatter stringFromDate:[NSDate dateWithTimeIntervalSinceNow:0]];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",outPutFileName]];
    NSURL* outPutVideoUrl = [NSURL fileURLWithPath:myPathDocs];
    
    CGSize videoSize = [videoTrack naturalSize];
    
    UIFont *font = [UIFont systemFontOfSize:60.0];

    // Build one CATextLayer per character of "HELLO", laid out left to right
    // starting at x = 240, y = 470.
    NSMutableArray<CATextLayer *> *textLayers = [NSMutableArray array];
    CGFloat xOffset = 240;
    for (NSString *character in @[@"H", @"E", @"L", @"L", @"O"]) {
        CATextLayer *textLayer = [[CATextLayer alloc] init];
        textLayer.fontSize = 60;
        textLayer.string = character;
        textLayer.alignmentMode = kCAAlignmentCenter;
        textLayer.foregroundColor = [UIColor greenColor].CGColor;
        textLayer.backgroundColor = [UIColor clearColor].CGColor;
        CGSize textSize = [character sizeWithAttributes:@{NSFontAttributeName: font}];
        textLayer.frame = CGRectMake(xOffset, 470, textSize.width, textSize.height);
        textLayer.anchorPoint = CGPointMake(0.5, 1.0);
        [textLayers addObject:textLayer];
        xOffset += textSize.width;
    }

    // A repeating scale pulse; beginTime must be AVCoreAnimationBeginTimeAtZero
    // so the animation starts with the video timeline during export.
    CABasicAnimation* basicAni = [CABasicAnimation animationWithKeyPath:@"transform.scale"];
    basicAni.fromValue = @(0.2f);
    basicAni.toValue = @(1.0f);
    basicAni.beginTime = AVCoreAnimationBeginTimeAtZero;
    basicAni.duration = 2.0f;
    basicAni.repeatCount = HUGE_VALF;
    basicAni.removedOnCompletion = NO;
    basicAni.fillMode = kCAFillModeForwards;
    for (CATextLayer *textLayer in textLayers) {
        [textLayer addAnimation:basicAni forKey:nil];
    }
    
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    for (CATextLayer *textLayer in textLayers) {
        [parentLayer addSublayer:textLayer];
    }

    AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    parentLayer.geometryFlipped = YES;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
    AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, endTime);
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction, nil];
    videoComp.instructions = [NSArray arrayWithObject: instruction];
    
    
    AVAssetExportSession* exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = outPutVideoUrl;
    exporter.outputFileType = AVFileTypeQuickTimeMovie; // match the .mov output path
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = videoComp;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // Post-export handling goes here; do whatever you need with the file.
            NSLog(@"Output video path: %@ error: %@", myPathDocs, exporter.error);
            handler(outPutVideoUrl,(int)exporter.error.code);
        });
    }];
}
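For reference, a minimal call site for this method might look like the sketch below (assuming it lives in the WatermarkEngine class used later in the article; "demo.mov" is a placeholder bundle resource, and the handler receives the exporter's error code, so 0 means success):

// Hypothetical call site; "demo.mov" is a placeholder bundle resource.
NSURL *inputURL = [[NSBundle mainBundle] URLForResource:@"demo" withExtension:@"mov"];
[WatermarkEngine addWaterMarkTypeWithCorAnimationAndInputVideoURL:inputURL
                                            WithCompletionHandler:^(NSURL *outPutURL, int code) {
    if (code == 0) {
        NSLog(@"Watermarked video written to %@", outPutURL);
    } else {
        NSLog(@"Export failed with code %d", code);
    }
}];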

4. Compositing with AVFoundation + Lottie (Lottie is built on CoreAnimation)

  • The only part that differs from the Section 3 code (the shared tail is sketched after the snippet):

LOTAnimationView* animation = [LOTAnimationView animationNamed:@"青蛙"];
    animation.frame = CGRectMake(150 , 340 , 240 , 240 );
    animation.animationSpeed = 5.0 ;
    animation.loopAnimation = YES;
    [animation play];
    
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:animation.layer];
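From here the pipeline is identical to Section 3: hand parentLayer and videoLayer to the animation tool and export through the same instruction/exporter setup. For completeness, the shared tail:

// Same tail as Section 3: render the layer tree over the video during export.
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
parentLayer.geometryFlipped = YES;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];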


5. Compositing with AVFoundation + GIF (a CAKeyframeAnimation built from frame-sequence or GIF assets)

  • The difference from the Section 3 code is converting the GIF into a CAKeyframeAnimation on the layer's contents:

CALayer *gifLayer1 = [[CALayer alloc] init];
    gifLayer1.frame = CGRectMake(150 , 340 , 298 , 253 );
    CAKeyframeAnimation *gifLayer1Animation = [WatermarkEngine animationForGifWithURL:[[NSBundle mainBundle] URLForResource:@"雪人完成_1" withExtension:@"gif"]];
    gifLayer1Animation.beginTime = AVCoreAnimationBeginTimeAtZero;
    gifLayer1Animation.removedOnCompletion = NO;
    [gifLayer1 addAnimation:gifLayer1Animation forKey:@"gif"];
    
    
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:gifLayer1];
The helper that decodes the GIF into frames and delay times and builds the keyframe animation:

+ (CAKeyframeAnimation *)animationForGifWithURL:(NSURL *)url {
    
    CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
    
    NSMutableArray * frames = [NSMutableArray new];
    NSMutableArray *delayTimes = [NSMutableArray new];
    
    CGFloat totalTime = 0.0;
    CGFloat gifWidth;
    CGFloat gifHeight;
    
    CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
    
    // get frame count
    size_t frameCount = CGImageSourceGetCount(gifSource);
    for (size_t i = 0; i < frameCount; ++i) {
        // get each frame
        CGImageRef frame = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
        [frames addObject:(__bridge id)frame];
        CGImageRelease(frame);
        
        // get gif info with each frame
        NSDictionary *dict = (NSDictionary*)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(gifSource, i, NULL));
        NSLog(@"kCGImagePropertyGIFDictionary %@", [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary]);
        
        // get gif size
        gifWidth = [[dict valueForKey:(NSString*)kCGImagePropertyPixelWidth] floatValue];
        gifHeight = [[dict valueForKey:(NSString*)kCGImagePropertyPixelHeight] floatValue];
        
        // Within kCGImagePropertyGIFDictionary, kCGImagePropertyGIFDelayTime and kCGImagePropertyGIFUnclampedDelayTime hold the same value here
        NSDictionary *gifDict = [dict valueForKey:(NSString*)kCGImagePropertyGIFDictionary];
        [delayTimes addObject:[gifDict valueForKey:(NSString*)kCGImagePropertyGIFUnclampedDelayTime]];
        
        totalTime = totalTime + [[gifDict valueForKey:(NSString*)kCGImagePropertyGIFUnclampedDelayTime] floatValue];
        
    }
    if (gifSource) {
        CFRelease(gifSource);
    }
    
    NSMutableArray *times = [NSMutableArray arrayWithCapacity:3];
    CGFloat currentTime = 0;
    NSInteger count = delayTimes.count;
    for (int i = 0; i < count; ++i) {
        [times addObject:[NSNumber numberWithFloat:(currentTime / totalTime)]];
        currentTime += [[delayTimes objectAtIndex:i] floatValue];
    }
    
    NSMutableArray *images = [NSMutableArray arrayWithCapacity:3];
    for (int i = 0; i < count; ++i) {
        [images addObject:[frames objectAtIndex:i]];
    }
    
    animation.keyTimes = times;
    animation.values = images;
    animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    animation.duration = totalTime;
    animation.repeatCount = HUGE_VALF;
    
    return animation;
}
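One defensive tweak worth considering: some GIFs report an unclamped delay of 0 or omit it entirely, in which case falling back to the clamped kCGImagePropertyGIFDelayTime and flooring tiny values is safer. A sketch of a more robust delay lookup (an addition, not part of the original helper):

// Prefer the unclamped delay; fall back to the clamped delay when it is
// missing or zero, and floor very small values at a sane minimum.
NSDictionary *gifDict = [dict valueForKey:(NSString *)kCGImagePropertyGIFDictionary];
NSNumber *delay = [gifDict valueForKey:(NSString *)kCGImagePropertyGIFUnclampedDelayTime];
if (delay == nil || delay.floatValue <= 0.0f) {
    delay = [gifDict valueForKey:(NSString *)kCGImagePropertyGIFDelayTime];
}
CGFloat delaySeconds = MAX(delay.floatValue, 0.02f);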

6. Blending a Watermark Video onto the Target with GPUImage

#pragma mark GPUImage TWO VIDEO INPUT
+ (void)addWaterMarkTypeWithGPUImageAndInputVideoURL:(NSURL*)InputURL AndWaterMarkVideoURL:(NSURL*)InputURL2 WithCompletionHandler:(void (^)(NSURL* outPutURL, int code))handler{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *outPutFileName = [formatter stringFromDate:[NSDate dateWithTimeIntervalSinceNow:0]];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",outPutFileName]];
    NSURL* outPutVideoUrl = [NSURL fileURLWithPath:myPathDocs];
    
    GPUImageMovie* movieFile = [[GPUImageMovie alloc] initWithURL:InputURL];
    GPUImageMovie* movieFile2 = [[GPUImageMovie alloc] initWithURL:InputURL2];
    GPUImageScreenBlendFilter* filter =  [[GPUImageScreenBlendFilter alloc] init];
    [movieFile addTarget:filter];
    [movieFile2 addTarget:filter];
    
    GPUImageMovieWriter* movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:outPutVideoUrl size:CGSizeMake(540, 960) fileType:AVFileTypeQuickTimeMovie outputSettings:    @
                                        {
                                        AVVideoCodecKey: AVVideoCodecH264,
                                        AVVideoWidthKey: @540,   //Set your resolution width here
                                        AVVideoHeightKey: @960,  //set your resolution height here
                                        AVVideoCompressionPropertiesKey: @
                                            {
                                                // e.g. 2000*1000; a range of 800*1000 to 5000*1000 is recommended
                                                //AVVideoAverageBitRateKey: @2500000, // Give your bitrate here for lower size give low values
                                            AVVideoAverageBitRateKey: @5000000,
                                            AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
                                            AVVideoAverageNonDroppableFrameRateKey: @30,
                                            },
                                        }
                                        ];
    [filter  addTarget:movieWriter];
    AVAsset* videoAsset = [AVAsset assetWithURL:InputURL];
    AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
    movieWriter.transform = assetVideoTrack.preferredTransform;
    //    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];
    [movieWriter startRecording];
    [movieFile startProcessing];
    [movieFile2 startProcessing];
    __weak GPUImageMovieWriter *weakWriter = movieWriter;
    [movieWriter setCompletionBlock:^{
        // Detach the writer and finalize the output file before reporting back.
        [filter removeTarget:weakWriter];
        [weakWriter finishRecording];
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"movieWriter Completion");
            handler(outPutVideoUrl,1);
        });
    }];
}
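A hypothetical call site ("input.mov" and "watermark.mov" are placeholder bundle resources). Note that GPUImageScreenBlendFilter brightens the base with the overlay, so it suits watermark clips on a black background; clips carrying an alpha channel would need a different blend:

// Hypothetical call site; both resource names are placeholders.
NSURL *inputURL = [[NSBundle mainBundle] URLForResource:@"input" withExtension:@"mov"];
NSURL *watermarkURL = [[NSBundle mainBundle] URLForResource:@"watermark" withExtension:@"mov"];
[WatermarkEngine addWaterMarkTypeWithGPUImageAndInputVideoURL:inputURL
                                         AndWaterMarkVideoURL:watermarkURL
                                        WithCompletionHandler:^(NSURL *outPutURL, int code) {
    NSLog(@"Merged video at %@ (code %d)", outPutURL, code);
}];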

7. Blending Frame-Sequence Assets onto the Target with GPUImageUIElement

#pragma mark GPUImageUIElement
+ (void)addWaterMarkTypeWithGPUImageUIElementAndInputVideoURL:(NSURL*)InputURL WithCompletionHandler:(void (^)(NSURL* outPutURL, int code))handler{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *outPutFileName = [formatter stringFromDate:[NSDate dateWithTimeIntervalSinceNow:0]];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",outPutFileName]];
    NSURL* outPutVideoUrl = [NSURL fileURLWithPath:myPathDocs];
    
    GPUImageMovie* movieFile = [[GPUImageMovie alloc] initWithURL:InputURL];

    NSValue *value = [NSValue valueWithCGRect:CGRectMake([UIScreen mainScreen].bounds.size.width/2.0 - (332 /2.0) , [UIScreen mainScreen].bounds.size.height/2.0 - (297 /2.0) , 332 , 297 )];
    NSValue *value2 = [NSValue valueWithCGAffineTransform:CGAffineTransformMake(1, 0, 0, 1, 0, 0)];
    UIView* view = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 1, 1)];
    GPUImageFilterGroup* filter =  [WatermarkEngine addWatermarkWithResourcesNames:@[@"雨天青蛙"] Andframes:@[value] AndTransform:@[value2] AndLabelViews:@[view]];
    [movieFile addTarget:filter];

    GPUImageMovieWriter* movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:outPutVideoUrl size:CGSizeMake(540, 960) fileType:AVFileTypeQuickTimeMovie outputSettings:    @
                                        {
                                        AVVideoCodecKey: AVVideoCodecH264,
                                        AVVideoWidthKey: @540,   //Set your resolution width here
                                        AVVideoHeightKey: @960,  //set your resolution height here
                                        AVVideoCompressionPropertiesKey: @
                                            {
                                                // e.g. 2000*1000; a range of 800*1000 to 5000*1000 is recommended
                                                //AVVideoAverageBitRateKey: @2500000, // Give your bitrate here for lower size give low values
                                            AVVideoAverageBitRateKey: @5000000,
                                            AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
                                            AVVideoAverageNonDroppableFrameRateKey: @30,
                                            },
                                        }
                                        ];
    [filter  addTarget:movieWriter];
    AVAsset* videoAsset = [AVAsset assetWithURL:InputURL];
    AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo]firstObject];
    movieWriter.transform = assetVideoTrack.preferredTransform;
    //    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];
    [movieWriter startRecording];
    [movieFile startProcessing];
    __weak GPUImageMovieWriter *weakWriter = movieWriter;
    [movieWriter setCompletionBlock:^{
        // Detach the writer and finalize the output file before reporting back.
        [filter removeTarget:weakWriter];
        [weakWriter finishRecording];
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"movieWriter Completion");
            handler(outPutVideoUrl,1);
        });
    }];
}
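A call site sketch for this variant ("input.mov" is a placeholder). The helper below expects the PNG sequence to be named 雨天青蛙_00000.png through 雨天青蛙_00089.png in the main bundle, inferred from the %@_%05d format string and the wrap at index 89:

// Hypothetical call site; "input.mov" is a placeholder bundle resource.
NSURL *inputURL = [[NSBundle mainBundle] URLForResource:@"input" withExtension:@"mov"];
[WatermarkEngine addWaterMarkTypeWithGPUImageUIElementAndInputVideoURL:inputURL
                                                 WithCompletionHandler:^(NSURL *outPutURL, int code) {
    NSLog(@"Watermarked video at %@ (code %d)", outPutURL, code);
}];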
+ (GPUImageFilterGroup*) addWatermarkWithResourcesNames:(NSArray* )resourcesNames Andframes:(NSArray*)frams AndTransform:(NSArray*)transforms AndLabelViews:(NSArray*)labelViews{
  __block int currentPicIndex = 0;
  CGFloat width = CGRectGetWidth([UIScreen mainScreen].bounds);
  UIView* temp = [[UIView alloc] initWithFrame:[UIScreen mainScreen].bounds];
  [temp setContentScaleFactor:[[UIScreen mainScreen] scale]];
  __block UIImageView* waterImageView1 = [[UIImageView alloc] init];
  __block UIImageView* waterImageView2 = [[UIImageView alloc] init];
  __block UIImageView* waterImageView3 = [[UIImageView alloc] init];
  for (int index = 0 ; index < resourcesNames.count ; index++) {
    if (index == 0) {
      waterImageView1.frame = [frams[index] CGRectValue];
      UIImage* tempImage = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@_%05d",resourcesNames[index],currentPicIndex] ofType:@"png"]];
      waterImageView1.image = tempImage;
      waterImageView1.transform = [transforms[index] CGAffineTransformValue];
      [temp addSubview:waterImageView1];
      [temp addSubview:labelViews[index]];
    }else if (index == 1){
      waterImageView2.frame = [frams[index] CGRectValue];
      UIImage* tempImage = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@_%05d",resourcesNames[index],currentPicIndex] ofType:@"png"]];
      waterImageView2.image = tempImage;
      waterImageView2.transform = [transforms[index] CGAffineTransformValue];
      [temp addSubview:waterImageView2];
      [temp addSubview:labelViews[index]];
    }else{
      waterImageView3.frame = [frams[index] CGRectValue];
      UIImage* tempImage = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@_%05d",resourcesNames[index],currentPicIndex] ofType:@"png"]];
      waterImageView3.image = tempImage;
      waterImageView3.transform = [transforms[index] CGAffineTransformValue];
      [temp addSubview:waterImageView3];
      [temp addSubview:labelViews[index]];
    }
  }

  GPUImageFilterGroup* filterGroup = [[GPUImageFilterGroup alloc] init];

  GPUImageUIElement *uiElement = [[GPUImageUIElement alloc] initWithView:temp];
  GPUImageTwoInputFilter* blendFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:[WatermarkEngine loadShader:@"AlphaBlend_Normal" extension:@"frag"]];
  GPUImageFilter* filter = [[GPUImageFilter alloc] init];
  GPUImageFilter* uiFilter = [[GPUImageFilter alloc] init];
  [uiElement addTarget:uiFilter];
//  [uiFilter setInputRotation:kGPUImageRotateLeft atIndex:0];
  
  [filter addTarget:blendFilter];
  [uiFilter addTarget:blendFilter];
  
  
  [filterGroup addFilter:filter];
  [filterGroup addFilter:uiFilter];
  [filterGroup addFilter:blendFilter];

  [filterGroup setInitialFilters:@[filter]];
  [filterGroup setTerminalFilter:blendFilter];
  [filter setFrameProcessingCompletionBlock:^(GPUImageOutput * filter, CMTime frameTime) {
    currentPicIndex += 1;
    
    for (int index = 0 ; index < resourcesNames.count ; index++) {
      if (index == 0) {
        waterImageView1.image = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@_%05d",resourcesNames[index],currentPicIndex] ofType:@"png"]];
      }else if (index == 1){
        waterImageView2.image = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@_%05d",resourcesNames[index],currentPicIndex] ofType:@"png"]];
      }else{
        waterImageView3.image = [UIImage imageWithContentsOfFile: [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@_%05d",resourcesNames[index],currentPicIndex] ofType:@"png"]];
      }
      
      
    }
    
    
    // Assumption: the PNG sequence has 90 frames (indices 00000-00089), so wrap here.
    if (currentPicIndex == 89) {
      currentPicIndex = 0;
    }
    
    [uiElement update];
  }];
  
  return filterGroup;
  
}
+ (NSString * _Nonnull)loadShader:(NSString *)name extension:(NSString *)extenstion {
    NSURL *url = [[NSBundle mainBundle] URLForResource:name withExtension:extenstion];
    return [NSString stringWithContentsOfURL:url encoding:NSUTF8StringEncoding error:nil];
}
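The Demo loads AlphaBlend_Normal.frag from the bundle; the article doesn't reproduce its contents, but a standard source-over alpha blend for a GPUImageTwoInputFilter would look roughly like this sketch (an assumption, not necessarily the Demo's exact shader). GPUImage's SHADER_STRING macro turns the GLSL into an NSString:

// A guess at what AlphaBlend_Normal.frag contains: a source-over blend of
// the UI element (texture 2) onto the video frame (texture 1).
NSString *const kAlphaBlendNormalFragment = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 varying highp vec2 textureCoordinate2;
 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2;

 void main()
 {
     lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
     lowp vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
     gl_FragColor = vec4(mix(base.rgb, overlay.rgb, overlay.a), base.a);
 }
);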

8. Demo link:

Original article: [iOS] 视频添加动效水印实现介绍 - 简书
