iOS: Compositing Multiple Images into a Single Video

This post describes one way to composite multiple images into a video in an iOS app. The code is written in Objective-C and uses the AVFoundation framework to create the video file; it also includes a simple way to play back the composited video. The sections below show how to set the video parameters, create the video writer, prepare the image resources, and perform the composition.
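The code below references an image array (imageArr) and an output-path property (theVideoPath) on the view controller, plus the AVFoundation and MediaPlayer frameworks, without declaring them in the excerpt. A minimal sketch of the declarations it relies on (the names are taken from the code; the class name ViewController and the class-extension layout are my assumption):

#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>

@interface ViewController ()
{
    NSMutableArray *imageArr;                  // images to be written into the video
}
@property (nonatomic, copy) NSString *theVideoPath;   // path of the generated .mov file
@end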


- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    imageArr = [[NSMutableArray alloc] initWithObjects:
                [UIImage imageNamed:@"1"], [UIImage imageNamed:@"2.jpg"], [UIImage imageNamed:@"3.jpg"],
                [UIImage imageNamed:@"4.jpg"], [UIImage imageNamed:@"5.jpg"], [UIImage imageNamed:@"6.jpg"],
                [UIImage imageNamed:@"7.jpg"], [UIImage imageNamed:@"8.jpg"], [UIImage imageNamed:@"9.jpg"],
                [UIImage imageNamed:@"10.jpg"], [UIImage imageNamed:@"11.jpg"], [UIImage imageNamed:@"12.jpg"],
                [UIImage imageNamed:@"13.jpg"], [UIImage imageNamed:@"14.jpg"], [UIImage imageNamed:@"15"],
                [UIImage imageNamed:@"16"], [UIImage imageNamed:@"17"], [UIImage imageNamed:@"18"],
                [UIImage imageNamed:@"19"], [UIImage imageNamed:@"20"], [UIImage imageNamed:@"21"],
                [UIImage imageNamed:@"22"], [UIImage imageNamed:@"23"], [UIImage imageNamed:@"24"],
                [UIImage imageNamed:@"25"], [UIImage imageNamed:@"26"], [UIImage imageNamed:@"27"],
                [UIImage imageNamed:@"28"], [UIImage imageNamed:@"29"], [UIImage imageNamed:@"30"],
                [UIImage imageNamed:@"31"], nil];

    // "合成" (compose) button kicks off the composition
    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button setFrame:CGRectMake(100, 100, 100, 100)];
    [button setTitle:@"合成" forState:UIControlStateNormal];
    [button addTarget:self action:@selector(testCompressionSession) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:button];

    // "播放" (play) button plays back the generated movie
    UIButton *button1 = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button1 setFrame:CGRectMake(100, 200, 100, 100)];
    [button1 setTitle:@"播放" forState:UIControlStateNormal];
    [button1 addTarget:self action:@selector(playAction) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:button1];
}

- (void)testCompressionSession
{
    NSLog(@"开始");
    //NSString *moviePath = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"mov"];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [[paths objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", @"test"]];
    self.theVideoPath = moviePath;
    CGSize size = CGSizeMake(320, 400); // output video dimensions

//    [self writeImages:imageArr ToMovieAtPath:moviePath withSize:size inDuration:4 byFPS:30]; // second approach, see below

    NSError *error = nil;

    unlink([moviePath UTF8String]); // remove any previous file at this path
    NSLog(@"path->%@", moviePath);
    // initialize the writer (compression engine)
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);
   
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"can add writerInput");
    else
        NSLog(@"cannot add writerInput");

    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
   
    // compose the images into a single video file
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            // each image is held for 10 frames (one second at the timescale of 10 used below)
            if (++frame >= [imageArr count] * 10)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
//                [videoWriter finishWritingWithCompletionHandler:nil];
                break;
            }

            CVPixelBufferRef buffer = NULL;

            int idx = frame / 10;
            NSLog(@"idx==%d", idx);

            buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[[imageArr objectAtIndex:idx] CGImage] size:size];

            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"OK");
                CFRelease(buffer);
            }
        }
    }];
}
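One caveat: -finishWriting has been deprecated since iOS 6 in favor of the asynchronous -finishWritingWithCompletionHandler: hinted at by the commented-out line above. A minimal sketch of what the finishing step inside the block could look like with the newer call (same writerInput, videoWriter, and moviePath as above):

            // replaces the two finishing lines inside the requestMediaData block
            [writerInput markAsFinished];
            [videoWriter finishWritingWithCompletionHandler:^{
                // invoked once the .mov file has been fully written
                NSLog(@"finished writing %@", moviePath);
            }];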

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
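Note that the CGContextDrawImage call above draws each image at its native pixel size, so images that are not exactly 320x400 end up cropped or only partially covering the frame. If you want every image stretched to fill the video frame instead, one possible variation (my suggestion, not part of the original code) is to draw into the buffer-sized rect:

    // scale the image to cover the whole pixel buffer (may distort the aspect ratio)
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);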

- (void)playAction
{
    MPMoviePlayerViewController *theMovie = [[MPMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:self.theVideoPath]];
    [self presentMoviePlayerViewControllerAnimated:theMovie];
    theMovie.moviePlayer.movieSourceType = MPMovieSourceTypeFile;
    [theMovie.moviePlayer play];
}
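MPMoviePlayerViewController was deprecated in iOS 9, so on newer systems playback is usually done with AVKit instead. A minimal sketch of an equivalent play action (assumes #import <AVKit/AVKit.h>; the method name is mine):

- (void)playWithAVPlayerViewController
{
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:self.theVideoPath]];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play];
    }];
}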


// Second approach
- (void)writeImages:(NSArray *)imagesArray ToMovieAtPath:(NSString *)path withSize:(CGSize)size
         inDuration:(float)duration byFPS:(int32_t)fps
{
    // Wire the writer:
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
   
    // Write some samples:
    CVPixelBufferRef buffer = NULL;

    int frameCount = 0;

    int imagesCount = (int)[imagesArray count];
    float averageTime = duration / imagesCount;      // seconds each image stays on screen
    int averageFrame = (int)(averageTime * fps);      // frames per image at the given fps

    for (UIImage *img in imagesArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage] size:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                printf("appending %d attempt %d\n", frameCount, j);

                CMTime frameTime = CMTimeMake(frameCount, (int32_t)fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d, kRecordingFPS:%d, frameSeconds:%f", frameCount, fps, frameSeconds);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                if (buffer)
                    [NSThread sleepForTimeInterval:0.05];
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        if (buffer) {
            // release the buffer created by pixelBufferFromCGImage: (the adaptor keeps its own reference)
            CVPixelBufferRelease(buffer);
        }

        frameCount = frameCount + averageFrame;
    }

    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"finishWriting");
}
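For reference, the commented-out line in testCompressionSession shows how this second method would be invoked with the same Documents-directory output path, e.g. spreading the 31 images over 4 seconds at 30 fps:

    [self writeImages:imageArr ToMovieAtPath:moviePath withSize:CGSizeMake(320, 400) inDuration:4 byFPS:30];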

A ready-made library for this is also worth a look: https://github.com/HarrisonJackson/HJImagesToVideo

