AVFoundation学习笔记之——音视频的编辑
媒体数据的读取和写入
- AVAssetReader
AVAssetReader用于从AVAsset实例中读取媒体样本。通常会配置一个或多个AVAssetReaderOutput实例,并通过copyNextSampleBuffer方法访问音频样本和视频帧。它一次只能读取单个资源(asset)中的媒体样本,不能跨多个资源读取。
- AVAssetWriter
AVAssetWriter用于对媒体资源进行编码并将其写入到容器文件中。它由一个或多个AVAssetWriterInput对象配置,用于附加将包含写入容器的媒体样本的CMSampleBuffer对象。AVAssetWriter可以自动将不同轨道的媒体样本交错写入(interleaving),以优化文件的播放性能。可用于实时操作和离线操作两种情况。
- Demo例子
// Configure the AVAssetReader: pulls decoded 32BGRA video frames from the source movie.
NSURL * url = [[NSBundle mainBundle] URLForResource:@"Test" withExtension:@"mov"];
AVAsset * asset = [AVAsset assetWithURL:url];
AVAssetTrack * track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
NSError * readerError = nil;
self.assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&readerError];
// Cocoa convention: check the returned object for failure, not the NSError pointer.
if (!self.assetReader) {
    NSLog(@"assetReader error = %@", readerError.localizedDescription);
    return;
}
NSDictionary * readerOutputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
AVAssetReaderTrackOutput * trackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:track outputSettings:readerOutputSettings];
[self.assetReader addOutput:trackOutput];
[self.assetReader startReading];
// Configure the AVAssetWriter. The ".mov" extension matches AVFileTypeQuickTimeMovie
// (the original ".h264" extension was misleading — the output is a QuickTime container,
// not a raw H.264 elementary stream).
NSURL * outputURL = [[NSURL alloc] initFileURLWithPath:@"/Users/mac/Desktop/T/T/Writer.mov"];
NSError * error = nil;
self.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];
if (!self.assetWriter) {
    NSLog(@"assetWriter error = %@", error.localizedDescription);
    return;
}
NSDictionary * writeOutputSettings = @{
    AVVideoCodecKey: AVVideoCodecTypeH264,
    AVVideoWidthKey: @1280,
    AVVideoHeightKey: @720,
    AVVideoCompressionPropertiesKey: @{
        // Interval of 1 forces every frame to be a key frame; raise this for better compression.
        AVVideoMaxKeyFrameIntervalKey: @1,
        AVVideoAverageBitRateKey: @10500000,
        AVVideoProfileLevelKey: AVVideoProfileLevelH264Main31
    }
};
AVAssetWriterInput * writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:writeOutputSettings];
[self.assetWriter addInput:writerInput];
[self.assetWriter startWriting];
// Start the write session, then pump sample buffers from the reader into the writer.
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
// weak/strong dance: the input retains this block for as long as it wants data, and
// self retains the writer/input chain, so a strong capture of self would leak the cycle.
__weak typeof(self) weakSelf = self;
[writerInput requestMediaDataWhenReadyOnQueue:dispatch_queue_create("com.writerQueue", DISPATCH_QUEUE_SERIAL) usingBlock:^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf) {
        return;
    }
    BOOL complete = NO;
    while ([writerInput isReadyForMoreMediaData] && !complete) {
        CMSampleBufferRef sampleBuffer = [trackOutput copyNextSampleBuffer];
        if (sampleBuffer) {
            BOOL result = [writerInput appendSampleBuffer:sampleBuffer];
            // copyNextSampleBuffer follows the CF Create rule — the caller must release.
            CFRelease(sampleBuffer);
            complete = !result;
        } else {
            // NULL means either end-of-stream or a read failure; in both cases stop feeding.
            [writerInput markAsFinished];
            complete = YES;
        }
    }
    if (complete) {
        // Surface a reader-side failure instead of silently treating it as end-of-file.
        if (strongSelf.assetReader.status == AVAssetReaderStatusFailed) {
            NSLog(@"assetReader error = %@", strongSelf.assetReader.error.localizedDescription);
        }
        [strongSelf.assetWriter finishWritingWithCompletionHandler:^{
            AVAssetWriterStatus status = strongSelf.assetWriter.status;
            if (status == AVAssetWriterStatusCompleted) {
                NSLog(@"success");
            } else {
                NSLog(@"error - %@", strongSelf.assetWriter.error.localizedDescription);
            }
        }];
    }
}];
媒体的组合和编辑
通过AVFoundation框架实现媒体的组合和编辑,包括音频与音频、音频与视频、视频与视频等的组合和剪切等。
涉及到的类
AVAssetTrack
AVMutableComposition
AVMutableCompositionTrack
AVURLAsset
实现步骤:
1、创建需要编辑的音视频资源
2、创建媒体组合容器,添加音视频轨道
3、获取原音视频的轨道数据
4、向容器中添加需要的轨道数据
5、创建导出Session,配置参数导出到目标位置
// 1. Load the source assets to be edited; bail out early instead of force-unwrapping,
//    so a missing bundle resource does not crash at runtime.
guard let videoMov = Bundle.main.url(forResource: "Test", withExtension: "mov"),
      let audioMp3 = Bundle.main.url(forResource: "test", withExtension: "mp3") else {
    print("source resources not found")
    return
}
let videoMovAsset = AVURLAsset(url: videoMov)
let audioMp3Asset = AVURLAsset(url: audioMp3)
// 2. Create the composition container and add one video and one audio track.
let composition = AVMutableComposition()
let videoTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
// 3/4. Copy the first 10 seconds of the source video track into the composition.
let cursorTime = kCMTimeZero
let videoDuration = CMTimeMake(10, 1)
let videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoDuration)
if let sourceVideoTrack = videoMovAsset.tracks(withMediaType: AVMediaType.video).first {
    do {
        try videoTrack?.insertTimeRange(videoTimeRange, of: sourceVideoTrack, at: cursorTime)
    } catch let error {
        print(" video error = \(error.localizedDescription)")
    }
}
// Copy the first 10 seconds of the source audio track into the composition.
let audioDuration = CMTimeMake(10, 1)
let audioTimeRange = CMTimeRangeMake(kCMTimeZero, audioDuration)
if let sourceAudioTrack = audioMp3Asset.tracks(withMediaType: AVMediaType.audio).first {
    do {
        try audioTrack?.insertTimeRange(audioTimeRange, of: sourceAudioTrack, at: cursorTime)
    } catch let error {
        print("audio error = \(error.localizedDescription)")
    }
}
// 5. Export the composition to an MP4 file.
let outputUrl = URL(fileURLWithPath: "/Users/mac/Documents/iOSProject/AVFounctionStudy/AVFounctionStudy/edit.mp4")
// AVAssetExportSession fails if the destination already exists, so remove any previous output.
try? FileManager.default.removeItem(at: outputUrl)
guard let session = AVAssetExportSession(asset: composition, presetName: AVAssetExportPreset640x480) else {
    print("export session creation failed")
    return
}
session.outputFileType = AVFileType.mp4
session.outputURL = outputUrl
session.exportAsynchronously(completionHandler: {
    // Note: this handler runs on a background queue; dispatch to main before touching UI.
    if AVAssetExportSessionStatus.completed == session.status {
        print("导出成功")
    } else {
        print("导出失败 = \(session.error?.localizedDescription ?? "nil")")
    }
})