The sample code below omits some of the memory-management and notification-removal code.
1. Create the composition. Create a composition, then add an audio track and a video track to it.

```objc
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
```

2. Add the assets. Grab two video tracks and one audio track from the source assets, insert the two video tracks back to back into the video composition track created above, and insert the audio track into the audio composition track.

```objc
AVAsset *firstVideoAsset = <#First AVAsset with at least one video track#>;
AVAsset *secondVideoAsset = <#Second AVAsset with at least one video track#>;
AVAsset *audioAsset = <#AVAsset with at least one audio track#>;
AVAssetTrack *firstVideoAssetTrack = [[firstVideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *secondVideoAssetTrack = [[secondVideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstVideoAssetTrack.timeRange.duration) ofTrack:firstVideoAssetTrack atTime:kCMTimeZero error:nil];
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondVideoAssetTrack.timeRange.duration) ofTrack:secondVideoAssetTrack atTime:firstVideoAssetTrack.timeRange.duration error:nil];
[audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstVideoAssetTrack.timeRange.duration, secondVideoAssetTrack.timeRange.duration)) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
```

3. Check the composition's orientations. After adding the audio track and the video tracks to the composition, you must also make sure that all video tracks in it share the same orientation. By default a video track is assumed to be in landscape mode, so if a track that was captured in portrait mode is added here, the exported video will come out with the wrong orientation. Likewise, if you try to merge and export a landscape video together with a portrait video, the export session will fail.

```objc
BOOL isFirstVideoAssetPortrait = NO;
CGAffineTransform firstTransform = firstVideoAssetTrack.preferredTransform;
// Check the first video track's preferred transform to determine if it was recorded in portrait mode.
if (firstTransform.a == 0 && firstTransform.d == 0 && (firstTransform.b == 1.0 || firstTransform.b == -1.0) && (firstTransform.c == 1.0 || firstTransform.c == -1.0)) {
    isFirstVideoAssetPortrait = YES;
}
BOOL isSecondVideoAssetPortrait = NO;
CGAffineTransform secondTransform = secondVideoAssetTrack.preferredTransform;
// Check the second video track's preferred transform to determine if it was recorded in portrait mode.
if (secondTransform.a == 0 && secondTransform.d == 0 && (secondTransform.b == 1.0 || secondTransform.b == -1.0) && (secondTransform.c == 1.0 || secondTransform.c == -1.0)) {
    isSecondVideoAssetPortrait = YES;
}
if ((isFirstVideoAssetPortrait && !isSecondVideoAssetPortrait) || (!isFirstVideoAssetPortrait && isSecondVideoAssetPortrait)) {
    UIAlertView *incompatibleVideoOrientationAlert = [[UIAlertView alloc] initWithTitle:@"Error!"
                                                                                message:@"Cannot combine a video shot in portrait mode with a video shot in landscape mode."
                                                                               delegate:self
                                                                      cancelButtonTitle:@"Dismiss"
                                                                      otherButtonTitles:nil];
    [incompatibleVideoOrientationAlert show];
    return;
}
```
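If you need to run the same orientation check on more than two clips, it can be pulled out into a small helper. The following is only a minimal sketch of that idea; the function name `videoTrackIsPortrait` is ours and is not part of the original sample.

```objc
#import <AVFoundation/AVFoundation.h>

// Hypothetical helper that wraps the portrait check shown above.
// A 90° or 270° rotation has zero a/d components and ±1 b/c components.
static BOOL videoTrackIsPortrait(AVAssetTrack *videoTrack) {
    CGAffineTransform t = videoTrack.preferredTransform;
    return (t.a == 0 && t.d == 0 &&
            (t.b == 1.0 || t.b == -1.0) &&
            (t.c == 1.0 || t.c == -1.0));
}
```

With such a helper, the two flags above could be computed as `videoTrackIsPortrait(firstVideoAssetTrack)` and `videoTrackIsPortrait(secondVideoAssetTrack)`.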
4. Apply the video composition layer instructions. Once you know that the orientations of the clips you want to merge are compatible, you can apply the necessary layer instructions to each segment and add those layer instructions to the video composition.

Every `AVAssetTrack` object has a `preferredTransform` property that carries the track's orientation information. This transform is applied whenever the asset track is displayed on screen. In the code below, each layer instruction's transform is set to the corresponding asset track's transform, so that after you adjust the render size the video in the new composition still displays correctly.

```objc
AVMutableVideoCompositionInstruction *firstVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
// Set the time range of the first instruction to span the duration of the first video track.
firstVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstVideoAssetTrack.timeRange.duration);
AVMutableVideoCompositionInstruction *secondVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
// Set the time range of the second instruction to span the duration of the second video track,
// starting where the first video track ends.
secondVideoCompositionInstruction.timeRange = CMTimeRangeMake(firstVideoAssetTrack.timeRange.duration, secondVideoAssetTrack.timeRange.duration);
// Create two video layer instructions, associate them with the video composition track,
// and set each transform to the corresponding track's preferredTransform.
AVMutableVideoCompositionLayerInstruction *firstVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
// Set the transform of the first layer instruction to the preferred transform of the first video track.
[firstVideoLayerInstruction setTransform:firstTransform atTime:kCMTimeZero];
AVMutableVideoCompositionLayerInstruction *secondVideoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
// Set the transform of the second layer instruction to the preferred transform of the second video track.
[secondVideoLayerInstruction setTransform:secondTransform atTime:firstVideoAssetTrack.timeRange.duration];
firstVideoCompositionInstruction.layerInstructions = @[firstVideoLayerInstruction];
secondVideoCompositionInstruction.layerInstructions = @[secondVideoLayerInstruction];
AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = @[firstVideoCompositionInstruction, secondVideoCompositionInstruction];
```

5. Set the render size and frame rate. To fully fix the orientation problem, you also need to adjust the video composition's `renderSize` property and set an appropriate `frameDuration`, for example 1/30 second for 30 frames per second. In addition, `renderScale` defaults to 1.0.

```objc
CGSize naturalSizeFirst, naturalSizeSecond;
// If the first video asset was shot in portrait mode, then so was the second one if we made it here.
if (isFirstVideoAssetPortrait) {
    // Invert the width and height for the video tracks to ensure that they display properly.
    naturalSizeFirst = CGSizeMake(firstVideoAssetTrack.naturalSize.height, firstVideoAssetTrack.naturalSize.width);
    naturalSizeSecond = CGSizeMake(secondVideoAssetTrack.naturalSize.height, secondVideoAssetTrack.naturalSize.width);
} else {
    // If the videos weren't shot in portrait mode, we can just use their natural sizes.
    naturalSizeFirst = firstVideoAssetTrack.naturalSize;
    naturalSizeSecond = secondVideoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
// Set the renderWidth and renderHeight to the max of the two videos' widths and heights.
if (naturalSizeFirst.width > naturalSizeSecond.width) {
    renderWidth = naturalSizeFirst.width;
} else {
    renderWidth = naturalSizeSecond.width;
}
if (naturalSizeFirst.height > naturalSizeSecond.height) {
    renderHeight = naturalSizeFirst.height;
} else {
    renderHeight = naturalSizeSecond.height;
}
mutableVideoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
// Set the frame duration to an appropriate value (i.e. 30 frames per second for video).
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
```

6. Export the composition and save it to the camera roll. Create an `AVAssetExportSession` object and set its `outputURL` to export the video to the desired file. We can then use the `ALAssetsLibrary` API to save the exported video file to the camera roll.

```objc
// Create a static date formatter so we only have to initialize it once.
static NSDateFormatter *kDateFormatter;
if (!kDateFormatter) {
    kDateFormatter = [[NSDateFormatter alloc] init];
    kDateFormatter.dateStyle = NSDateFormatterMediumStyle;
    kDateFormatter.timeStyle = NSDateFormatterShortStyle;
}
// Create the export session with the composition and set the preset to the highest quality.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
// Set the desired output URL for the file created by the export process.
exporter.outputURL = [[[[NSFileManager defaultManager] URLForDirectory:NSDocumentDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:YES error:nil] URLByAppendingPathComponent:[kDateFormatter stringFromDate:[NSDate date]]] URLByAppendingPathExtension:CFBridgingRelease(UTTypeCopyPreferredTagWithClass((CFStringRef)AVFileTypeQuickTimeMovie, kUTTagClassFilenameExtension))];
// Set the output file type to be a QuickTime movie.
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mutableVideoComposition;
// Asynchronously export the composition to a video file and save this file to the camera roll once export completes.
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
            if ([assetsLibrary videoAtPathIsCompatibleWithSavedPhotosAlbum:exporter.outputURL]) {
                [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:exporter.outputURL completionBlock:NULL];
            }
        }
    });
}];
```
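The completion handler above only handles the success case. As a hedged variant that is not part of the original sample, the same call could also check the failed and cancelled states so that export problems surface during development; the success branch would contain the same camera-roll saving code shown above.

```objc
// Sketch of a more defensive completion handler for the same `exporter` object.
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        switch (exporter.status) {
            case AVAssetExportSessionStatusCompleted:
                // Save the exported file to the camera roll, as shown above.
                break;
            case AVAssetExportSessionStatusFailed:
                // The error property describes why the export did not finish.
                NSLog(@"Export failed: %@", exporter.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export cancelled");
                break;
            default:
                break;
        }
    });
}];
```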