I am trying to export an AVMutableComposition using AVAssetExportSession.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    switch (exporter.status) {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Video merge successful");
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Failed: %@", exporter.error.description);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Cancelled: %@", exporter.error);
            break;
        case AVAssetExportSessionStatusExporting:
            NSLog(@"Exporting!");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"Waiting");
            break;
        default:
            break;
    }
}];
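To see where the time goes, here is a sketch of polling the session's progress property (this is not in my original code; logExportProgress: is a hypothetical helper on self):

// Sketch: poll exporter.progress every half second to see how the export
// time is distributed. logExportProgress: is a hypothetical helper method.
NSTimer *progressTimer = [NSTimer scheduledTimerWithTimeInterval:0.5
                                                          target:self
                                                        selector:@selector(logExportProgress:)
                                                        userInfo:exporter
                                                         repeats:YES];

- (void)logExportProgress:(NSTimer *)timer {
    AVAssetExportSession *session = timer.userInfo;
    NSLog(@"Export progress: %.0f%%", session.progress * 100);
    // Stop polling once the session has finished, failed, or been cancelled.
    if (session.status == AVAssetExportSessionStatusCompleted ||
        session.status == AVAssetExportSessionStatusFailed ||
        session.status == AVAssetExportSessionStatusCancelled) {
        [timer invalidate];
    }
}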
But exporting even a 1-minute video takes about 30 seconds, which is too long, given that the built-in iPad app exports the same video in under 2 seconds.
Also, if I remove the videoComposition from the exporter, the export time drops to 7 seconds, which is still too slow for a video that is only 1 minute long. So, how can I reduce the export time to a minimum?
Also, does AVAssetExportSession usually take this long, or is it just my case?
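One thing I have considered (a sketch, under the assumption that the clips need no re-rendering, so no videoComposition is set): AVAssetExportPresetPassthrough, which remuxes the source samples instead of re-encoding them and is therefore typically much faster:

// Sketch of the passthrough path. Assumption: the clips need no transform or
// re-rendering, so videoComposition stays nil and the samples are only remuxed.
AVAssetExportSession *passthrough =
    [[AVAssetExportSession alloc] initWithAsset:mutableComposition
                                     presetName:AVAssetExportPresetPassthrough];
passthrough.outputURL = url;
passthrough.outputFileType = AVFileTypeQuickTimeMovie;
[passthrough exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Passthrough export status: %ld", (long)passthrough.status);
}];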
Update: merge code:
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoCompositionLayerInstruction *videoTrackLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
NSMutableArray *instructions = [NSMutableArray new]; // (unused in this snippet)
CGSize size = CGSizeZero;
CMTime time = kCMTimeZero;

for (AVURLAsset *asset in assets) {
    AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    NSError *error;

    // Append this asset's video track at the current insertion point.
    [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
                                   ofTrack:assetTrack
                                    atTime:time
                                     error:&error];
    [videoTrackLayerInstruction setTransform:assetTrack.preferredTransform atTime:time];
    if (error) {
        NSLog(@"asset url :: %@", assetTrack.asset);
        NSLog(@"Error1 - %@", error.debugDescription);
    }

    // Append the matching audio track over the same range.
    [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration)
                                   ofTrack:audioAssetTrack
                                    atTime:time
                                     error:&error];
    if (error) {
        NSLog(@"Error2 - %@", error.debugDescription);
    }

    time = CMTimeAdd(time, assetTrack.timeRange.duration);
    if (CGSizeEqualToSize(size, CGSizeZero)) {
        size = assetTrack.naturalSize;
    }
}

AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, time);
mainInstruction.layerInstructions = [NSArray arrayWithObject:videoTrackLayerInstruction];

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps
mainCompositionInst.renderSize = size;
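Based on this, a sketch of an optimization I am considering (my assumption, not verified): only attach mainCompositionInst when at least one source track actually needs a transform, and otherwise leave exporter.videoComposition nil so the session can take the faster path:

// Sketch (assumption): skip the video composition entirely when every source
// track already has an identity preferredTransform, since the composition is
// only used here to apply transforms.
BOOL needsVideoComposition = NO;
for (AVURLAsset *asset in assets) {
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!CGAffineTransformIsIdentity(track.preferredTransform)) {
        needsVideoComposition = YES;
        break;
    }
}
exporter.videoComposition = needsVideoComposition ? mainCompositionInst : nil;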