I have two videos, one of which is an overlay video with a transparent background (the explosion clip that I am trying to composite on top of the main video). When I try to combine them using AVMutableComposition, the alpha channel of the overlay video is ignored: I see only the second video, with a black background instead of a transparent one.
As a test, I set an opacity of 0.9 on the overlay's layer instruction to make sure the tracks were actually being merged, and as a result I could see the main video underneath the overlay (not what I want, of course, but it proves that the composition works). Any idea how to get the overlay video's alpha channel to be respected?
// Composites an overlay video on top of a main video, carries over the main
// video's audio, and exports the result asynchronously to a temporary .mov.
//
// NOTE(review): AVFoundation's built-in compositor only honors the overlay's
// alpha when the source is encoded in a format it recognizes as carrying
// alpha (e.g. HEVC with alpha, ProRes 4444). If the overlay is plain H.264,
// the alpha is discarded at decode time and a custom AVVideoCompositing (or
// AVVideoCompositionCoreAnimationTool) is required — confirm the overlay codec.
NSError* error = nil;

AVMutableComposition *comp = [AVMutableComposition composition];
// Pass kCMPersistentTrackID_Invalid so AVFoundation assigns unique track IDs.
// Hard-coded IDs are fragile: 0 IS kCMPersistentTrackID_Invalid, and 1 may
// collide with a track ID already present in a source asset.
AVMutableCompositionTrack* videoCompTrack = [comp addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack* videoCompTrack2 = [comp addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack* audioCompTrack = [comp addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

AVURLAsset* videoAssetMain = [AVURLAsset URLAssetWithURL:url1 options:nil];
AVAssetTrack* videoTrackMain = [[videoAssetMain tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack* audioTrackMain = [[videoAssetMain tracksWithMediaType:AVMediaTypeAudio] firstObject];

AVURLAsset* videoAssetOver = [AVURLAsset URLAssetWithURL:url2 options:nil];
AVAssetTrack* videoTrackOver = [[videoAssetOver tracksWithMediaType:AVMediaTypeVideo] firstObject];

// One time range, derived from the main track, keeps all inserts in sync.
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, videoTrackMain.timeRange.duration);

// Check the BOOL result of each insert — the method's return value, not the
// error pointer, is the success indicator.
if (![videoCompTrack insertTimeRange:timeRange ofTrack:videoTrackMain atTime:kCMTimeZero error:&error]) {
    NSLog(@"Failed to insert main video track: %@", error);
}
if (![videoCompTrack2 insertTimeRange:timeRange ofTrack:videoTrackOver atTime:kCMTimeZero error:&error]) {
    NSLog(@"Failed to insert overlay video track: %@", error);
}
if (![audioCompTrack insertTimeRange:timeRange ofTrack:audioTrackMain atTime:kCMTimeZero error:&error]) {
    NSLog(@"Failed to insert audio track: %@", error);
}

AVMutableVideoCompositionLayerInstruction *inst1 = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompTrack];
[inst1 setOpacity:1 atTime:kCMTimeZero];

AVMutableVideoCompositionLayerInstruction *inst2 = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompTrack2];
// Scale the overlay so it exactly covers the main video's frame.
// NOTE(review): this ignores preferredTransform; rotated source video will
// come out sideways — TODO confirm source orientation.
CGAffineTransform scale = CGAffineTransformMakeScale(videoTrackMain.naturalSize.width / videoTrackOver.naturalSize.width,
                                                     videoTrackMain.naturalSize.height / videoTrackOver.naturalSize.height);
[inst2 setTransform:scale atTime:kCMTimeZero];

AVMutableVideoCompositionInstruction *trans = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
trans.backgroundColor = [UIColor clearColor].CGColor;
trans.timeRange = timeRange;
// Layer instructions are ordered front-to-back: the overlay (inst2) is listed
// first so it renders on top of the main video (inst1).
trans.layerInstructions = @[inst2, inst1];

AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.instructions = @[trans];
videoComp.frameDuration = CMTimeMake(1, 30);  // 30 fps output
// Derive the render size from the main video track. An AVMutableComposition's
// naturalSize is CGSizeZero unless explicitly set, so the original
// `comp.naturalSize` produced a degenerate render size.
videoComp.renderSize = videoTrackMain.naturalSize;

AVAssetExportSession* expSession = [[AVAssetExportSession alloc] initWithAsset:comp presetName:AVAssetExportPresetHighestQuality];
NSString* newVideoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output_final.mov"];
// The export session fails if the output file already exists.
if ([[NSFileManager defaultManager] fileExistsAtPath:newVideoPath]) {
    [[NSFileManager defaultManager] removeItemAtPath:newVideoPath error:&error];
}
expSession.outputURL = [NSURL fileURLWithPath:newVideoPath];
expSession.outputFileType = AVFileTypeQuickTimeMovie;
expSession.videoComposition = videoComp;

[expSession exportAsynchronouslyWithCompletionHandler:^{
    // Only notify the delegate on a completed export; the original code
    // reported the output URL even when the export failed or was cancelled.
    if (expSession.status == AVAssetExportSessionStatusCompleted) {
        if (delegate) {
            [delegate videoProcessor:self didFinish:expSession.outputURL];
        }
    } else {
        NSLog(@"Export failed (status %ld): %@", (long)expSession.status, expSession.error);
    }
}];