Domain Error = AVFoundationErrorDomain Code = -11821 "Unable to decode"

There is a strange behavior that I discovered while trying to combine video with AVFoundation. I'm pretty sure that something made a mistake, but I'm too blind to see it. My goal is to combine 4 videos (later there will be a crossover between them). Every time I try to export a video, I get this error:

Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" UserInfo=0x7fd94073cc30 {NSLocalizedDescription=Cannot Decode, NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.}

The funny thing is that if I did not provide AVAssetExportSession with AVMutableVideoComposition, then everything will be fine! I do not understand what I am doing wrong. Source videos are downloaded from youtube and have the extension .mp4. I can play them using MPMoviePlayerController. While checking the source code, please take a close look at AVMutableVideoComposition. I tested this code in Xcode 6.0.1 on an iOS simulator.

#import "VideoStitcher.h"
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

@implementation VideoStitcher
{
    VideoStitcherCompletionBlock _completionBlock;
    AVMutableComposition *_composition;
    AVMutableVideoComposition *_videoComposition;
}

- (instancetype)init
{
    self = [super init];
    if (self)
    {
        _composition = [AVMutableComposition composition];
        _videoComposition = [AVMutableVideoComposition videoComposition];
    }
    return self;
}

- (void)compileVideoWithAssets:(NSArray *)assets completion:(VideoStitcherCompletionBlock)completion
{
    _completionBlock = [completion copy];

    if (assets == nil || assets.count < 2)
    {
        // We need at least two video to make a stitch, right?
        NSAssert(NO, @"VideoStitcher: assets parameter is nil or has not enough items in it");
    }
    else
    {
        [self composeAssets:assets];
        if (_composition != nil) // if stitching went good and no errors were found
            [self exportComposition];
    }
}

- (void)composeAssets:(NSArray *)assets
{
    AVMutableCompositionTrack *compositionVideoTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *compositionError = nil;
    CMTime currentTime = kCMTimeZero;
    AVAsset *asset = nil;
    for (int i = (int)assets.count - 1; i >= 0; i--) //For some reason videos are compiled in reverse order. Find the bug later. 06.10.14
    {
        asset = assets[i];
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        BOOL success = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetVideoTrack.timeRange.duration)
                                                      ofTrack:assetVideoTrack
                                                       atTime:currentTime
                                                        error:&compositionError];
        if (success)
        {
            CMTimeAdd(currentTime, asset.duration);
        }
        else
        {
            NSLog(@"VideoStitcher: something went wrong during inserting time range in composition");
            if (compositionError != nil)
            {
                NSLog(@"%@", compositionError);
                _completionBlock(nil, compositionError);
                _composition = nil;
                return;
            }
        }
    }

    AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration);
    videoCompositionInstruction.backgroundColor = [[UIColor redColor] CGColor];
    _videoComposition.instructions = @[videoCompositionInstruction];
    _videoComposition.renderSize = [self calculateOptimalRenderSizeFromAssets:assets];
    _videoComposition.frameDuration = CMTimeMake(1, 600);
}

- (void)exportComposition
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:@"testVideo.mov"];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];


    NSString *filePath = [url path];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:filePath]) {
        NSError *error;
        if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
            NSLog(@"removeItemAtPath %@ error:%@", filePath, error);
        }
    }

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:_composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = _videoComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        [self exportDidFinish:exporter];
    }];
}

- (void)exportDidFinish:(AVAssetExportSession*)session
{
    NSLog(@"%li", session.status);
    if (session.status == AVAssetExportSessionStatusCompleted)
    {
        NSURL *outputURL = session.outputURL;

        // time to call delegate methods, but for testing purposes we save the video in 'photos' app

        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
                if (error == nil)
                {
                    NSLog(@"successfully saved video");
                }
                else
                {
                    NSLog(@"saving video failed.\n%@", error);
                }
            }];
        }
    }
    else if (session.status == AVAssetExportSessionStatusFailed)
    {
        NSLog(@"VideoStitcher: exporting failed.\n%@", session.error);
    }
}

- (CGSize)calculateOptimalRenderSizeFromAssets:(NSArray *)assets
{
    AVAsset *firstAsset = assets[0];
    AVAssetTrack *firstAssetVideoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGFloat maxWidth = firstAssetVideoTrack.naturalSize.height;
    CGFloat maxHeight = firstAssetVideoTrack.naturalSize.width;

    for (AVAsset *asset in assets)
    {
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (assetVideoTrack.naturalSize.width > maxWidth)
            maxWidth = assetVideoTrack.naturalSize.width;
        if (assetVideoTrack.naturalSize.height > maxHeight)
            maxHeight = assetVideoTrack.naturalSize.height;
    }

    return CGSizeMake(maxWidth, maxHeight);
}

@end

Thank you for the attention. I'm really tired, I tried to find a mistake for four hours in a row. I am going to sleep now.

+2
source share
1 answer

- . : " . , ". , - . 5 , ..

, !

, , CMTimeADD() . , , :

CMTime currentTime = kCMTimeZero;
for (int i = (int)assets.count - 1; i >= 0; i--)
{
    CMTimeAdd(currentTime, asset.duration); //HERE!! I don't actually increment the value! currentTime is always kCMTimeZero
}
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration); // And that where everything breaks!

, : AVFoundation ! , .

+4

All Articles