I have an application that starts capturing video and audio when a presentation loads and, when it finishes, writes the recording to the application's Documents folder as well as to the Camera Roll of the iPad it runs on. I made sure to add both an audio input and a video input to the capture session, but when I play back the saved video there is no audio. Can anyone spot anything in my code that would indicate where the problem is?
UPDATE: No error messages are ever displayed. I did, however, find a common denominator: audio is recorded, but only if the recording is 10 seconds or shorter. Once it reaches 11 seconds, no sound is recorded at all.
NSLog shows:
Failed with error: (null)
- (void)viewWillAppear:(BOOL)animated
{
    NSDate *today = [NSDate date];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"MMM d hh:mm:ss a"]; // display in 12HR/24HR (ie 11:25PM or 23:25) format according to User Settings
    NSString *currentTime = [dateFormatter stringFromDate:today];

    NSError *error4 = nil;
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryAmbient error:&error4];

    OSStatus propertySetError = 0;
    UInt32 allowMixing = true;
    propertySetError |= AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck, sizeof(allowMixing), &allowMixing);

    // Activate the audio session
    error4 = nil;
    if (![audioSession setActive:YES error:&error4]) {
        NSLog(@"AVAudioSession setActive:YES failed: %@", [error4 localizedDescription]);
    }

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectoryPath = [paths objectAtIndex:0];

    session = [[AVCaptureSession alloc] init];
    [session beginConfiguration];
    session.sessionPreset = AVCaptureSessionPresetMedium;
    self.navigationController.navigationBarHidden = YES;

    NSError *error = nil;
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error2 = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2];

    AVCaptureDevice *device;
    AVCaptureDevicePosition desiredPosition = AVCaptureDevicePositionBack;

    // find the front facing camera
    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // get the input device
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

    AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
    NSString *editedfilename = [[@"ComeOnDown" lastPathComponent] stringByDeletingPathExtension];
    NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime];
    NSLog(@"%@", datestring);
    NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];

    [session addInput:audioInput];
    [session addInput:deviceInput];
    [session addOutput:movieFileOutput];
    [session commitConfiguration];
    [session startRunning];

    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    previewLayer.backgroundColor = [[UIColor blackColor] CGColor];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;

    CALayer *rootLayer = [vImagePreview layer];
    [rootLayer setMasksToBounds:YES];
    [previewLayer setFrame:[rootLayer bounds]];
    [rootLayer addSublayer:previewLayer];

    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
    //session = nil;

    if (error) {
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"Failed with error %d", (int)[error code]]
                                                            message:[error localizedDescription]
                                                           delegate:nil
                                                  cancelButtonTitle:@"Dismiss"
                                                  otherButtonTitles:nil];
        [alertView show];
    }

    [super viewWillAppear:YES];
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
}

- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    NSLog(@"Finished with error: %@", error);
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    // finished
    NSLog(@"Finished");

    NSString *proud = [[NSString alloc] initWithString:[outputFileURL path]];
    UISaveVideoAtPathToSavedPhotosAlbum(proud, self, @selector(video:didFinishSavingWithError:contextInfo:), (__bridge void *)(proud));
}
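For reference, the 10-second mark happens to coincide with the documented default of AVCaptureMovieFileOutput's movieFragmentInterval (10 seconds). Below is a minimal sketch of how fragment writing could be disabled before recording starts; it reuses the movieFileOutput, session, and outputURL from my code above, and whether this is actually related to the missing audio is only an assumption on my part, not something I have confirmed.

    // Sketch only: assumes the same session, movieFileOutput, and outputURL
    // as in viewWillAppear: above. Disabling fragment writing is a guess at
    // the cause of the 10-second audio cutoff, not a confirmed fix.
    AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    // The default fragment interval is 10 seconds; kCMTimeInvalid turns
    // fragment writing off entirely.
    movieFileOutput.movieFragmentInterval = kCMTimeInvalid;

    [session addOutput:movieFileOutput];
    [session commitConfiguration];
    [session startRunning];

    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];

The only change compared with my current code is that the fragment interval is set before addOutput: and startRecordingToOutputFileURL: are called.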