Record a video using AVCaptureSession, apply a CIFilter to it, and save it to the photo album

I want to build my own video recorder in my application. At the moment I can record a video and save it, but I want to apply filters to the video while it records and save the filtered result to the photo album. This is my code for recording a video and saving it:

    let captureSession = AVCaptureSession()
    let fileOutput = AVCaptureMovieFileOutput()

    func initVideoRecording() {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print("error in audio")
        }

        let session = AVCaptureSession()
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSessionPresetMedium

        let videoLayer = AVCaptureVideoPreviewLayer(session: session)
        videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        videoLayer.frame = myImage.bounds
        myImage.layer.addSublayer(videoLayer)

        let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            let audioInput = try AVCaptureDeviceInput(device: audio)
            session.addInput(input)
            session.addInput(audioInput)
        } catch {
            print("can't access camera")
            return
        }

        session.addOutput(fileOutput)
        session.commitConfiguration()
        session.startRunning()
    }

    @IBAction func recordFunc() {
        if fileOutput.recording {
            myButton.setTitle("record", forState: .Normal)
            fileOutput.stopRecording()
        } else {
            let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
            fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)
            myButton.setTitle("stop", forState: .Normal)
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        // Save the recorded video to the Photos album.
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
    }

I am trying to use AVCaptureVideoDataOutput

In its delegate I use this code:

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        connection.videoOrientation = AVCaptureVideoOrientation.Portrait

        let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
        let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

        let comicEffect = CIFilter(name: "CIComicEffect")
        comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)
        let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

        dispatch_async(dispatch_get_main_queue()) {
            self.myImage.image = filteredImage
        }
    }

With this code the filtered frames are only displayed in the preview image view; nothing is ever written to a file.
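For context, this callback only fires once the data output has been attached to the session with a sample-buffer delegate; a minimal wiring sketch (untested, assuming the `captureSession` property used above):

    // Untested sketch: attach a video data output with a serial delegate queue.
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String:
                                 NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)]
    let queue = dispatch_queue_create("video sample buffer", DISPATCH_QUEUE_SERIAL)
    videoOutput.setSampleBufferDelegate(self, queue: queue)
    if captureSession.canAddOutput(videoOutput) {
        captureSession.addOutput(videoOutput)
    }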

UPDATE: This is the solution to my question. Note: this code is for Swift 2 and Xcode 7.3.

    let captureSession = AVCaptureSession()
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    var adapter: AVAssetWriterInputPixelBufferAdaptor!
    var record = false
    var videoWriter: AVAssetWriter!
    var writerInput: AVAssetWriterInput!
    var audioWriterInput: AVAssetWriterInput!
    var lastPath = ""
    var starTime = kCMTimeZero
    var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        video()
    }

    func video() {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print("error in audio")
        }

        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSessionPresetMedium

        let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        //videoLayer.frame = myImage.bounds
        //myImage.layer.addSublayer(videoLayer)
        view.layer.addSublayer(videoLayer)

        let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            let audioInput = try AVCaptureDeviceInput(device: audio)
            captureSession.addInput(input)
            captureSession.addInput(audioInput)
        } catch {
            print("can't access camera")
            return
        }

        let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)
        videoOutput.setSampleBufferDelegate(self, queue: queue)
        audioOutput.setSampleBufferDelegate(self, queue: queue)
        captureSession.addOutput(videoOutput)
        captureSession.addOutput(audioOutput)
        captureSession.commitConfiguration()
        captureSession.startRunning()
    }

    @IBAction func recordFunc() {
        if record {
            myButton.setTitle("record", forState: .Normal)
            record = false
            self.writerInput.markAsFinished()
            audioWriterInput.markAsFinished()
            self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
                print("FINISHED!!!!!")
                UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
            }
        } else {
            let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")
            lastPath = fileUrl.path!
            videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

            let outputSettings = [AVVideoCodecKey: AVVideoCodecH264,
                                  AVVideoWidthKey: NSNumber(float: Float(outputSize.width)),
                                  AVVideoHeightKey: NSNumber(float: Float(outputSize.height))]
            writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
            writerInput.expectsMediaDataInRealTime = true
            audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String: AnyObject])
            videoWriter.addInput(writerInput)
            videoWriter.addInput(audioWriterInput)

            adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String: AnyObject])

            videoWriter.startWriting()
            videoWriter.startSessionAtSourceTime(starTime)
            record = true
            myButton.setTitle("stop", forState: .Normal)
        }
    }

    func getCurrentDate() -> String {
        let format = NSDateFormatter()
        format.dateFormat = "dd-MM-yyyy hh:mm:ss"
        format.locale = NSLocale(localeIdentifier: "en")
        return format.stringFromDate(NSDate())
    }

    extension newCustomCameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

        func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
            starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            if captureOutput == videoOutput {
                connection.videoOrientation = AVCaptureVideoOrientation.Portrait

                let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
                let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

                let comicEffect = CIFilter(name: "CIHexagonalPixellate")
                comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)
                let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
                //let filteredImage = UIImage(CIImage: cameraImage)

                if self.record == true {
                    dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
                        if self.record == true {
                            if self.writerInput.readyForMoreMediaData {
                                let bo = self.adapter.appendPixelBuffer(
                                    DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)).takeRetainedValue() as CVPixelBufferRef,
                                    withPresentationTime: self.starTime)
                                print("video is \(bo)")
                            }
                        }
                    })
                }

                dispatch_async(dispatch_get_main_queue()) {
                    self.myImage.image = filteredImage
                }
            } else if captureOutput == audioOutput {
                if self.record == true {
                    let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                    print("audio is \(bo)")
                }
            }
        }

        func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
            let context: CIContext? = CIContext(options: nil)
            if context != nil {
                return context!.createCGImage(inputImage, fromRect: inputImage.extent)
            }
            return nil
        }

        func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
            var title = "Success"
            var message = "Video was saved"
            if error != nil {
                title = "Error"
                message = "Video failed to save"
            }
            let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
            alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
            presentViewController(alert, animated: true, completion: nil)
        }
    }
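A note on performance: the CIImage → UIImage → CGImage → CVPixelBuffer round trip above is expensive per frame, and the takeRetainedValue() call makes it easy to leak buffers. A lighter alternative, sketched here untested, is to render the filter output straight into a pixel buffer taken from the adaptor's pool (`adapter` and `writerInput` are the properties from the solution code):

    // Untested sketch: render the filtered CIImage directly into a pixel
    // buffer from the adaptor's pool instead of going through UIImage/CGImage.
    let ciContext = CIContext(options: nil)   // create once, not per frame

    func appendFiltered(image: CIImage, at time: CMTime) {
        guard let pool = adapter.pixelBufferPool else { return }
        var renderBuffer: CVPixelBuffer? = nil
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &renderBuffer)
        guard let buffer = renderBuffer where writerInput.readyForMoreMediaData else { return }

        ciContext.render(image, toCVPixelBuffer: buffer)   // draw filter output into the buffer
        adapter.appendPixelBuffer(buffer, withPresentationTime: time)
    }

Note that the pool is only available once the writer has started, which is why the sketch bails out early when it is still nil.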

These methods live in DejalActivityView and are written in Objective-C; I could not convert them to Swift. If someone can convert them, please edit my code (a rough attempt at a Swift version follows the Objective-C listing below).

    + (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                              kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
        // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);
        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
        NSParameterAssert(pxdata != NULL);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                     4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
        NSParameterAssert(context);
        CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        return pxbuffer;
    }

    + (NSDictionary *)getAdapterDictionary {
        return [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                nil];
    }

    + (NSDictionary *)getAudioDictionary {
        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

        NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                             //[NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                                             [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                             [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                             [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                             nil];
        return audioOutputSettings;
    }
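A rough, untested Swift 2 translation of the three helpers might look like this (same pixel format and audio settings as the Objective-C above, written as free functions rather than class methods):

    // Untested Swift 2 sketch of the three DejalActivityView helpers above.
    // Intended to match the Objective-C behavior line for line.
    func pixelBufferFromCGImage(image: CGImage, size: CGSize) -> CVPixelBuffer? {
        let options = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                       kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer: CVPixelBuffer? = nil
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height),
                                         kCVPixelFormatType_32ARGB, options, &pxbuffer)
        guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

        CVPixelBufferLockBaseAddress(buffer, 0)
        let pxdata = CVPixelBufferGetBaseAddress(buffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGBitmapContextCreate(pxdata, Int(size.width), Int(size.height), 8,
                                            4 * Int(size.width), rgbColorSpace,
                                            CGImageAlphaInfo.PremultipliedFirst.rawValue)
        // Draw the CGImage into the pixel buffer's backing memory.
        CGContextDrawImage(context, CGRectMake(0, 0,
                                               CGFloat(CGImageGetWidth(image)),
                                               CGFloat(CGImageGetHeight(image))), image)
        CVPixelBufferUnlockBaseAddress(buffer, 0)
        return buffer   // Swift manages the CF objects; no manual releases needed
    }

    func getAdapterDictionary() -> [String: AnyObject] {
        return [kCVPixelBufferPixelFormatTypeKey as String:
                NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)]
    }

    func getAudioDictionary() -> [String: AnyObject] {
        var acl = AudioChannelLayout()          // zero-initialized by Swift
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
        return [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
                AVSampleRateKey: NSNumber(float: 44100.0),
                AVNumberOfChannelsKey: NSNumber(int: 1),
                AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))]
    }

Note that this version returns an optional, Swift-managed CVPixelBuffer, so the takeRetainedValue() call at the append site would no longer be needed.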
1 answer

You need to add an AVAssetWriter:

 var videoRecorder: AVAssetWriter? 

Then in your delegate callback:

    let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

    if videoRecorder?.status == .Unknown {
        startRecordingTime = timeStamp
        videoRecorder?.startWriting()
        videoRecorder?.startSessionAtSourceTime(timeStamp)
    }
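Once the writer is in the .Writing state, each filtered frame is appended through the pixel-buffer adaptor. A minimal sketch, where `videoInput`, `adaptor`, and `filteredBuffer` are placeholders for your own writer objects:

    // Untested sketch: append one filtered frame to the writer.
    if videoRecorder?.status == .Writing && videoInput.readyForMoreMediaData {
        let appended = adaptor.appendPixelBuffer(filteredBuffer, withPresentationTime: timeStamp)
        if !appended { print("failed to append frame at \(timeStamp)") }
    }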

You will need to set up a new recorder for each recording you make, and you will also need to add your inputs to the recorder before writing starts.

You may run into problems because you don't yet have the queue setup you need, but for reference this GitHub repository is a very good resource:

https://github.com/waleedka/rosywriterswift

EDIT: Additional Information

You need to initialize the writer, then add the AVAssetWriterInput objects for video and audio.
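Putting that together, a per-recording setup might look like this (untested sketch; the settings mirror the ones in the question, and `makeRecorder` is just an illustrative name):

    // Untested sketch: one writer per recording, created just before use.
    func makeRecorder(outputURL: NSURL, size: CGSize) throws
            -> (AVAssetWriter, AVAssetWriterInput, AVAssetWriterInputPixelBufferAdaptor) {
        let writer = try AVAssetWriter(URL: outputURL, fileType: AVFileTypeMPEG4)

        let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: NSNumber(float: Float(size.width)),
            AVVideoHeightKey: NSNumber(float: Float(size.height))])
        videoInput.expectsMediaDataInRealTime = true   // required for live capture

        let adaptor = AVAssetWriterInputPixelBufferAdaptor(
            assetWriterInput: videoInput,
            sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String:
                                          NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)])

        writer.addInput(videoInput)   // inputs must be added before startWriting()
        return (writer, videoInput, adaptor)
    }

    // Usage: let (writer, input, adaptor) = try makeRecorder(fileUrl, size: outputSize)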

