Capture video with AVFoundation

I looked on Stack Overflow and found similar questions to this one, but none of them worked for me. I am a complete newbie to Swift 3.0. Essentially, I'm trying to record a video using AVFoundation. So far I have managed to capture a still image, and this is the code I have so far:

func beginSession() {
    do {
        let deviceInput = try AVCaptureDeviceInput(device: captureDevice) as AVCaptureDeviceInput
        if captureSession.inputs.isEmpty {
            self.captureSession.addInput(deviceInput)
        }
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
    } catch {
        print("error: \(error.localizedDescription)")
    }

    guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {
        print("no preview layer")
        return
    }

    self.view.layer.addSublayer(previewLayer)
    previewLayer.frame = self.view.layer.frame
    captureSession.startRunning()

    // Subviews
    self.view.addSubview(imgOverlay)
    self.view.addSubview(blur)
    self.view.addSubview(label)
    self.view.addSubview(Flip)
    self.view.addSubview(btnCapture)
}

and

// SAVE PHOTO
func saveToCamera() {
    if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
        stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in
            if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
                if let cameraImage = UIImage(data: imageData) {
                    self.flippedImage = UIImage(cgImage: cameraImage.cgImage!, scale: cameraImage.scale, orientation: UIImageOrientation.rightMirrored)
                    UIImageWriteToSavedPhotosAlbum(self.flippedImage, nil, nil, nil)
                }
            }
        })
    }
}
Tags: ios, swift, avfoundation, media
3 answers

I am going to make your task easier by posting all the code you need to make a video recorder in AVFoundation. This code should work if you simply copy and paste it as is. The only things you need to remember are that you must connect the camPreview outlet to a UIView in the ViewController in the storyboard (this UIView should take up the entire contents of the screen), and that you must add the appropriate privacy permissions to your Info.plist, namely Privacy - Microphone Usage Description and Privacy - Camera Usage Description, otherwise you will only see a black screen. I will explain the code so you can do your own research and modify the recorder to fit the needs of your app.

NOTE: At the bottom, I added how to play the recorded video, under the heading "Play recorded video."

EDIT: I forgot two things that made the recording crash; I have added them now.

Swift 4

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet weak var camPreview: UIView!

    let cameraButton = UIView()
    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    var outputURL: URL!

    override func viewDidLoad() {
        super.viewDidLoad()

        if setupSession() {
            setupPreview()
            startSession()
        }

        cameraButton.isUserInteractionEnabled = true
        let cameraButtonRecognizer = UITapGestureRecognizer(target: self, action: #selector(ViewController.startCapture))
        cameraButton.addGestureRecognizer(cameraButtonRecognizer)
        cameraButton.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
        cameraButton.backgroundColor = UIColor.red
        camPreview.addSubview(cameraButton)
    }

    func setupPreview() {
        // Configure previewLayer
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camPreview.bounds
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        camPreview.layer.addSublayer(previewLayer)
    }

    //MARK:- Setup Camera

    func setupSession() -> Bool {
        captureSession.sessionPreset = AVCaptureSession.Preset.high

        // Setup Camera
        let camera = AVCaptureDevice.default(for: AVMediaType.video)!

        do {
            let input = try AVCaptureDeviceInput(device: camera)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                activeInput = input
            }
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }

        // Setup Microphone
        let microphone = AVCaptureDevice.default(for: AVMediaType.audio)!

        do {
            let micInput = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }
        } catch {
            print("Error setting device audio input: \(error)")
            return false
        }

        // Movie output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }

        return true
    }

    func setupCaptureMode(_ mode: Int) {
        // Video Mode
    }

    //MARK:- Camera Session

    func startSession() {
        if !captureSession.isRunning {
            videoQueue().async {
                self.captureSession.startRunning()
            }
        }
    }

    func stopSession() {
        if captureSession.isRunning {
            videoQueue().async {
                self.captureSession.stopRunning()
            }
        }
    }

    func videoQueue() -> DispatchQueue {
        return DispatchQueue.main
    }

    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation

        switch UIDevice.current.orientation {
        case .portrait:
            orientation = AVCaptureVideoOrientation.portrait
        case .landscapeRight:
            orientation = AVCaptureVideoOrientation.landscapeLeft
        case .portraitUpsideDown:
            orientation = AVCaptureVideoOrientation.portraitUpsideDown
        default:
            orientation = AVCaptureVideoOrientation.landscapeRight
        }

        return orientation
    }

    @objc func startCapture() {
        startRecording()
    }

    //EDIT 1: I FORGOT THIS AT FIRST
    func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString

        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }

        return nil
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        let vc = segue.destination as! VideoPlayback
        vc.videoURL = sender as? URL
    }

    func startRecording() {
        if movieOutput.isRecording == false {
            let connection = movieOutput.connection(with: AVMediaType.video)

            if (connection?.isVideoOrientationSupported)! {
                connection?.videoOrientation = currentVideoOrientation()
            }

            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }

            let device = activeInput.device

            if (device.isSmoothAutoFocusSupported) {
                do {
                    try device.lockForConfiguration()
                    device.isSmoothAutoFocusEnabled = false
                    device.unlockForConfiguration()
                } catch {
                    print("Error setting configuration: \(error)")
                }
            }

            //EDIT2: And I forgot this
            outputURL = tempURL()
            movieOutput.startRecording(to: outputURL, recordingDelegate: self)
        } else {
            stopRecording()
        }
    }

    func stopRecording() {
        if movieOutput.isRecording == true {
            movieOutput.stopRecording()
        }
    }

    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    }

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if (error != nil) {
            print("Error recording movie: \(error!.localizedDescription)")
        } else {
            let videoRecorded = outputURL! as URL
            performSegue(withIdentifier: "showVideo", sender: videoRecorded)
        }
    }
}

Swift 3

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet weak var camPreview: UIView!

    let cameraButton = UIView()
    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    var outputURL: URL!

    override func viewDidLoad() {
        super.viewDidLoad()

        if setupSession() {
            setupPreview()
            startSession()
        }

        cameraButton.isUserInteractionEnabled = true
        let cameraButtonRecognizer = UITapGestureRecognizer(target: self, action: #selector(ViewController.startCapture))
        cameraButton.addGestureRecognizer(cameraButtonRecognizer)
        cameraButton.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
        cameraButton.backgroundColor = UIColor.red
        camPreview.addSubview(cameraButton)
    }

    func setupPreview() {
        // Configure previewLayer
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camPreview.bounds
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        camPreview.layer.addSublayer(previewLayer)
    }

    //MARK:- Setup Camera

    func setupSession() -> Bool {
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Setup Camera
        let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        do {
            let input = try AVCaptureDeviceInput(device: camera)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                activeInput = input
            }
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }

        // Setup Microphone
        let microphone = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

        do {
            let micInput = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }
        } catch {
            print("Error setting device audio input: \(error)")
            return false
        }

        // Movie output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }

        return true
    }

    func setupCaptureMode(_ mode: Int) {
        // Video Mode
    }

    //MARK:- Camera Session

    func startSession() {
        if !captureSession.isRunning {
            videoQueue().async {
                self.captureSession.startRunning()
            }
        }
    }

    func stopSession() {
        if captureSession.isRunning {
            videoQueue().async {
                self.captureSession.stopRunning()
            }
        }
    }

    func videoQueue() -> DispatchQueue {
        return DispatchQueue.main
    }

    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation

        switch UIDevice.current.orientation {
        case .portrait:
            orientation = AVCaptureVideoOrientation.portrait
        case .landscapeRight:
            orientation = AVCaptureVideoOrientation.landscapeLeft
        case .portraitUpsideDown:
            orientation = AVCaptureVideoOrientation.portraitUpsideDown
        default:
            orientation = AVCaptureVideoOrientation.landscapeRight
        }

        return orientation
    }

    func startCapture() {
        startRecording()
    }

    //EDIT 1: I FORGOT THIS AT FIRST
    func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString

        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }

        return nil
    }

    func startRecording() {
        if movieOutput.isRecording == false {
            let connection = movieOutput.connection(withMediaType: AVMediaTypeVideo)

            if (connection?.isVideoOrientationSupported)! {
                connection?.videoOrientation = currentVideoOrientation()
            }

            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }

            let device = activeInput.device

            if (device?.isSmoothAutoFocusSupported)! {
                do {
                    try device?.lockForConfiguration()
                    device?.isSmoothAutoFocusEnabled = false
                    device?.unlockForConfiguration()
                } catch {
                    print("Error setting configuration: \(error)")
                }
            }

            //EDIT2: And I forgot this
            outputURL = tempURL()
            movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
        } else {
            stopRecording()
        }
    }

    func stopRecording() {
        if movieOutput.isRecording == true {
            movieOutput.stopRecording()
        }
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        if (error != nil) {
            print("Error recording movie: \(error!.localizedDescription)")
        } else {
            _ = outputURL as URL
        }

        outputURL = nil
    }
}

This is how you should configure the view controller

Set up your View Controller with the camPreview UIView:

Permissions for your Info.plist


Setting Up the Recording Delegate

You need to conform to AVCaptureFileOutputRecordingDelegate. According to Apple's docs, it defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording a single file. It comes with two methods that need to be implemented, and these are the last two methods at the bottom of the code. (The snippets below use the Swift 3 signatures; in Swift 4 the equivalents are fileOutput(_:didStartRecordingTo:from:) and fileOutput(_:didFinishRecordingTo:from:error:).) The first is:

 func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) { } 

You can add any logic you need here for when the video starts recording. In my code example, the video starts recording when the red square button in the left corner is tapped. The second is:

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {
        _ = outputURL as URL
    }

    outputURL = nil
}

This is called when the video has finished recording. In my code example, the video stops recording once the red square button is tapped again. When the video has stopped recording, you get the URL of the output file. This represents your video. You can use it, for example, to segue to another view controller and play the video in an AVPlayer, or you can save it. In this example, you will notice that I have not done much with the output URL.
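If you do want to keep the recording, one option (my own addition, not part of the original answer) is to save it to the user's photo library with the Photos framework; this also requires the Privacy - Photo Library Additions Usage Description key in Info.plist. A minimal sketch:

import Photos

func saveToPhotoLibrary(_ url: URL) {
    // Copy the finished recording from the temporary directory into the photo library.
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
    }) { saved, error in
        if let error = error {
            print("Could not save video: \(error.localizedDescription)")
        } else if saved {
            print("Video saved to the photo library")
        }
    }
}

You could call this from the didFinishRecording delegate method instead of, or in addition to, performing the segue.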

To start recording the video, I used a programmatically created button that appears as a red square in the left corner and responds to a UITapGestureRecognizer. You can make a better button in your application; a rough sketch follows.
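For instance, a round UIButton wired straight to startCapture with target-action (assuming the Swift 4 version above, where startCapture is marked @objc) could replace the plain red UIView:

let recordButton = UIButton(type: .custom)
recordButton.frame = CGRect(x: 0, y: 0, width: 80, height: 80)
recordButton.layer.cornerRadius = 40                 // make it round
recordButton.backgroundColor = UIColor.red
recordButton.layer.borderWidth = 3
recordButton.layer.borderColor = UIColor.white.cgColor
recordButton.addTarget(self, action: #selector(startCapture), for: .touchUpInside)
camPreview.addSubview(recordButton)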

Session setup

The video recorder needs a capture session, which I set up in setupSession(). Here you add the AVCapture input devices, which include the camera and the microphone. According to Apple, AVCaptureDeviceInput is a concrete subclass of AVCaptureInput that you use to capture data from an AVCaptureDevice object. However, the user has to grant you access to use these, so in your Info.plist you must add Privacy - Microphone Usage Description and Privacy - Camera Usage Description and give the reason why you want to use the camera and microphone. If you do not, you will only get a black screen. The session preset is a constant value indicating the quality level or bit rate of the output. I set this to high, but there are other options you can explore. movieOutput is of type AVCaptureMovieFileOutput, which, according to Apple, is a concrete subclass of AVCaptureFileOutput that you use to capture data to a QuickTime movie. This is what actually lets you record and save the video.
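The Info.plist keys only supply the text iOS shows in the permission prompt; the system asks the user for access the first time the session is used. If you want to check or request access explicitly before calling setupSession(), a minimal sketch (my own addition, Swift 4 syntax) looks like this:

AVCaptureDevice.requestAccess(for: .video) { granted in
    DispatchQueue.main.async {
        if granted {
            // Camera access granted: safe to configure and start the capture session.
        } else {
            // Access denied: explain why the camera is needed or disable the recording UI.
        }
    }
}

The same call with .audio covers the microphone.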

Preview Setting

Here you set up the camera preview layer, which is done in setupPreview(). You create the preview layer with the capture session you created, using AVCaptureVideoPreviewLayer(session: captureSession), and add it as a sublayer of camPreview.
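One caveat: previewLayer.frame is set once from camPreview.bounds, so if the layout can change after viewDidLoad (rotation, different screen sizes), the preview can end up misaligned. A small sketch of keeping it in sync, assuming the same previewLayer and camPreview as above:

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Re-apply the frame whenever the preview view is laid out again.
    previewLayer?.frame = camPreview.bounds
}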

Starting session

The final step is to start the session, which happens in startSession(). You check whether the session is already running, and if not, you start it.

if !captureSession.isRunning {
    videoQueue().async {
        self.captureSession.startRunning()
    }
}

Start recording

When you tap the red button, the startRecording() method is called. Here I have added handling for video orientation and video stabilization. Finally, we see the movieOutput variable again, which we set up earlier in our session. We call it to record our movie to outputURL, and we tell it that the delegate methods for handling the start and end of the recording are in the same view controller (those last two methods).

Stop recording

When you tap the red button again, startRecording is called once more, but it notices that something is being recorded and calls stopRecording instead.

Play recorded video

I am generous today, so I will add this too.

Create a new view controller and name it VideoPlayback. Connect it to your first ViewController with a segue in the storyboard. Give the segue the identifier "showVideo". Create a UIView that fills the VideoPlayback screen, and create an outlet to its view controller called videoView. Then add the following code to your new VideoPlayback view controller:

Swift 4

import UIKit
import AVFoundation

class VideoPlayback: UIViewController {

    let avPlayer = AVPlayer()
    var avPlayerLayer: AVPlayerLayer!

    var videoURL: URL!
    //connect this to your uiview in storyboard
    @IBOutlet weak var videoView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()

        avPlayerLayer = AVPlayerLayer(player: avPlayer)
        avPlayerLayer.frame = view.bounds
        avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoView.layer.insertSublayer(avPlayerLayer, at: 0)

        view.layoutIfNeeded()

        let playerItem = AVPlayerItem(url: videoURL as URL)
        avPlayer.replaceCurrentItem(with: playerItem)

        avPlayer.play()
    }
}

Swift 3

import UIKit
import AVFoundation

class VideoPlayback: UIViewController {

    let avPlayer = AVPlayer()
    var avPlayerLayer: AVPlayerLayer!

    var videoURL: URL!
    //connect this to your uiview in storyboard
    @IBOutlet weak var videoView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()

        avPlayerLayer = AVPlayerLayer(player: avPlayer)
        avPlayerLayer.frame = view.bounds
        avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        videoView.layer.insertSublayer(avPlayerLayer, at: 0)

        view.layoutIfNeeded()

        let playerItem = AVPlayerItem(url: videoURL as URL)
        avPlayer.replaceCurrentItem(with: playerItem)

        avPlayer.play()
    }
}

Now go back to the last delegate method and change it as follows:

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {
        let videoRecorded = outputURL! as URL
        performSegue(withIdentifier: "showVideo", sender: videoRecorded)
    }
}

Finally, create a prepare(for:) method that hands the videoURL over to be played by the AVPlayer.

override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    let vc = segue.destination as! VideoPlayback
    vc.videoURL = sender as! URL
}

Now, to test it, go back and record a video. On the second tap of the red square, the segue will be performed and you will see the recorded video playing automatically.
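One last note of my own: tempURL() writes every recording into NSTemporaryDirectory(), so the files are neither permanent nor cleaned up anywhere in this example. If you are done with a recording, you can delete it yourself, for instance:

func removeTempRecording(at url: URL) {
    // Delete a finished recording from the temporary directory.
    do {
        try FileManager.default.removeItem(at: url)
    } catch {
        print("Could not remove temp recording: \(error)")
    }
}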


Based on @gwinyai's amazing answer, I made a similar camera framework: https://github.com/eonist/HybridCamera. It also supports taking photos, and it does not have the problem described by @Makim Kniazev, which was caused by this line:

if (connection?.isVideoStabilizationSupported)! {
    connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
}
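A more defensive variant (my own sketch, not taken from the linked framework) is to optionally bind the connection instead of force unwrapping it, so the assignment is simply skipped when stabilization is not supported:

if let connection = movieOutput.connection(with: .video),
   connection.isVideoStabilizationSupported {
    connection.preferredVideoStabilizationMode = .auto
}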

Based on @gwinyai's answer, updated to the latest version (Swift 4):

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet weak var camPreview: UIView!

    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    var outputURL: URL!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        if setupSession() {
            setupPreview()
            startSession()
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func setupPreview() {
        // Configure previewLayer
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camPreview.bounds
        previewLayer.videoGravity = .resizeAspectFill
        camPreview.layer.addSublayer(previewLayer)
    }

    //MARK:- Setup Camera

    func setupSession() -> Bool {
        captureSession.sessionPreset = AVCaptureSession.Preset.high

        // Setup Camera
        let camera = AVCaptureDevice.default(for: .video)

        do {
            let input = try AVCaptureDeviceInput(device: camera!)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                activeInput = input
            }
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }

        // Setup Microphone
        let microphone = AVCaptureDevice.default(for: .audio)

        do {
            let micInput = try AVCaptureDeviceInput(device: microphone!)
            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }
        } catch {
            print("Error setting device audio input: \(error)")
            return false
        }

        // Movie output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }

        return true
    }

    func setupCaptureMode(_ mode: Int) {
        // Video Mode
    }

    //MARK:- Camera Session

    func startSession() {
        if !captureSession.isRunning {
            videoQueue().async {
                self.captureSession.startRunning()
            }
        }
    }

    func stopSession() {
        if captureSession.isRunning {
            videoQueue().async {
                self.captureSession.stopRunning()
            }
        }
    }

    func videoQueue() -> DispatchQueue {
        return DispatchQueue.main
    }

    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation

        switch UIDevice.current.orientation {
        case .portrait:
            orientation = AVCaptureVideoOrientation.portrait
        case .landscapeRight:
            orientation = AVCaptureVideoOrientation.landscapeLeft
        case .portraitUpsideDown:
            orientation = AVCaptureVideoOrientation.portraitUpsideDown
        default:
            orientation = AVCaptureVideoOrientation.landscapeRight
        }

        return orientation
    }

    func startCapture() {
        startRecording()
    }

    //EDIT 1: I FORGOT THIS AT FIRST
    func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString

        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }

        return nil
    }

    func startRecording() {
        if movieOutput.isRecording == false {
            let connection = movieOutput.connection(with: .video)

            if (connection?.isVideoOrientationSupported)! {
                connection?.videoOrientation = currentVideoOrientation()
            }

            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }

            let device = activeInput.device

            if (device.isSmoothAutoFocusSupported) {
                do {
                    try device.lockForConfiguration()
                    device.isSmoothAutoFocusEnabled = false
                    device.unlockForConfiguration()
                } catch {
                    print("Error setting configuration: \(error)")
                }
            }

            //EDIT2: And I forgot this
            outputURL = tempURL()
            movieOutput.startRecording(to: outputURL, recordingDelegate: self)
        } else {
            stopRecording()
        }
    }

    func stopRecording() {
        if movieOutput.isRecording == true {
            movieOutput.stopRecording()
        }
    }

    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    }

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if (error != nil) {
            print("Error recording movie: \(error!.localizedDescription)")
        } else {
            _ = outputURL as URL
        }

        outputURL = nil
    }
}
