// iOSSwiftSimpleAVCamera — AppDelegate.swift: application lifecycle and Core Data stack.
import UIKit
import CoreData
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    var window: UIWindow?

    // MARK: - Application lifecycle

    func applicationDidFinishLaunching(application: UIApplication) {
    }

    func applicationWillResignActive(application: UIApplication) {
    }

    func applicationDidEnterBackground(application: UIApplication) {
    }

    func applicationWillEnterForeground(application: UIApplication) {
    }

    func applicationDidBecomeActive(application: UIApplication) {
    }

    func applicationWillTerminate(application: UIApplication) {
        // Persist any unsaved Core Data changes before the process exits.
        self.saveContext()
    }

    // MARK: - Core Data stack

    /// Saves the managed object context if it has unsaved changes.
    /// Logs the failure before aborting so the error is not silently lost.
    func saveContext() {
        var error: NSError? = nil
        let managedObjectContext = self.managedObjectContext
        if managedObjectContext.hasChanges && !managedObjectContext.save(&error) {
            // Template-style hard failure: replace with real error handling
            // before shipping. At minimum, log what went wrong.
            NSLog("Unresolved Core Data save error: \(error)")
            abort()
        }
    }

    /// Lazily created managed object context bound to the persistent store coordinator.
    var managedObjectContext: NSManagedObjectContext {
        if _managedObjectContext == nil {
            let coordinator = self.persistentStoreCoordinator
            _managedObjectContext = NSManagedObjectContext()
            _managedObjectContext!.persistentStoreCoordinator = coordinator
        }
        return _managedObjectContext!
    }
    // Backing storage for managedObjectContext (manual lazy-init pattern).
    var _managedObjectContext: NSManagedObjectContext? = nil

    /// Lazily loaded managed object model from the app bundle's .momd resource.
    /// Crashes (force unwrap) if the model file is missing — a packaging error.
    var managedObjectModel: NSManagedObjectModel {
        if _managedObjectModel == nil {
            let modelURL = NSBundle.mainBundle().URLForResource("iOSSwiftSimpleAVCamera", withExtension: "momd")
            _managedObjectModel = NSManagedObjectModel(contentsOfURL: modelURL!)
        }
        return _managedObjectModel!
    }
    // Backing storage for managedObjectModel.
    var _managedObjectModel: NSManagedObjectModel? = nil

    /// Lazily created persistent store coordinator backed by a SQLite store
    /// in the app's Documents directory.
    var persistentStoreCoordinator: NSPersistentStoreCoordinator {
        if _persistentStoreCoordinator == nil {
            let storeURL = self.applicationDocumentsDirectory.URLByAppendingPathComponent("iOSSwiftSimpleAVCamera.sqlite")
            var error: NSError? = nil
            _persistentStoreCoordinator = NSPersistentStoreCoordinator(managedObjectModel: self.managedObjectModel)
            if _persistentStoreCoordinator!.addPersistentStoreWithType(NSSQLiteStoreType, configuration: nil, URL: storeURL, options: nil, error: &error) == nil {
                // Store could not be opened (e.g. incompatible model). Log before aborting.
                NSLog("Unresolved error adding persistent store: \(error)")
                abort()
            }
        }
        return _persistentStoreCoordinator!
    }
    // Backing storage for persistentStoreCoordinator.
    var _persistentStoreCoordinator: NSPersistentStoreCoordinator? = nil

    /// The app's Documents directory in the user domain.
    var applicationDocumentsDirectory: NSURL {
        let urls = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
        // `last` replaces the original urls[urls.endIndex-1] index arithmetic.
        return urls.last as! NSURL
    }
}
// MARK: - CameraSessionController.swift
import UIKit
import AVFoundation
import CoreMedia
import CoreImage
/// Delegate protocol for receiving video frames from a CameraSessionController.
@objc protocol CameraSessionControllerDelegate {
/// Called for each video sample buffer the capture output produces
/// (invoked on the controller's session queue — see addVideoOutput).
optional func cameraSessionDidOutputSampleBuffer(sampleBuffer: CMSampleBuffer!)
}
/// Owns an AVCaptureSession configured with a back-camera video input,
/// a video-data output (frames forwarded to `sessionDelegate`), and a
/// still-image output for JPEG capture. All session work runs on a
/// private serial queue.
class CameraSessionController: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    var session: AVCaptureSession!
    var sessionQueue: dispatch_queue_t!
    var videoDeviceInput: AVCaptureDeviceInput!
    var videoDeviceOutput: AVCaptureVideoDataOutput!
    var stillImageOutput: AVCaptureStillImageOutput!
    var runtimeErrorHandlingObserver: AnyObject?
    var sessionDelegate: CameraSessionControllerDelegate?

    /// Returns the capture device with the given media type at `position`,
    /// falling back to the first device of that media type.
    class func deviceWithMediaType(mediaType: NSString, position: AVCaptureDevicePosition) -> AVCaptureDevice {
        let devices: NSArray = AVCaptureDevice.devicesWithMediaType(mediaType as String)
        var captureDevice: AVCaptureDevice = devices.firstObject as! AVCaptureDevice
        for object: AnyObject in devices {
            let device = object as! AVCaptureDevice
            if device.position == position {
                captureDevice = device
                break
            }
        }
        return captureDevice
    }

    override init() {
        super.init()
        self.session = AVCaptureSession()
        self.authorizeCamera()
        self.sessionQueue = dispatch_queue_create("CameraSessionController Session", DISPATCH_QUEUE_SERIAL)
        // Configure inputs/outputs off the main thread, on the session queue.
        dispatch_async(self.sessionQueue, {
            self.session.beginConfiguration()
            self.addVideoInput()
            self.addVideoOutput()
            self.addStillImageOutput()
            self.session.commitConfiguration()
        })
    }

    /// Requests camera permission; shows an alert on the main queue if denied.
    func authorizeCamera() {
        AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo, completionHandler: {
            (granted: Bool) -> Void in
            if !granted {
                // UI work must happen on the main queue.
                dispatch_async(dispatch_get_main_queue(), {
                    UIAlertView(
                        title: "Could not use camera!",
                        message: "This application does not have permission to use camera. Please update your privacy settings.",
                        delegate: self,
                        cancelButtonTitle: "OK").show()
                })
            }
        })
    }

    /// Attaches the back camera as the session's video input.
    /// - returns: true if the input was added successfully.
    func addVideoInput() -> Bool {
        var success: Bool = false
        var error: NSError?
        let videoDevice: AVCaptureDevice = CameraSessionController.deviceWithMediaType(AVMediaTypeVideo, position: AVCaptureDevicePosition.Back)
        self.videoDeviceInput = AVCaptureDeviceInput.deviceInputWithDevice(videoDevice, error: &error) as! AVCaptureDeviceInput
        if error == nil {
            if self.session.canAddInput(self.videoDeviceInput) {
                self.session.addInput(self.videoDeviceInput)
                success = true
            }
        }
        return success
    }

    /// Adds a video-data output that delivers sample buffers to this object
    /// on the session queue, discarding late frames.
    func addVideoOutput() {
        self.videoDeviceOutput = AVCaptureVideoDataOutput()
        self.videoDeviceOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDeviceOutput.setSampleBufferDelegate(self, queue: self.sessionQueue)
        if self.session.canAddOutput(self.videoDeviceOutput) {
            self.session.addOutput(self.videoDeviceOutput)
        }
    }

    /// Adds a JPEG still-image output for captureImage(_:).
    func addStillImageOutput() {
        self.stillImageOutput = AVCaptureStillImageOutput()
        self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if self.session.canAddOutput(self.stillImageOutput) {
            self.session.addOutput(self.stillImageOutput)
        }
    }

    /// Starts the session and installs a runtime-error observer that tries
    /// to restart the session after an AVCaptureSessionRuntimeError.
    func startCamera() {
        dispatch_async(self.sessionQueue, {
            // FIX: capture weakly — the original declared a strong
            // `weakSelf` and force-unwrapped it, retaining self for the
            // observer's lifetime.
            weak var weakSelf: CameraSessionController? = self
            // FIX: observe the session, not the dispatch queue — the
            // runtime-error notification is posted with the session as its
            // object, so the original observer never fired.
            self.runtimeErrorHandlingObserver = NSNotificationCenter.defaultCenter().addObserverForName(AVCaptureSessionRuntimeErrorNotification, object: self.session, queue: nil, usingBlock: {
                (note: NSNotification!) -> Void in
                if let strongSelf = weakSelf {
                    dispatch_async(strongSelf.sessionQueue, {
                        // Best-effort restart after a runtime error.
                        strongSelf.session.startRunning()
                    })
                }
            })
            self.session.startRunning()
        })
    }

    /// Stops the session and removes the runtime-error observer if present.
    func teardownCamera() {
        dispatch_async(self.sessionQueue, {
            self.session.stopRunning()
            // FIX: the original force-unwrapped the observer and crashed if
            // teardownCamera() ran before startCamera() had installed it.
            if let observer: AnyObject = self.runtimeErrorHandlingObserver {
                NSNotificationCenter.defaultCenter().removeObserver(observer)
                self.runtimeErrorHandlingObserver = nil
            }
        })
    }

    /// Sets one-shot focus and exposure at `point` (in capture-device
    /// coordinates) where the device supports it. Lock failures are ignored.
    func focusAndExposeAtPoint(point: CGPoint) {
        dispatch_async(self.sessionQueue, {
            let device: AVCaptureDevice = self.videoDeviceInput.device
            var error: NSError?
            // FIX: pass a real error pointer; the original handed a nil,
            // uninitialized NSErrorPointer to lockForConfiguration.
            if device.lockForConfiguration(&error) {
                if device.focusPointOfInterestSupported && device.isFocusModeSupported(AVCaptureFocusMode.AutoFocus) {
                    device.focusPointOfInterest = point
                    device.focusMode = AVCaptureFocusMode.AutoFocus
                }
                if device.exposurePointOfInterestSupported && device.isExposureModeSupported(AVCaptureExposureMode.AutoExpose) {
                    device.exposurePointOfInterest = point
                    device.exposureMode = AVCaptureExposureMode.AutoExpose
                }
                device.unlockForConfiguration()
            }
        })
    }

    /// Captures a still JPEG frame and delivers it through `completion`.
    /// Does nothing when `completion` is nil or the still-image output has
    /// not been set up yet.
    func captureImage(completion: ((image: UIImage?, error: NSError?) -> Void)?) {
        // FIX: the original returned early when completion AND
        // stillImageOutput were BOTH non-nil — the normal case — so a
        // capture never happened. The guard is now the inverse.
        if completion == nil || self.stillImageOutput == nil {
            return
        }
        dispatch_async(self.sessionQueue, {
            self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput.connectionWithMediaType(AVMediaTypeVideo), completionHandler: {
                (imageDataSampleBuffer: CMSampleBuffer?, error: NSError?) -> Void in
                if error != nil {
                    // FIX: propagate the actual error; the original passed
                    // error:nil and also gated this branch on the buffer,
                    // leaving the success branch unreachable.
                    completion!(image: nil, error: error)
                }
                else if imageDataSampleBuffer != nil {
                    let imageData: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                    let image: UIImage = UIImage(data: imageData)!
                    completion!(image: image, error: nil)
                }
            })
        })
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    /// Forwards every video frame to the session delegate, if one is set.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        self.sessionDelegate?.cameraSessionDidOutputSampleBuffer?(sampleBuffer)
    }
}
import UIKit
import CoreMedia
import AVFoundation
/// Hosts the camera: owns a CameraSessionController, shows its session in a
/// square AVCaptureVideoPreviewLayer, and starts/stops the camera with the
/// view's appearance lifecycle.
class CameraViewController: UIViewController, CameraSessionControllerDelegate {

    var cameraSessionController: CameraSessionController!
    var previewLayer: AVCaptureVideoPreviewLayer!

    // MARK: - View lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        self.cameraSessionController = CameraSessionController()
        self.cameraSessionController.sessionDelegate = self
        self.setupPreviewLayer()
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        self.cameraSessionController.startCamera()
    }

    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated)
        self.cameraSessionController.teardownCamera()
    }

    // MARK: - Preview

    /// Builds a square, aspect-filling preview layer centred in the view
    /// and attaches it to the view's layer tree.
    func setupPreviewLayer() {
        let viewBounds = self.view.bounds
        let side = min(viewBounds.size.width, viewBounds.size.height)
        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.cameraSessionController.session)
        self.previewLayer.bounds = CGRectMake(0.0, 0.0, side, side)
        self.previewLayer.position = CGPointMake(CGRectGetMidX(viewBounds), CGRectGetMidY(viewBounds))
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(self.previewLayer)
    }

    // MARK: - CameraSessionControllerDelegate

    /// Per-frame callback — intentionally a no-op in this controller.
    func cameraSessionDidOutputSampleBuffer(sampleBuffer: CMSampleBuffer!) {
    }
}