How to capture depth data from a camera in iOS 11 and Swift 4?

I am trying to get depth data from the camera in iOS 11 with AVDepthData, but when I configure a photo output with AVCapturePhotoCaptureDelegate, photo.depthData is nil.

So I tried configuring AVCaptureDepthDataOutput with an AVCaptureDepthDataOutputDelegate instead, but I don't know how to capture a depth photo that way.

Does anyone know how to get an image from AVDepthData?

Edit:

Here is the code I tried:

// delegates: AVCapturePhotoCaptureDelegate & AVCaptureDepthDataOutputDelegate

@IBOutlet var image_view: UIImageView!
@IBOutlet var capture_button: UIButton!

var captureSession: AVCaptureSession?
var sessionOutput: AVCapturePhotoOutput?
var depthOutput: AVCaptureDepthDataOutput?
var previewLayer: AVCaptureVideoPreviewLayer?

@IBAction func capture(_ sender: Any) {

    self.sessionOutput?.capturePhoto(with: AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]), delegate: self)

}

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

    self.previewLayer?.removeFromSuperlayer()
    self.image_view.image = UIImage(data: photo.fileDataRepresentation()!)

    let depth_map = photo.depthData?.depthDataMap
    print("depth_map:", depth_map) // is nil

}

func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {

    print("depth data") // never called

}

override func viewDidLoad() {
    super.viewDidLoad()

    self.captureSession = AVCaptureSession()
    self.captureSession?.sessionPreset = .photo

    self.sessionOutput = AVCapturePhotoOutput()
    self.depthOutput = AVCaptureDepthDataOutput()
    self.depthOutput?.setDelegate(self, callbackQueue: DispatchQueue(label: "depth queue"))

    do {

        let device = AVCaptureDevice.default(for: .video)
        let input = try AVCaptureDeviceInput(device: device!)
        if(self.captureSession?.canAddInput(input))!{
            self.captureSession?.addInput(input)

            if(self.captureSession?.canAddOutput(self.sessionOutput!))!{
                self.captureSession?.addOutput(self.sessionOutput!)


                if(self.captureSession?.canAddOutput(self.depthOutput!))!{
                    self.captureSession?.addOutput(self.depthOutput!)

                    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession!)
                    self.previewLayer?.frame = self.image_view.bounds
                    self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                    self.previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                    self.image_view.layer.addSublayer(self.previewLayer!)

                }

            }

        }

    } catch {}

    self.captureSession?.startRunning()

}

I am trying two things here: the photo capture path, where the depth data comes back nil, and the streaming path, where the depth delegate method is never called.

Does anyone know what I'm missing?

+6
4 answers

First, you need to use the dual camera, otherwise you will get no depth data.

let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)

And keep a reference to your queue:

let dataOutputQueue = DispatchQueue(label: "data queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)

You will probably also want to synchronize the video and depth data:

var outputSynchronizer: AVCaptureDataOutputSynchronizer?

Then you can synchronize the two outputs in your viewDidLoad() method, like this:

if sessionOutput?.isDepthDataDeliverySupported == true {
    sessionOutput?.isDepthDataDeliveryEnabled = true
    depthDataOutput?.connection(with: .depthData)?.isEnabled = true
    depthDataOutput?.isFilteringEnabled = true
    outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [sessionOutput!, depthDataOutput!])
    outputSynchronizer!.setDelegate(self, queue: self.dataOutputQueue)
}
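
The synchronized frames then arrive through the synchronizer's own delegate method rather than through depthDataOutput(_:didOutput:timestamp:connection:). A minimal sketch, assuming your view controller adopts AVCaptureDataOutputSynchronizerDelegate and depthDataOutput is the output configured above:

func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {

    // Pull the depth data matching this frame, if any was delivered
    if let syncedDepth = synchronizedDataCollection.synchronizedData(for: depthDataOutput!) as? AVCaptureSynchronizedDepthData, !syncedDepth.depthDataWasDropped {
        print("depth frame:", syncedDepth.depthData.depthDataMap)
    }

}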

I would highly recommend watching WWDC session 507; they also provide a full sample app that does exactly what you want.

https://developer.apple.com/videos/play/wwdc2017/507/

+3

To give more details to @klinger's answer, here is what you need to do to get the depth data for each pixel. I wrote some comments, hope it helps!

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

    //## Convert Disparity to Depth ##

    guard let depthData = photo.depthData?.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32) else { return }
    let depthDataMap = depthData.depthDataMap //AVDepthData -> CVPixelBuffer

    //## Data Analysis ##

    // Useful data
    let width = CVPixelBufferGetWidth(depthDataMap) //768 on an iPhone 7+
    let height = CVPixelBufferGetHeight(depthDataMap) //576 on an iPhone 7+
    CVPixelBufferLockBaseAddress(depthDataMap, CVPixelBufferLockFlags(rawValue: 0))

    // Convert the base address to a safe pointer of the appropriate type
    let floatBuffer = unsafeBitCast(CVPixelBufferGetBaseAddress(depthDataMap), to: UnsafeMutablePointer<Float32>.self)

    // Read the data (returns a Float32 distance, in meters)
    // The buffer is row-major: valid indices are 0 ..< width * height,
    // and the value for pixel (x, y) lives at index y * width + x
    let distanceAtXYPoint = floatBuffer[y * width + x]

    CVPixelBufferUnlockBaseAddress(depthDataMap, CVPixelBufferLockFlags(rawValue: 0))

}
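
To answer the "does anyone have an image from AVDepthData" part of the question: a CIImage can be created straight from the depth CVPixelBuffer and rendered into a UIImage. A minimal sketch (this helper is my own addition, not part of the answer above; the raw values are distances in meters, so you may want to normalize them before display):

import AVFoundation
import CoreImage
import UIKit

// Hypothetical helper: render a depth map as a grayscale UIImage
func depthImage(from depthData: AVDepthData) -> UIImage? {
    let converted = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
    let ciImage = CIImage(cvPixelBuffer: converted.depthDataMap)
    guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}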
+2

There are two ways to get depth data, and you are trying to do both at once:

  • Still capture: photo.depthData in photoOutput(_:didFinishProcessingPhoto:error:). Why this comes back nil for you is explained below.
  • Streaming: AVCaptureDepthDataOutput delivering to depthDataOutput(_:didOutput:timestamp:connection:). I am not sure why depthDataOutput(_:didOutput:timestamp:connection:) is never called in your case; see the note at the end.

Either way, #1 is the simpler route if you just want a depth photo. The catch is that depth delivery has to be enabled both on the photo output and in the per-capture settings:

@IBAction func capture(_ sender: Any) {

    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    settings.isDepthDataDeliveryEnabled = true
    self.sessionOutput?.capturePhoto(with: settings, delegate: self)

}

// ...

override func viewDidLoad() {
    // ...
    self.sessionOutput = AVCapturePhotoOutput()
    // Enable depth delivery only after the output is attached to the session,
    // and only when the device actually supports it:
    if self.sessionOutput?.isDepthDataDeliverySupported == true {
        self.sessionOutput?.isDepthDataDeliveryEnabled = true
    }
    // ...
}

With both of those enabled, depth_map should no longer be nil. (You also still need a depth-capable device, such as the dual camera, as the other answers point out.)

For #2, I'm not quite sure why depthDataOutput(_:didOutput:timestamp:connection:) is not being called, but you could implement depthDataOutput(_:didDrop:timestamp:connection:reason:) to see whether the depth data is being dropped for some reason.
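
A minimal sketch of that callback, for reference (the reason tells you whether frames were late, out of buffers, and so on):

func depthDataOutput(_ output: AVCaptureDepthDataOutput, didDrop depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection, reason: AVCaptureOutput.DataDroppedReason) {

    print("dropped depth data, reason:", reason.rawValue)

}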

+1

The way you create your capture device is wrong: you must use the dual camera.

In Objective-C it looks like this:

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
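
The Swift 4 equivalent, for completeness (the same device selection as in the first answer):

let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)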

0
