How do I call CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer?

I am trying to figure out how to call this AVFoundation function in Swift. I spent a lot of time playing with declarations and syntax, and got this far. The compiler is mostly happy, but I have one last problem left.

// NOTE(review): this is the question's code exactly as posted — it does NOT
// compile. `audioBufferList` is declared but never given a value, so taking
// its address with `&audioBufferList` below is what triggers the
// "used before initialization" diagnostics quoted after this snippet.
public func captureOutput(
    captureOutput: AVCaptureOutput!,
    didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
    fromConnection connection: AVCaptureConnection!
) {
    let samplesInBuffer = CMSampleBufferGetNumSamples(sampleBuffer)
    var audioBufferList: AudioBufferList  // declared, never initialized — the source of the error

    // Will receive the retained CMBlockBuffer backing the AudioBufferList.
    var buffer: Unmanaged<CMBlockBuffer>? = nil

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,  // <- compiler rejects the address of an uninitialized var
        UInt(sizeof(audioBufferList.dynamicType)),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )

    // do stuff
}

The compiler complains about the 3rd and 4th arguments:

Address of variable 'audioBufferList' taken before it is initialized

and

Variable 'audioBufferList' used before initialization

So what should I do here?

I am debugging https://stackoverflow.com/a/3776326/2126 , but it is Objective-C. I am trying to translate it to Swift, but ran into this problem.

Any ideas? And if I am approaching this the wrong way entirely, please say so.

+4
5

Note: I tested this while reading samples with AVAssetReader in Swift, and it works. Here is what I ended up with.

// Needs to be initialized somehow, even if we take only the address
// Swift refuses to take the address of an uninitialized variable, so give
// the AudioBufferList a dummy one-buffer value up front; the function
// overwrites it anyway.
var audioBufferList = AudioBufferList(mNumberBuffers: 1,
      mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))

// Receives the retained CMBlockBuffer that owns the sample data.
var buffer: Unmanaged<CMBlockBuffer>? = nil

CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
    sampleBuffer,
    nil,  // bufferListSizeNeededOut: not needed here
    &audioBufferList,
    UInt(sizeof(audioBufferList.dynamicType)),  // pre-Swift-3 spelling of MemoryLayout<...>.size
    nil,  // block buffer allocator: default
    nil,  // block buffer memory allocator: default
    UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
    &buffer
)

// Ensure that the buffer is released automatically.
let buf = buffer!.takeRetainedValue() 

// Create UnsafeBufferPointer from the variable length array starting at audioBufferList.mBuffers
let audioBuffers = UnsafeBufferPointer<AudioBuffer>(start: &audioBufferList.mBuffers,
    count: Int(audioBufferList.mNumberBuffers))

for audioBuffer in audioBuffers {
    // Create UnsafeBufferPointer<Int16> from the buffer data pointer
    // NOTE(review): assumes the track was decoded to 16-bit PCM — confirm
    // against the reader's output settings before relying on Int16 here.
    var samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(audioBuffer.mData),
        count: Int(audioBuffer.mDataByteSize)/sizeof(Int16))

    for sample in samples {
        // ....
    }
}
+4

Swift3:

/// Decodes the first audio track of `audioFileUrl` to linear PCM and iterates
/// over every `Int16` amplitude value in every buffer of every sample buffer.
/// (Swift 3 dialect, matching the rest of this answer.)
func loopAmplitudes(audioFileUrl: URL) {

    let asset = AVAsset(url: audioFileUrl)

    let reader = try! AVAssetReader(asset: asset)

    let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]

    // Decode to uncompressed linear PCM so the raw bytes are amplitude samples.
    let settings = [
        AVFormatIDKey : kAudioFormatLinearPCM
    ]

    let readerOutput = AVAssetReaderTrackOutput(track: track, outputSettings: settings)
    reader.add(readerOutput)
    reader.startReading()

    while let sampleBuffer = readerOutput.copyNextSampleBuffer() {

        // Must be initialized before its address is taken; the call overwrites it.
        var audioBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))
        var blockBuffer: CMBlockBuffer?

        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
            sampleBuffer,
            nil,
            &audioBufferList,
            MemoryLayout<AudioBufferList>.size,
            nil,
            nil,
            kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
            &blockBuffer
        );

        let buffers = UnsafeBufferPointer<AudioBuffer>(start: &audioBufferList.mBuffers, count: Int(audioBufferList.mNumberBuffers))

        for audioBuffer in buffers {

            // BUG FIX: the original read `audioBufferList.mBuffers.mData`
            // inside this loop, which always points at the FIRST buffer no
            // matter which one is being iterated. Use the current buffer's
            // own data pointer, and skip it if it happens to be nil instead
            // of force-unwrapping.
            guard let rawData = audioBuffer.mData else { continue }

            let samplesCount = Int(audioBuffer.mDataByteSize) / MemoryLayout<Int16>.size
            let samplesPointer = rawData.bindMemory(to: Int16.self, capacity: samplesCount)
            let samples = UnsafeMutableBufferPointer<Int16>(start: samplesPointer, count: samplesCount)

            for sample in samples {

                //do something with you sample (which is Int16 amplitude value)

            }
        }
    }
}
+3

Alternatively, if all you need is the raw data rather than an AudioBufferList (which was my case), you can read the sample buffer's block buffer directly:

// Variant that skips the AudioBufferList entirely and walks the raw bytes of
// the sample buffer's CMBlockBuffer. (Pre-Swift-3 dialect: `println`,
// nil-initializable pointers.)
public func captureOutput(
    captureOutput: AVCaptureOutput!,
    didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
    fromConnection connection: AVCaptureConnection!
) {
    let samplesInBuffer = CMSampleBufferGetNumSamples(sampleBuffer)
    self.currentZ = Double(samplesInBuffer)

    // The block buffer holds the contiguous raw audio bytes for this sample buffer.
    let buffer: CMBlockBufferRef = CMSampleBufferGetDataBuffer(sampleBuffer)

    var lengthAtOffset: size_t = 0
    var totalLength: size_t = 0
    var data: UnsafeMutablePointer<Int8> = nil

    if( CMBlockBufferGetDataPointer( buffer, 0, &lengthAtOffset, &totalLength, &data ) != noErr ) {
        println("some sort of error happened")
    } else {
        // Step by 2 bytes per sample — presumably 16-bit PCM (see the note
        // after this snippet about totalLength being twice the sample count).
        for i in stride(from: 0, to: totalLength, by: 2) {
            // do stuff
        }
    }
}

Note that, testing on an iPad Mini, CMSampleBufferGetNumSamples reported 1024 samples per buffer while totalLength came back as 2048 Int8 values. The data is really 16-bit samples, so reinterpreting the 2048 bytes as Int16 yields exactly the 1024 samples.

+1

This approach works for me:

// Pre-Swift-3 example: open a media item's asset with AVAssetReader, copy its
// first sample buffer, and fill an AudioBufferList from it.
let musicUrl: NSURL = mediaItemCollection.items[0].valueForProperty(MPMediaItemPropertyAssetURL) as! NSURL
let asset: AVURLAsset = AVURLAsset(URL: musicUrl, options: nil)
// nil outputSettings: samples are vended in the track's stored format.
let assetOutput = AVAssetReaderTrackOutput(track: asset.tracks[0] as! AVAssetTrack, outputSettings: nil)

var error : NSError?

let assetReader: AVAssetReader = AVAssetReader(asset: asset, error: &error)

if error != nil {
    print("Error asset Reader: \(error?.localizedDescription)")
}

assetReader.addOutput(assetOutput)
assetReader.startReading()

let sampleBuffer: CMSampleBufferRef = assetOutput.copyNextSampleBuffer()

// Dummy-initialized so its address can be taken; the call overwrites it.
var audioBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))
var blockBuffer: Unmanaged<CMBlockBuffer>? = nil


CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
    sampleBuffer,
    nil,
    &audioBufferList,
    sizeof(audioBufferList.dynamicType), // instead of UInt(sizeof(audioBufferList.dynamicType))
    nil,
    nil,
    UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
    &blockBuffer
)
0

Note that allocating an AudioBufferList with room for a single AudioBuffer, as the other answers do, only works when the sample buffer happens to contain exactly one audio buffer — which is the case for AVCaptureSession output, but is not something Apple guarantees.

CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer can first be asked how large the AudioBufferList needs to be, so you can allocate an AudioBufferList of exactly the required size before filling it.

Here is the pattern in C++ (sorry, not Swift); translating it should be straightforward.

// Robust two-call pattern: call once with only the size-out parameter to
// learn how big the AudioBufferList must be, allocate exactly that much,
// then call again to fill it. (`err` and `sampleBuffer` are declared in
// surrounding code not shown here.)

// ask the function how big the audio buffer list should be for this
// sample buffer ref
size_t requiredABLSize = 0;
err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
                      &requiredABLSize,
                      NULL,
                      NULL,
                      kCFAllocatorSystemDefault,
                      kCFAllocatorSystemDefault,
                      kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                      NULL);

// allocate an audio buffer list of the required size
AudioBufferList* audioBufferList = (AudioBufferList*) malloc(requiredABLSize);
// ensure that blockBuffer is NULL in case the function fails
CMBlockBufferRef blockBuffer = NULL;

// now let the function allocate fill in the ABL for you
err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
                      NULL,
                      audioBufferList,
                      requiredABLSize,
                      kCFAllocatorSystemDefault,
                      kCFAllocatorSystemDefault,
                      kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                      &blockBuffer);

// if we succeeded...
if (err == noErr) {
   // la la la... read your samples...
}

// release the allocated block buffer
// (the function returns it retained — see "Retained" in its name)
if (blockBuffer != NULL) {
    CFRelease(blockBuffer);
    blockBuffer = NULL;
}

// release the allocated ABL
if (audioBufferList != NULL) {
    free(audioBufferList);
    audioBufferList = NULL;
}

Translating this into Swift is left as an exercise for the reader.

0
source

All Articles