Capturing volume levels with AVCaptureAudioDataOutputSampleBufferDelegate in swift
Asked Answered
C

4

8

I'm trying to read live volume levels using AVCaptureDevice etc. It compiles and runs, but the values just seem to be random and I keep getting overflow errors as well.

EDIT:

also is it normal for the RMS range to be 0 to about 20000?

        // NOTE(review): question's setup fragment. The `try` calls below appear
        // without an enclosing do/catch, so this presumably lives inside a
        // throwing function or a `do` block not shown here — confirm against the
        // full source. `captureSession` and `GlobalUserInitiatedQueue` are
        // defined elsewhere in the class.
        if let audioCaptureDevice : AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio){



            try audioCaptureDevice.lockForConfiguration()

            let audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
            audioCaptureDevice.unlockForConfiguration()

            // Attach the microphone input to the session if the session accepts it.
            if(captureSession.canAddInput(audioInput)){
                captureSession.addInput(audioInput)
                print("added input")
            }


            let audioOutput = AVCaptureAudioDataOutput()

            // `self` must implement AVCaptureAudioDataOutputSampleBufferDelegate;
            // sample buffers will be delivered on the supplied queue.
            audioOutput.setSampleBufferDelegate(self, queue: GlobalUserInitiatedQueue)

            if(captureSession.canAddOutput(audioOutput)){
                captureSession.addOutput(audioOutput)
                print("added output")
            }


            // startRunning() blocks, so start the session off the UI queue.
            dispatch_async(GlobalUserInitiatedQueue) {
                print("starting captureSession")
                self.captureSession.startRunning()
            }
        }

...

// NOTE(review): the question's original snippet — it does NOT compile as-is.
// Defects flagged inline; the answers below correct them. Also note the
// function's closing brace is missing from the quoted code, and `let` is not
// valid on the `didOutputSampleBuffer` parameter.
func captureOutput(captureOutput: AVCaptureOutput!, let didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {

    // Needs to be initialized somehow, even if we take only the address
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
    //this needs to be in method otherwise only runs 125 times?
    var blockBuffer: CMBlockBuffer?

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        sizeof(audioBufferList.dynamicType),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        // BUG: `buffer` is undefined — the variable declared above is
        // `blockBuffer`, so this should read `&blockBuffer`.
        &buffer
    )



    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)

    for buffer in abl{
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize)/sizeof(Int16))


        var sum:Int = 0
        for sample in samples {
            // BUG: `sample*sample` multiplies in Int16 BEFORE widening to Int,
            // so any |sample| > 181 traps — this is the "overflow errors" the
            // question mentions. Widen first: Int(sample) * Int(sample).
            sum = sum + Int(sample*sample)

        }

        // BUG: `count` is undefined here — should be Double(samples.count).
        // `rms` is also computed but never used.
        let rms = sqrt(Double(sum)/count)
    }
Cosmonautics answered 9/10, 2015 at 5:19 Comment(1)
I'm not used to playing with audio buffers and stuff; nonetheless I'd like to get an average audio level like you. Could you explain in more detail what's happening?Dibrin
C
2

It appears I have it working. I cast sample to an Int64 before doing any manipulations.

        // NOTE(review): fragment from the answer — assumes `abl`
        // (UnsafeMutableAudioBufferListPointer) and `self.volLevel` are in
        // scope; braces are unbalanced because the enclosing function is not
        // shown.
        for buffer in abl{
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize)/sizeof(Int16))

        // Accumulate in Int64: widening each Int16 BEFORE squaring is what
        // fixes the overflow trap from the question's code.
        var sum:Int64 = 0

        for sample in samples {
         let s = Int64(sample)
         sum +=s*s
        }

        // RMS = sqrt(mean of squared samples); the label update must run on
        // the main queue.
        dispatch_async(dispatch_get_main_queue()) {

            self.volLevel.text = String(sqrt(Float(sum/Int64(samples.count))))
        }
Cosmonautics answered 9/10, 2015 at 8:24 Comment(0)
H
8

Use AVCaptureAudioDataOutputSampleBufferDelegate's method

captureOutput(captureOutput: AVCaptureOutput!, let didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)

to get AVCaptureConnection from last parameter.

Then get AVCaptureAudioChannel from connection.audioChannels

Then you can get volume levels from it:

audioChannel.averagePowerLevel
audioChannel.peakHoldLevel
Highhanded answered 11/3, 2019 at 10:52 Comment(0)
D
6

Hey I don't understand much of it but here is a working Swift 5 version:

/// AVCaptureAudioDataOutputSampleBufferDelegate callback (Swift 5):
/// extracts the 16-bit PCM samples from the sample buffer and prints the
/// RMS (root-mean-square) level of each channel buffer.
/// NOTE(review): assumes the capture format is 16-bit integer PCM — confirm
/// against the AVCaptureAudioDataOutput audio settings.
func captureOutput(_            output      : AVCaptureOutput,
                   didOutput    sampleBuffer: CMSampleBuffer,
                   from         connection  : AVCaptureConnection) {

    var buffer: CMBlockBuffer? = nil

    // Needs to be initialized somehow, even if we take only the address
    let convenienceBuffer = AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil)
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
                                          mBuffers: convenienceBuffer)

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        bufferListSizeNeededOut: nil,
        bufferListOut: &audioBufferList,
        bufferListSize: MemoryLayout<AudioBufferList>.size(ofValue: audioBufferList),
        blockBufferAllocator: nil,
        blockBufferMemoryAllocator: nil,
        flags: UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        blockBufferOut: &buffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)

    for buffer in abl {
        let originRawPtr = buffer.mData
        let ptrDataSize = Int(buffer.mDataByteSize)

        // BUG FIX: the number of Int16 samples is bytes / MemoryLayout<Int16>.size
        // (i.e. bytes / 2), NOT bytes / Int16.bitWidth (bytes / 16) — the
        // original only visited 1/8 of the samples, skewing the RMS.
        let sampleCount = ptrDataSize / MemoryLayout<Int16>.size

        // From raw pointer to typed Int16 pointer.
        // BUG FIX: bindMemory's `capacity` is an element count, not a byte count.
        let buffPtrInt16 = originRawPtr?.bindMemory(to: Int16.self, capacity: sampleCount)

        // From pointer typed Int16 to pointer of [Int16]
        // So we can iterate on it simply
        let samples = UnsafeMutableBufferPointer<Int16>(start: buffPtrInt16,
                                                        count: sampleCount)

        // Guard against an empty buffer to avoid dividing by zero below.
        guard !samples.isEmpty else { continue }

        // Average of each sample squared, then root squared
        let sumOfSquaredSamples = samples.map(Float.init).reduce(0) { $0 + $1*$1 }
        let averageOfSomething = sqrt(sumOfSquaredSamples / Float(samples.count))

        DispatchQueue.main.async {
            print("Calulcus of something: \(String(averageOfSomething))" )
        }
    }
}
Dibrin answered 13/3, 2020 at 17:47 Comment(0)
C
2

It appears I have it working. I cast sample to an Int64 before doing any manipulations.

        // NOTE(review): duplicate of the earlier answer fragment — assumes
        // `abl` (UnsafeMutableAudioBufferListPointer) and `self.volLevel` are
        // in scope; braces are unbalanced because the enclosing function is
        // not shown.
        for buffer in abl{
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize)/sizeof(Int16))

        // Widen each Int16 to Int64 BEFORE squaring to avoid the overflow
        // trap the question hit.
        var sum:Int64 = 0

        for sample in samples {
         let s = Int64(sample)
         sum +=s*s
        }

        // RMS on the main queue since it updates UI.
        dispatch_async(dispatch_get_main_queue()) {

            self.volLevel.text = String(sqrt(Float(sum/Int64(samples.count))))
        }
Cosmonautics answered 9/10, 2015 at 8:24 Comment(0)
C
1

I've played with your example. This is a full working swift 2 code snippet:

// also define a variable in class scope, otherwise captureOutput will not be called
    var session : AVCaptureSession!



    /// Builds an audio-only AVCaptureSession (Swift 2 API), wires the default
    /// microphone as input and an AVCaptureAudioDataOutput as output with
    /// `self` as sample-buffer delegate, then starts the session off the main
    /// queue. The session is stored in `self.session` (class scope) so the
    /// delegate keeps receiving callbacks.
    func startCapture() {
    if let device : AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio){

        do {
            self.session = AVCaptureSession()
            try device.lockForConfiguration()

            let audioInput = try AVCaptureDeviceInput(device: device)
            device.unlockForConfiguration()

            if(self.session.canAddInput(audioInput)){
                self.session.addInput(audioInput)
                print("added input")
            }


            let audioOutput = AVCaptureAudioDataOutput()

            // Sample buffers are delivered on a background queue; the delegate
            // must hop to the main queue itself for any UI work.
            audioOutput.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))

            if(self.session.canAddOutput(audioOutput)){
                self.session.addOutput(audioOutput)
                print("added output")
            }


            // startRunning() blocks, so start the session off the UI queue.
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
                print("starting captureSession")
                self.session.startRunning()
            }

        } catch {
            // BUG FIX: the original empty catch silently swallowed device
            // configuration/input errors, making failures undiagnosable.
            print("audio capture setup failed: \(error)")
        }
    }
}

/// AVCaptureAudioDataOutputSampleBufferDelegate callback (Swift 2 API):
/// pulls the 16-bit PCM samples out of the sample buffer and prints the RMS
/// level of each channel buffer on the main queue.
/// NOTE(review): assumes 16-bit integer PCM capture format — confirm against
/// the output's audio settings.
func captureOutput(captureOutput: AVCaptureOutput!, let didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {

    var buffer: CMBlockBuffer? = nil

    // Needs to be initialized somehow, even if we take only the address
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
                                          mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        sizeof(audioBufferList.dynamicType),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)

    for buffer in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
                                                        count: Int(buffer.mDataByteSize)/sizeof(Int16))

        // BUG FIX: skip empty buffers — the original divided by
        // samples.count unconditionally and would crash on zero.
        if samples.count == 0 { continue }

        // Widen to Int64 before squaring so sample*sample cannot overflow.
        var sum:Int64 = 0

        for sample in samples {
            let s = Int64(sample)
            sum = (sum + s*s)
        }

        // BUG FIX: divide in floating point before sqrt — the original's
        // integer division (sum/Int64(count)) truncated the mean first.
        let rms = sqrt(Double(sum)/Double(samples.count))

        dispatch_async(dispatch_get_main_queue()) {

            print( String(rms))
        }
    }
}
Convulsant answered 9/6, 2016 at 15:19 Comment(0)

© 2022 - 2024 — McMap. All rights reserved.