Crash when recording: "required condition is false: format.sampleRate == hwFormat.sampleRate" after a WebRTC call

I can record normally, but after a WebRTC call, I get a crash:

required condition is false: format.sampleRate == hwFormat.sampleRate

Here is how I start recording and install the tap:

func startRecord() {
    self.filePath = nil
        
    print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, options: .mixWithOthers)
    } catch {
        print("======== Error setting setCategory \(error.localizedDescription)")
    }
    do {
        try session.setPreferredSampleRate(44100.0)
    } catch {
        print("======== Error setting rate \(error.localizedDescription)")
    }
    do {
        try session.setPreferredIOBufferDuration(0.005)
    } catch {
        print("======== Error IOBufferDuration \(error.localizedDescription)")
    }
    do {
        try session.setActive(true, options: .notifyOthersOnDeactivation)
    } catch {
        print("========== Error starting session \(error.localizedDescription)")
    }
    let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16,
        sampleRate: 44100.0,
    //            sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,
        channels: 1,
        interleaved: true)
    audioEngine.connect(audioEngine.inputNode, to: mixer, format: format) // crashes here after a WebRTC call (hardware format becomes 48000 Hz)
    audioEngine.connect(mixer, to: audioEngine.mainMixerNode, format: format)

    let dir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! as String
    filePath =  dir.appending("/\(UUID.init().uuidString).wav")

    _ = ExtAudioFileCreateWithURL(URL(fileURLWithPath: filePath!) as CFURL,
        kAudioFileWAVEType,(format?.streamDescription)!,nil,AudioFileFlags.eraseFile.rawValue,&outref)

    mixer.installTap(onBus: 0, bufferSize: AVAudioFrameCount((format?.sampleRate)!), format: format, block: { (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in

        let audioBuffer : AVAudioBuffer = buffer
        _ = ExtAudioFileWrite(self.outref!, buffer.frameLength, audioBuffer.audioBufferList)
    })

    try! audioEngine.start()
    startMP3Rec(path: filePath!, rate: 128)
}

func stopRecord() {
    self.audioFilePlayer.stop()
    self.audioEngine.stop()
    self.mixer.removeTap(onBus: 0)

    self.stopMP3Rec()
    ExtAudioFileDispose(self.outref!)

    try? AVAudioSession.sharedInstance().setActive(false)
}
    
func startMP3Rec(path: String, rate: Int32) {
    self.isMP3Active = true
    var total = 0
    var read = 0
    var write: Int32 = 0

    let mp3path = path.replacingOccurrences(of: "wav", with: "mp3")
    var pcm: UnsafeMutablePointer<FILE> = fopen(path, "rb")
    fseek(pcm, 4*1024, SEEK_CUR)
    let mp3: UnsafeMutablePointer<FILE> = fopen(mp3path, "wb")
    let PCM_SIZE: Int = 8192
    let MP3_SIZE: Int32 = 8192
    let pcmbuffer = UnsafeMutablePointer<Int16>.allocate(capacity: Int(PCM_SIZE*2))
    let mp3buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: Int(MP3_SIZE))

    let lame = lame_init()
    lame_set_num_channels(lame, 1)
    lame_set_mode(lame, MONO)
    lame_set_in_samplerate(lame, 44100)
    lame_set_brate(lame, rate)
    lame_set_VBR(lame, vbr_off)
    lame_init_params(lame)

    DispatchQueue.global(qos: .default).async {
        while true {
            pcm = fopen(path, "rb")
            fseek(pcm, 4*1024 + total, SEEK_CUR)
            read = fread(pcmbuffer, MemoryLayout<Int16>.size, PCM_SIZE, pcm)
            if read != 0 {
                write = lame_encode_buffer(lame, pcmbuffer, nil, Int32(read), mp3buffer, MP3_SIZE)
                fwrite(mp3buffer, Int(write), 1, mp3)
                total += read * MemoryLayout<Int16>.size
                fclose(pcm)
            } else if !self.isMP3Active {
                _ = lame_encode_flush(lame, mp3buffer, MP3_SIZE)
                _ = fwrite(mp3buffer, Int(write), 1, mp3)
                break
            } else {
                fclose(pcm)
                usleep(50)
            }
        }
        lame_close(lame)
        fclose(mp3)
        fclose(pcm)
        self.filePathMP3 = mp3path
    }
}

func stopMP3Rec() {
    self.isMP3Active = false
}

The first time I run the app, I log the last format using:

print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")

This prints 0, and recording works normally.

On the second run it prints 44100, and recording still works.

But after a WebRTC call it prints 48000, and the app crashes on this line:

self.audioEngine.connect(self.audioEngine.inputNode, to: self.mixer, format: format)

I don't want to use the 48000 format. When I instead set the sample rate from the hardware format:

sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,

the recorded output is hard to hear; I cannot recognize my voice.

So I think 44100 is the best choice.

How do I solve this?

Steading answered 6/4, 2021 at 15:18 Comment(0)

This line is the bug:

let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, ...

AVAudioCommonFormat.pcmFormatInt16 does not work here by default; the input node's hardware format is Float32.

You should use .pcmFormatFloat32 instead.
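
For example, a minimal sketch (assuming a plain AVAudioEngine setup) that taps with the node's own format, so the tap can never disagree with the hardware format:

let inputNode = audioEngine.inputNode
// The node's native format: Float32 at the current device rate (e.g. 48000 after a call).
let hwFormat = inputNode.outputFormat(forBus: 0)
inputNode.installTap(onBus: 0, bufferSize: 1024, format: hwFormat) { buffer, _ in
    // buffer.format always matches the hardware; downsample later if 44100 is required
    print(buffer.format)
}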


And the Xcode message points right at the problem. The crash line is

self.audioEngine.connect(self.audioEngine.inputNode, to: self.mixer, format: format)

You can confirm the mismatch by printing mixer.inputFormat(forBus: 0).

After the WebRTC call the actual device sample rate is 48000; you can get 44100 by converting.

Just use AVAudioConverter to downsample the audio buffer:

let input = engine.inputNode
let bus = 0
let inputFormat = input.outputFormat(forBus: bus)

guard let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true),
      let converter = AVAudioConverter(from: inputFormat, to: outputFormat) else {
    return
}

// `buffer` is the AVAudioPCMBuffer delivered by installTap, and `inputCallback`
// is an AVAudioConverterInputBlock that hands it to the converter
// (see the full tap in the answer below).
if let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat,
                                          frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate)) {
    var error: NSError?
    let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
    assert(status != .error)
    print(convertedBuffer.format)
}
Sentimentalize answered 9/4, 2021 at 15:38 Comment(2)
My crash also happens with pcmFormatFloat32. What is the difference between pcmFormatInt16 and pcmFormatFloat32? Can you shed some light on why we should use pcmFormatFloat32? — Knurly
When sampling, both sample rate and bit depth matter. pcmFormatFloat32 and pcmFormatInt16 describe the bit depth; pcmFormatFloat32 is more accurate than pcmFormatInt16. — Sentimentalize
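
To make the bit-depth point concrete, here is a small illustrative sketch (the 44100 Hz mono values are only examples): the two common formats describe the same stream at different depths, and AVAudioConverter can translate bit depth as well as sample rate.

// The same 44100 Hz mono stream described at two bit depths.
let int16Format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 44100, channels: 1, interleaved: true)!
let float32Format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)!
// An AVAudioConverter can change bit depth as well as sample rate (returns nil if unsupported).
let depthConverter = AVAudioConverter(from: float32Format, to: int16Format)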

Only saw this in the iOS simulator:

I had been working on a Logic audio session with headphones on (at 48 kHz), then went over to the iOS Simulator to work on my app's audio code and started getting this crash. Unplugging the headphones didn't help; neither did rebooting the simulator, deleting the app from the simulator, or restarting Xcode and the machine.

Finally, I went to System Preferences on my Mac, selected Sound > Input, and plugged in my headphones so that it showed "External Microphone".

I also set the simulator's I/O settings for audio input to "Internal Microphone".

After that, my app was able to start up in the simulator without crashing while creating an AKMicrophone().

Straitjacket answered 10/11, 2021 at 23:57 Comment(0)

Here is the downsampling part, made concrete for your case:

let bus = 0
let inputNode = audioEngine.inputNode
let inputFormat = inputNode.outputFormat(forBus: bus)
    
let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)!

let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!

inputNode.installTap(onBus: bus, bufferSize: 1024, format: inputFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in

    var newBufferAvailable = true
    
    let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        if newBufferAvailable {
            outStatus.pointee = .haveData
            newBufferAvailable = false
            
            return buffer
        } else {
            outStatus.pointee = .noDataNow
            return nil
        }
    }
    
    let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))!

    var error: NSError?
    let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)

    // 44100 Hz buffer
    print(convertedBuffer.format)
}
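
If you want to feed this back into the question's recorder, one option is to write the converted buffer inside the same tap. A sketch, assuming outref was created with the 44100 Hz outputFormat above rather than the hardware format:

// Inside the tap, after the conversion succeeds:
if status != .error {
    // Write the 44100 Hz converted buffer instead of the raw hardware-rate one.
    _ = ExtAudioFileWrite(self.outref!, convertedBuffer.frameLength, convertedBuffer.audioBufferList)
}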
Francoise answered 11/4, 2021 at 12:5 Comment(0)

I was able to fix it by declaring the audioEngine instance variable as an optional. Right before I need to monitor or record sound, I assign a new AVAudioEngine object to it. When the recording session ends, I call audioEngine!.stop() and then set it to nil to deallocate the object.
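
A minimal sketch of that pattern (the method names here are just illustrative):

var audioEngine: AVAudioEngine?

func beginSession() {
    let engine = AVAudioEngine() // a fresh engine picks up the current hardware format
    audioEngine = engine
    // ... connect nodes and install taps here ...
    try? engine.start()
}

func endSession() {
    audioEngine?.stop()
    audioEngine = nil // deallocate so the next session starts clean
}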

Nickeliferous answered 1/12, 2021 at 11:31 Comment(0)
