App crashes while using Speech Kit on iOS: "Terminating app due to uncaught exception"

I got this error while implementing speech to text:

Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _recordingTap == nil'

and:

ERROR: [0x1b2df5c40] >avae> AVAudioNode.mm:565: CreateRecordingTap: required condition is false: _recordingTap == nil

Here is the code of my viewController:

public class ViewController: UIViewController, SFSpeechRecognizerDelegate {
// MARK: Properties

private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))!

private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?

private var recognitionTask: SFSpeechRecognitionTask?

private let audioEngine = AVAudioEngine()

@IBOutlet var textView : UITextView!

@IBOutlet var recordButton : UIButton!

// MARK: UIViewController

public override func viewDidLoad() {
    super.viewDidLoad()

    // Disable the record buttons until authorization has been granted.
    recordButton.isEnabled = false
}

override public func viewDidAppear(_ animated: Bool) {
    speechRecognizer.delegate = self

    SFSpeechRecognizer.requestAuthorization { authStatus in
        /*
            The callback may not be called on the main thread. Add an
            operation to the main queue to update the record button's state.
        */
        OperationQueue.main.addOperation {
            switch authStatus {
                case .authorized:
                    self.recordButton.isEnabled = true

                case .denied:
                    self.recordButton.isEnabled = false
                    self.recordButton.setTitle("User denied access to speech recognition", for: .disabled)

                case .restricted:
                    self.recordButton.isEnabled = false
                    self.recordButton.setTitle("Speech recognition restricted on this device", for: .disabled)

                case .notDetermined:
                    self.recordButton.isEnabled = false
                    self.recordButton.setTitle("Speech recognition not yet authorized", for: .disabled)
            }
        }
    }
}

private func startRecording() throws {

    // Cancel the previous task if it's running.
    if let recognitionTask = recognitionTask {
        recognitionTask.cancel()
        self.recognitionTask = nil
    }

    let audioSession = AVAudioSession.sharedInstance()
    try audioSession.setCategory(AVAudioSessionCategoryRecord)
    try audioSession.setMode(AVAudioSessionModeMeasurement)
    try audioSession.setActive(true, with: .notifyOthersOnDeactivation)

    recognitionRequest = SFSpeechAudioBufferRecognitionRequest()

    guard let inputNode = audioEngine.inputNode else { fatalError("Audio engine has no input node") }
    guard let recognitionRequest = recognitionRequest else { fatalError("Unable to create an SFSpeechAudioBufferRecognitionRequest object") }

    // Configure request so that results are returned before audio recording is finished
    recognitionRequest.shouldReportPartialResults = true

    // A recognition task represents a speech recognition session.
    // We keep a reference to the task so that it can be cancelled.
    recognitionTask = speechRecognizer.recognitionTask(with: recognitionRequest) { result, error in
        var isFinal = false

        if let result = result {
            self.textView.text = result.bestTranscription.formattedString
            isFinal = result.isFinal
        }

        if error != nil || isFinal {
            self.audioEngine.stop()
            inputNode.removeTap(onBus: 0)

            self.recognitionRequest = nil
            self.recognitionTask = nil

            self.recordButton.isEnabled = true
            self.recordButton.setTitle("Start Recording", for: [])
        }
    }

    let recordingFormat = inputNode.outputFormat(forBus: 0)
    inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
        self.recognitionRequest?.append(buffer)
    }

    audioEngine.prepare()

    try audioEngine.start()

    textView.text = "(Go ahead, I'm listening)"
}

// MARK: SFSpeechRecognizerDelegate

public func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
    if available {
        recordButton.isEnabled = true
        recordButton.setTitle("Start Recording", for: [])
    } else {
        recordButton.isEnabled = false
        recordButton.setTitle("Recognition not available", for: .disabled)
    }
}

// MARK: Interface Builder actions

@IBAction func recordButtonTapped() {
    if audioEngine.isRunning {
        audioEngine.stop()
        recognitionRequest?.endAudio()
        recordButton.isEnabled = false
        recordButton.setTitle("Stopping", for: .disabled)
    } else {
        try! startRecording()
        recordButton.setTitle("Stop recording", for: [])
    }
}
}
Pfeifer answered 3/1, 2017 at 7:11

Try removing the tap when you stop recording.

Swift 3:

audioEngine.inputNode?.removeTap(onBus: 0)

It helped me and should help you too.
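
A minimal sketch of where that call could live in the asker's recordButtonTapped (using the Swift 3 era API from the question, where inputNode is optional; on current SDKs it is non-optional and the ? goes away):

@IBAction func recordButtonTapped() {
    if audioEngine.isRunning {
        audioEngine.stop()
        // Remove the tap here so the next startRecording() can install a fresh one
        // without tripping the _recordingTap == nil assertion.
        audioEngine.inputNode?.removeTap(onBus: 0)
        recognitionRequest?.endAudio()
        recordButton.isEnabled = false
        recordButton.setTitle("Stopping", for: .disabled)
    } else {
        try! startRecording()
        recordButton.setTitle("Stop recording", for: [])
    }
}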

Vasectomy answered 18/1, 2017 at 6:30

You probably already have a tap on the bus, and you can't install another one on that same bus. Call removeTap(onBus:) when you stop your engine:

audioEngine.inputNode?.removeTap(onBus: 0)
Bilbao answered 3/1, 2017 at 8:33

The error is telling you that you already have a tap installed on that bus and that you can't have another one.

First remove the tap on that bus; then you can install a new tap on it.

    let inputNode = audioEngine.inputNode
    inputNode.removeTap(onBus: 0)

It will help.
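
A minimal sketch of that remove-then-install order applied to the asker's startRecording() (assuming a newer SDK where inputNode is non-optional, as in this answer's snippet):

let inputNode = audioEngine.inputNode
// Clear any tap left over from a previous recording session before adding a new one.
inputNode.removeTap(onBus: 0)
let recordingFormat = inputNode.outputFormat(forBus: 0)
inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
    self.recognitionRequest?.append(buffer)
}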

Kaila answered 16/1, 2019 at 11:37

First, as everyone says, you must remove the tap from the input node.

But in my case I still had the error because I kept a strong reference to a single audioEngine = AVAudioEngine(). Once I started recreating it for each session, the problem went away.

Here is my class for working with the speech engine (create a new WordRecognizer instance every time you need it):

import Speech

final class WordRecognizer {

    private let audioEngine = AVAudioEngine()
    private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
    private var request: SFSpeechAudioBufferRecognitionRequest?
    private var task: SFSpeechRecognitionTask?
    private unowned let delegate: WordRecognizerDelegate
    
    init(delegate: WordRecognizerDelegate) {
        self.delegate = delegate
        debugPrint("init audio system")
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            debugPrint("init error: \(error.localizedDescription)")
        }
    }
    
    deinit {
        debugPrint("clear audio system")
        do {
            try AVAudioSession.sharedInstance().setActive(false)
        } catch {
            debugPrint("clear error: \(error.localizedDescription)")
        }
    }
    
    func startTask()  {
        guard let recognizer = speechRecognizer else { return }
        
        let newRequest = SFSpeechAudioBufferRecognitionRequest()
        self.request = newRequest

        let task = recognizer.recognitionTask(with: newRequest, resultHandler: { [weak self] result, error in
            guard let result = result else { return }
            debugPrint(result.bestTranscription.formattedString)
            if Thread.isMainThread {
                self?.onRecognize(transcriptions: result.transcriptions)
            } else {
                DispatchQueue.main.async { [weak self] in
                    self?.onRecognize(transcriptions: result.transcriptions)
                }
            }
        })
        self.task = task

        let recordingFormat = audioEngine.inputNode.outputFormat(forBus: 0)
        audioEngine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] buffer, time in
            self?.request?.append(buffer)
        }
        audioEngine.prepare()
        
        debugPrint("start audio system")
        do {
            try audioEngine.start()
        } catch {
            debugPrint("start error: \(error.localizedDescription)")
        }
    }
    
    private func onRecognize(transcriptions: [SFTranscription]) {
        for transcription in transcriptions {
            for segment in transcription.segments {
                let text = segment.substring.lowercased()
                if delegate.recognize(text: text) {
                    self.stopTask()
                    self.startTask()
                }
            }
        }
    }

    private func stopTask()  {
        if audioEngine.isRunning {
            audioEngine.stop()
            audioEngine.inputNode.removeTap(onBus: 0)
        }
        if let request = self.request {
            request.endAudio()
            self.request = nil
        }
        
        if let task = self.task {
            task.cancel()
            self.task = nil
        }
    }
}

protocol WordRecognizerDelegate: AnyObject {
    
    func recognize(text: String) -> Bool
    
}
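
A hypothetical usage sketch (the CommandListener type and the "stop" keyword are my own illustration, not part of the answer): the owner keeps the recognizer alive, creates a fresh instance per session, and reacts to recognized words through the delegate:

import Speech

final class CommandListener: WordRecognizerDelegate {

    private var recognizer: WordRecognizer?

    func listen() {
        SFSpeechRecognizer.requestAuthorization { status in
            guard status == .authorized else { return }
            DispatchQueue.main.async {
                // A fresh WordRecognizer (and thus a fresh AVAudioEngine) per session,
                // as the answer recommends.
                let recognizer = WordRecognizer(delegate: self)
                self.recognizer = recognizer
                recognizer.startTask()
            }
        }
    }

    func recognize(text: String) -> Bool {
        print("heard: \(text)")
        // Returning true makes WordRecognizer stop and restart its task.
        return text == "stop"
    }
}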
Kweisui answered 20/9, 2023 at 9:14
