MCSessionDelegate - Getting No Bytes When Receiving Stream

I'm pretty new to Swift/iOS; I just started learning a couple of days ago. I'm using Swift 3 and want to develop two iPhone apps that can stream audio from the microphone to other iPhone devices using Multipeer Connectivity. The first app would be the speaker's app and the other would be the listener's app.

Previously, I learned how to advertise, browse, and invite peers from this useful tutorial, and I learned how to get audio data from the microphone and convert it to bytes from this answer and this answer. Thanks a lot to Rhythmic Fistman.

So my code is a combination of what those articles cover.

This is the ViewController of the listener app:

import UIKit
import MultipeerConnectivity
import AVFoundation


class ColorSwitchViewController: UIViewController {

    @IBOutlet weak var connectionsLabel: UILabel!

    let colorService = ColorServiceManager()
    var engine = AVAudioEngine()
    let player = AVAudioPlayerNode()

    // Somewhere, schedule the stream in the mainRunLoop, set the delegate and open it. Choose the peer that you want to connect
    var inputStream = InputStream()
    var inputStreamIsSet: Bool!
    var outputStreamIsSet: Bool!
    public let peerID = MCPeerID(displayName: UIDevice.current.name)

    //MARK: Private Functions
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func copyAudioBufferBytes(_ audioBuffer: AVAudioPCMBuffer) -> [UInt8] {
        let srcLeft = audioBuffer.floatChannelData![0]
        let bytesPerFrame = audioBuffer.format.streamDescription.pointee.mBytesPerFrame
        let numBytes = Int(bytesPerFrame * audioBuffer.frameLength)

        // initialize bytes to 0 (how to avoid?)
        var audioByteArray = [UInt8] (repeating: 0, count: numBytes)

        // copy data from buffer
        srcLeft.withMemoryRebound(to: UInt8.self, capacity: numBytes) { srcByteData in
            audioByteArray.withUnsafeMutableBufferPointer {
                $0.baseAddress!.initialize(from: srcByteData, count: numBytes)
            }
        }

        return audioByteArray
    }

    func bytesToAudioBuffer(_ buf: [UInt8]) -> AVAudioPCMBuffer {
        let fmt = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)
        let frameLength = UInt32(buf.count) / fmt.streamDescription.pointee.mBytesPerFrame

        let audioBuffer = AVAudioPCMBuffer(pcmFormat: fmt, frameCapacity: frameLength)
        audioBuffer.frameLength = frameLength

        let dstLeft = audioBuffer.floatChannelData![0]

        buf.withUnsafeBufferPointer {
            let src = UnsafeRawPointer($0.baseAddress!).bindMemory(to: Float.self, capacity: Int(frameLength))
            dstLeft.initialize(from: src, count: Int(frameLength))
        }

        return audioBuffer
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        colorService.delegate = self
    }

    @IBAction func redTapped() {
        self.change(color: .red)
        colorService.send(colorName: "red")

    }

    @IBAction func yellowTapped() {
        self.change(color: .yellow)
        colorService.send(colorName: "yellow")
    }

    func change(color : UIColor) {
        UIView.animate(withDuration: 0.2) {
            self.view.backgroundColor = color
        }
    }

}

extension ColorSwitchViewController : ColorServiceManagerDelegate {

    func connectedDevicesChanged(manager: ColorServiceManager, connectedDevices: [String]) {
        OperationQueue.main.addOperation {
            self.connectionsLabel.text = "Connections: \(connectedDevices)"
        }
    }

    func colorChanged(manager: ColorServiceManager, colorString: String) {
        OperationQueue.main.addOperation {
            switch colorString {
            case "red":
                self.change(color: .red)
            case "yellow":
                self.change(color: .yellow)
            default:
                NSLog("%@", "Unknown color value received: \(colorString)")
            }
        }
    }

    func streamReceived(manager: ColorServiceManager, stream: InputStream, streamName: String, fromPeer: MCPeerID) {
        NSLog("%@", "name " + fromPeer.displayName)
        if streamName == "stream" && fromPeer != peerID {

            NSLog("%@", "voice received")

            stream.schedule(in: RunLoop.current, forMode: .defaultRunLoopMode)
            stream.open()

            var bytes = [UInt8](repeating: 0, count: 17640)

            if (stream.hasBytesAvailable == true) {
                NSLog("%@", "has bytes available...")
            } else {
                NSLog("%@", "has NO byte ...")
            }

            let result = stream.read(&bytes, maxLength: bytes.count)
            if result == 0 {
                print("Stream at capacity")
            } else if result == -1 {
                print("Operation failed: \(String(describing: stream.streamError))")
            } else {
                print("The number of bytes read is \(result)")
            }

            let audioBuffer = self.bytesToAudioBuffer(bytes) //Here is where the app crashes

            engine.attach(player)
            let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)
            engine.connect(player, to: engine.mainMixerNode, format: outputFormat)


            do {
                try engine.start()

                player.scheduleBuffer(audioBuffer, completionHandler: nil)
                player.play()
            } catch let error {
                print(error.localizedDescription)

            }
        }
    }

}

And the ViewController of the speaker app is similar, except that it contains the code for sending the stream and doesn't contain the code for receiving it:

// ....

    override func viewDidLoad() {
        super.viewDidLoad()
        colorService.delegate = self

    }


    func startStream() {
        let input = engine.inputNode!
        engine.attach(player)

        let bus = 0
        let inputFormat = input.inputFormat(forBus: bus)
        engine.connect(player, to: engine.mainMixerNode, format: inputFormat)

        NSLog("%@", "sendStream: to \(self.colorService.session.connectedPeers.count) peers")

        if self.colorService.session.connectedPeers.count > 0 {
            do {
                let outputStream = try self.colorService.session.startStream(withName: "stream", toPeer: self.colorService.session.connectedPeers.first!)
                outputStream.schedule(in: RunLoop.main, forMode:RunLoopMode.defaultRunLoopMode)
                outputStream.open()

                let bus = 0
                let inputFormat = input.inputFormat(forBus: bus)
                input.installTap(onBus: bus, bufferSize: 2048, format: inputFormat, block: {
                    (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
                    self.player.scheduleBuffer(buffer)
                    let audioBuffer = self.copyAudioBufferBytes(buffer)
//                    NSLog("%@", "speaking...")
                    let result = outputStream.write(audioBuffer, maxLength: audioBuffer.count)
                    if result == 0 {
                        print("Stream at capacity")
                    } else if result == -1 {
                        print("Operation failed: \(String(describing: outputStream.streamError))")
                    } else {
                        print("The number of bytes written is \(result)")
                    }
                })


                try! engine.start()
                player.play()


            }
            catch let error {
                NSLog("%@", "Error for sending: \(error)")
            }
        }

    }

    func stopStream() {
        engine.inputNode?.removeTap(onBus: 0)
        player.stop()
    }

    @IBAction func redTapped() {
        self.change(color: .red)
        colorService.send(colorName: "red")
        self.startStream()
    }

    @IBAction func yellowTapped() {
        self.change(color: .yellow)
        colorService.send(colorName: "yellow")
        self.stopStream()
    }

    // ...

Unfortunately, on the listener side, the app receives the stream but no bytes are available; NSLog("%@", "has NO byte ...") is the branch that gets called. I wonder whether the listener app really receives the audio stream or not.
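
From what I've read so far, Foundation streams deliver their data asynchronously, so hasBytesAvailable can still be false right after open(); the usual pattern seems to be to set a StreamDelegate and call read(_:maxLength:) only when the .hasBytesAvailable event fires. This rough sketch (placeholder class and names, not my actual code) is the direction I've been considering:

import Foundation

// Rough sketch with placeholder names: keep the stream alive, set a delegate,
// and only read when bytes actually arrive.
class StreamReceiver: NSObject, StreamDelegate {

    private var inputStream: InputStream?

    func start(_ stream: InputStream) {
        inputStream = stream   // keep a strong reference so the stream isn't deallocated
        stream.delegate = self
        stream.schedule(in: RunLoop.main, forMode: .defaultRunLoopMode)
        stream.open()
    }

    func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
        switch eventCode {
        case .hasBytesAvailable:
            guard let input = aStream as? InputStream else { return }
            var buffer = [UInt8](repeating: 0, count: 4096)
            while input.hasBytesAvailable {
                let read = input.read(&buffer, maxLength: buffer.count)
                if read > 0 {
                    // hand Array(buffer[0..<read]) to the audio code here
                } else {
                    break
                }
            }
        case .errorOccurred, .endEncountered:
            aStream.close()
            aStream.remove(from: RunLoop.main, forMode: .defaultRunLoopMode)
        default:
            break
        }
    }
}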

So, what's my mistake here? Any help would be appreciated. Thank you in advance.

Earthaearthborn asked 6/6, 2017 at 2:45. Comments (2):
Do you see that the speaker sends something? – Sheepherder
May I suggest, before going into exchanging audio, that you first check that the Multipeer Connectivity between the two devices is working properly? I don't see any MCSession, MCAdvertiserAssistant, or MCBrowserViewController to establish an MCSession in the first place. Can you share the code that does that? – Snaffle
