Spatial Audio Playback in RealityKit
Since RealityKit for iOS isn't able to control parameters of Reality Composer 1.5 behaviors, the best strategy for controlling audio is to create a programmatic AudioPlaybackController. To feed your audio file to the controller, export .rcproject
scene to .usdz
format and use the unzipping trick (rename the .usdz extension to .zip and decompress the archive) to extract the .aiff
, .caf
or .mp3
sound file. When loading audio for playback, you can choose between spatial
and non-spatial
audio experience.
In this answer you'll find solutions showing how to control Spatial Audio in iOS
and visionOS
.
UIKit version
import UIKit
import RealityKit
extension ViewController {
    /// Loads a looping spatial audio resource from the main bundle and
    /// prepares a playback controller on `entity`. Playback itself is
    /// started later by the Play button action.
    private func loadAudio() {
        do {
            let audioResource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,                      // main bundle
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            // prepareAudio returns a controller without starting playback.
            controller = entity.prepareAudio(audioResource)
            controller?.speed = 0.9
            controller?.fade(to: .infinity, duration: 2)
        } catch {
            // A missing or unreadable audio file is reported, not fatal.
            print(error.localizedDescription)
        }
    }
}
/// Hosts the AR scene and wires the Play/Stop buttons to the
/// audio playback controller prepared in `loadAudio()`.
class ViewController : UIViewController {
    @IBOutlet var uiView: UIView!
    @IBOutlet var arView: ARView!
    private var entity = Entity()
    private var controller: AudioPlaybackController? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        uiView.backgroundColor = .systemCyan
        // Load the Reality Composer scene; a missing bundled scene
        // is a programmer error, hence the deliberate try!.
        let scene = try! Experience.loadBox()
        arView.scene.anchors.append(scene)
        // Parent the audio entity to the scene anchor so the sound
        // is positioned inside the AR scene.
        scene.anchor?.addChild(entity)
        loadAudio()
    }

    @IBAction func playMusic(_ sender: UIButton) {
        controller?.play()
    }

    @IBAction func stopMusic(_ sender: UIButton) {
        // pause() keeps the playhead; stop() would rewind instead.
        controller?.pause()
        // self.controller?.stop()
    }
}
SwiftUI iOS version
import SwiftUI
import RealityKit
/// SwiftUI front end: an AR view with overlaid Play/Stop buttons
/// driving a spatial audio controller on `entity`.
struct ContentView : View {
    @State var arView = ARView(frame: .zero)
    @State var controller: AudioPlaybackController? = nil
    @State var entity = Entity()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView, entity: $entity)
                .ignoresSafeArea()
            VStack {
                Spacer()
                Button("Play") {
                    // Re-prepare before each play so a stopped
                    // controller can be restarted from scratch.
                    loadSound()
                    controller?.play()
                }
                Button("Stop") {
                    controller?.stop()
                }
            }
        }
    }

    /// Loads a looping spatial audio file from the main bundle and
    /// prepares a playback controller on `entity`; errors are printed.
    func loadSound() {
        do {
            let audioResource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            controller = entity.prepareAudio(audioResource)
        } catch {
            print(error.localizedDescription)
        }
    }
}
/// Bridges an ARView into SwiftUI and parents the audio `entity`
/// to the Reality Composer scene's anchor.
struct ARViewContainer: UIViewRepresentable {
    @Binding var arView: ARView
    @Binding var entity: Entity

    func makeUIView(context: Context) -> ARView {
        // Deliberate try!: the bundled scene is expected to exist.
        let scene = try! Experience.loadBox()
        arView.scene.anchors.append(scene)
        scene.anchor?.addChild(entity)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
SwiftUI visionOS version
import SwiftUI
import RealityKit
/// An entity that owns a preloaded audio resource and exposes
/// simple spatial play/stop controls.
class Audio : Entity {
    let resource: AudioFileResource

    /// Force-loads the named audio file from the main bundle;
    /// a missing resource is a programmer error and traps.
    init(_ named: String) {
        resource = try! AudioFileResource.load(named: named)
    }

    /// Adds a spatial-audio component to this entity and parents the
    /// model to it, so the sound source follows the model's transform.
    /// NOTE(review): despite the `pinnedTo` label, the model becomes a
    /// CHILD of this audio entity, not the other way around.
    func setupSpatialSound(pinnedTo: ModelEntity) {
        spatialAudio = SpatialAudioComponent()
        spatialAudio?.gain = -15      // decibels below unity gain
        addChild(pinnedTo)
    }

    func playSpatialSound() {
        playAudio(resource)
    }

    func stopSpatialSound() {
        stopAllAudio()
    }

    // Required by Entity; this subclass is only created via init(_:).
    @MainActor required init() {
        fatalError("Hasn't been implemented yet")
    }
}
/// visionOS view: shows a tappable cube whose spatial audio toggles
/// off/on with each tap.
struct ContentView : View {
    // Starts at 1 (odd) because audio begins playing in init();
    // even counts mean "stopped", odd counts mean "playing".
    @State var counter = 1
    let cube = ModelEntity(mesh: .generateBox(size: 0.05))
    let audio = Audio("AudioFile")

    init() {
        // The cube needs an input-target component and collision
        // shapes for SpatialTapGesture hit-testing to find it.
        cube.components[InputTargetComponent.self] = .init()
        cube.generateCollisionShapes(recursive: false)
        audio.setupSpatialSound(pinnedTo: cube)
        audio.playSpatialSound()
    }

    var body: some View {
        RealityView { content in
            // `audio` already contains the cube as a child.
            content.add(audio)
        }
        .gesture(
            SpatialTapGesture()
                .targetedToEntity(cube)
                .onEnded { _ in
                    counter += 1
                    // Fix: `counter % 2` has exactly two outcomes, so the
                    // original redundant `else if (counter % 2) == 1`
                    // collapses to a plain `else`.
                    if counter.isMultiple(of: 2) {
                        audio.stopSpatialSound()
                    } else {
                        audio.playSpatialSound()
                    }
                }
        )
    }
}