ARTrackedRaycast in a SwiftUI-based RealityKit app
(Xcode 16.0, target: iPadOS 17.5)
To implement tracked raycasting in a RealityKit scene, call the trackedRaycast(from:allowing:alignment:updateHandler:) instance method inside the handler of a drag gesture. Unlike a one-shot raycast, a tracked raycast is repeated automatically as ARKit refines its understanding of the scene, which makes it computationally expensive, so release it with the stopTracking() instance method as soon as it is no longer needed. In the example below, any previously running raycast is stopped before a new one starts, and the last one is released when the drag ends. A lighter one-shot alternative is shown after ContentView.
import SwiftUI
import RealityKit
import ARKit
struct ContentView : View {
    @State private var raycast: ARTrackedRaycast? = nil
    let arView = ARView(frame: .zero)
    let anchor = AnchorEntity()

    var gesture: some Gesture {
        DragGesture()
            .onChanged { value in
                // Stop the previous tracked raycast before starting a new one;
                // otherwise every .onChanged call leaves a raycast running
                raycast?.stopTracking()
                raycast = arView.trackedRaycast(from: value.location,
                                                allowing: .estimatedPlane,
                                                alignment: .any) { results in
                    if let result = results.first {
                        // Move the anchor to the latest hit point
                        anchor.transform.matrix = result.worldTransform
                    }
                }
            }
            .onEnded { _ in
                // Release the last raycast when the drag finishes
                raycast?.stopTracking()
                raycast = nil
            }
    }

    var body: some View {
        ARContainer(arView: arView, anchor: anchor)
            .ignoresSafeArea()
            .gesture(gesture)
    }
}
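If you only need a single hit test per interaction (a tap rather than a drag), ARView also offers the synchronous one-shot counterpart raycast(from:allowing:alignment:), which returns its results immediately and has nothing to stop. A minimal sketch, where tapLocation is a hypothetical CGPoint taken from your own tap handler:

// One-shot alternative: no tracking, so no stopTracking() is required
let results = arView.raycast(from: tapLocation,     // hypothetical tap point
                             allowing: .estimatedPlane,
                             alignment: .any)
if let first = results.first {
    anchor.transform.matrix = first.worldTransform
}

The ARContainer below is the UIViewRepresentable wrapper that hosts the ARView: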
struct ARContainer : UIViewRepresentable {
    let arView: ARView
    let anchor: AnchorEntity
    // A 5 x 25 x 5 cm box marking the raycast hit point
    let model = ModelEntity(mesh: .generateBox(width: 0.05,
                                               height: 0.25,
                                               depth: 0.05))

    func makeUIView(context: Context) -> ARView {
        // Raise the box by half its height so it stands on the surface
        model.position.y += 0.125
        anchor.addChild(model)
        arView.scene.anchors.append(anchor)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
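The example above relies on ARView's automatic session configuration. If you want to raycast against reconstructed plane geometry (allowing: .existingPlaneGeometry) rather than estimated planes, run the session with plane detection explicitly enabled. A minimal sketch, assuming you add it to makeUIView(context:) before returning the view:

// Explicit session setup so detected-plane raycast targets return results
arView.automaticallyConfigureSession = false
let config = ARWorldTrackingConfiguration()
config.planeDetection = [.horizontal, .vertical]
arView.session.run(config)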