I'm working on tracking the user's position in visionOS inside an Immersive Space. Any insights or tips on how to approach this? The docs seem thin at the moment. My searching turned up queryPose, but Xcode throws an error when I try to use it. Here's where my code currently stands:
import SwiftUI
import RealityKit
import RealityKitContent // Reality Composer Pro package providing realityKitContentBundle
import ARKit

struct ImmersiveView: View {
    private let attachmentID = "viewID"

    var body: some View {
        RealityView { content, attachments in
            if let fixedScene = try? await Entity(named: "ImmersiveScene",
                                                  in: realityKitContentBundle) {
                // Created here but never started: the session is not run,
                // so the provider can't be queried for the device pose yet.
                let wtp = WorldTrackingProvider()
                let session = ARKitSession()

                // Anchor the scene to the user's head so it follows the view.
                let anchor = AnchorEntity(.head)
                anchor.anchoring.trackingMode = .continuous
                fixedScene.setParent(anchor)
                content.add(anchor)

                // Parent the SwiftUI attachment to the scene.
                if let sceneAttachment = attachments.entity(for: attachmentID) {
                    fixedScene.addChild(sceneAttachment)
                }

                // Image-based lighting for the scene.
                guard let env = try? await EnvironmentResource(named: "Directional")
                else { return }
                let iblComponent = ImageBasedLightComponent(source: .single(env),
                                                            intensityExponent: 10)
                fixedScene.components[ImageBasedLightComponent.self] = iblComponent
                fixedScene.components.set(
                    ImageBasedLightReceiverComponent(imageBasedLight: fixedScene))

                // Offset the scene relative to the head anchor.
                fixedScene.transform.translation.z = -1.0
                fixedScene.transform.translation.y = 0.35
                fixedScene.transform.translation.x = 0.25

                anchor.name = "Attachments"
            }
        } attachments: {
            Attachment(id: attachmentID) {
                // SwiftUI content for the attachment goes here.
            }
        }
    }
}
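From what I can tell, queryPose shows up in older sample code from the early betas; in the current SDK the equivalent on WorldTrackingProvider appears to be queryDeviceAnchor(atTimestamp:), and it only returns data once the ARKitSession has actually been run and an immersive space is open. This is the kind of standalone tracker I've been sketching out (HeadPoseTracker is just my own name, and I'm assuming the current visionOS API here):

import ARKit
import QuartzCore

// Minimal sketch, assuming queryDeviceAnchor(atTimestamp:) is the right call.
final class HeadPoseTracker {
    private let session = ARKitSession()
    private let worldTracking = WorldTrackingProvider()

    // The provider delivers nothing until the session is running
    // and the app has an immersive space open.
    func start() async throws {
        try await session.run([worldTracking])
    }

    // Returns the head transform in world space, or nil if no
    // device anchor is available yet.
    func currentHeadTransform() -> simd_float4x4? {
        guard let device = worldTracking.queryDeviceAnchor(atTimestamp: CACurrentMediaTime())
        else { return nil }
        return device.originFromAnchorTransform
    }
}

If that's right, the user's position would be the translation in the last column of that 4x4 matrix (originFromAnchorTransform.columns.3), queried each frame. Does that match what others are doing?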
– Poodle