Hi,
I was wondering during developing for visionOS why when I try to use queryDeviceAnchor() with WorldTrackingProvider() after opening the immersive space in the update(context: SceneUpdateContext) function, it initially seems to provide the DeviceAnchor data every frame but stops at some point (about 5-10 seconds after pressing the Button which opens the immersive space) and then stops updating constantly and only updates somehow randomly if I move my head abruptly to the left, right, etc. Somehow, the tracking doesn't seem to work as it should directly on the AVP device.
Any help would be greatly appreciated!
See my code down below:
ContentView.swift
import SwiftUI
struct ContentView: View {
@Environment(\.openImmersiveSpace) private var openImmersiveSpace
@Environment(\.scenePhase) private var scenePhase
var body: some View {
VStack {
Text("Head Tracking Prototype")
.font(.largeTitle)
Button("Start Head Tracking") {
Task {
await openImmersiveSpace(id: "appSpace")
}
}
}
.onChange(of: scenePhase) {_, newScenePhase in
switch newScenePhase {
case .active:
print("...")
case .inactive:
print("...")
case .background:
break
@unknown default:
print("...")
}
}
}
}
HeadTrackingApp.swift
import SwiftUI
@main
struct HeadTrackingApp: App {
init() {
HeadTrackingSystem.registerSystem()
}
var body: some Scene {
WindowGroup {
ContentView()
}
ImmersiveSpace(id: "appSpace") {
}
}
}
HeadTrackingSystem.swift
import SwiftUI
import ARKit
import RealityKit
class HeadTrackingSystem: System {
let arKitSession = ARKitSession()
let worldTrackingProvider = WorldTrackingProvider()
required public init(scene: RealityKit.Scene) {
setUpSession()
}
func setUpSession() {
Task {
do {
try await arKitSession.run([worldTrackingProvider])
} catch {
print("Error: \(error)")
}
}
}
public func update(context: SceneUpdateContext) {
guard worldTrackingProvider.state == .running else { return }
let avp = worldTrackingProvider.queryDeviceAnchor(atTimestamp: CACurrentMediaTime())
print(avp!)
}
1
1
677