I have been trying to create an ARView for over two days now that can record the position of the camera in space over time and then save this to a keyframe file. Basically, I want to create an app that lets you record virtual camera movements that can then be used in 3d applications like Autodesk Maya
or Cinema4D
to drive a camera. preferred file outputs would be anything that can hold a camera object and animate it over time (alternatively also an object that moves over time, that I can then parent my camera to).
Here is the code I have, sorry for it being a bit chaotic, I have tried a LOT of different things... Basically I try to record device position and rotation and then save it to an MDL object, but somehow it doesn't animate. I have also tried multiple different file types (some of those didn't support keyframe animation, so that didn't help, but from what I understand, Alembic does)
import SwiftUI
import ARKit
import RealityKit
import ModelIO
struct ARViewContainer: UIViewRepresentable {
let session = ARSession()
let delegate = MySessionDelegate()
func makeUIView(context: Context) -> ARView {
// Set up the ARView with session
let arView = ARView(frame: .zero)
let boxAnchor = try! Experience.loadBox()
arView.scene.anchors.append(boxAnchor)
arView.session.delegate = delegate // assign delegate to the session
return arView
}
func updateUIView(_ uiView: ARView, context: Context) {
// Update the ARView if needed
}
func startARSession() {
// Start the ARSession
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = [.horizontal, .vertical]
session.run(configuration, options: [])
}
func stopARSession() {
// Stop the ARSession
session.pause()
}
}
class MySessionDelegate: NSObject, ARSessionDelegate {
var object: MDLMesh?
let asset = MDLAsset()
let cameraTransform = MDLTransform()
var documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
func session(_ session: ARSession, didUpdate frame: ARFrame) {
// Get the camera position and orientation for the current frame
let transform = frame.camera.transform
let rotation = frame.camera.eulerAngles
let position = transform.columns.3
let elapsedTime = frame.timestamp
cameraTransform.setTranslation(position[SIMD3(0,1,2)], forTime: elapsedTime)
cameraTransform.setRotation(rotation, forTime: elapsedTime)
print("Camera Transform: \(cameraTransform.matrix)")
}
}
struct Camera: View {
var body: some View {
VStack {
ARViewContainer().onAppear(perform: ARViewContainer().startARSession)
.onDisappear(perform: ARViewContainer().stopARSession)
Button("Export Recording") {
// Create an MDLAsset with a box representing the camera transform
let object = MDLMesh(boxWithExtent: .init(0.1, 0.1, 0.1), segments: .init(10, 10, 10), inwardNormals: false, geometryType: .triangles, allocator: nil)
object.name = "Camera Transform"
object.transform = MySessionDelegate().cameraTransform
let asset = MDLAsset()
asset.add(object)
// Export the MDLAsset to a file
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let fileURL = documentsDirectory.appendingPathComponent("recording.abc")
try! asset.export(to: fileURL)
}
}
}
}
If there is a completely different way of doing it, please also share, I thank everybody in advance for any help!