How can I accurately measure the width of a 3D object loaded in an ARView in Swift, especially when the object is animated and its width increases? I’ve tried using the following code snippets with RealityKit, but the printed measurements are incorrect.
// Face-tracking anchor that loaded models are measured against (and presumably
// parented to elsewhere — not shown in this snippet; confirm against the caller).
var anchorEnt = AnchorEntity(.face)
/// Downloads the selected USDZ model, loads it asynchronously, plays every
/// available animation, and periodically logs the model's visual width
/// relative to `anchorEnt`.
///
/// NOTE(review): `visualBounds(relativeTo:)` reports the bounds of the
/// undeformed mesh — skeletal animation does not update it, which is the
/// likely reason the printed width never changes while the model animates.
/// To track an animated width, sample the animated joint transforms (or a
/// proxy entity attached to the moving part) per frame instead.
///
/// - Parameter arView: The view hosting the AR session; passed through to
///   `retryModelLoading` and used when retrying after a failed load.
func loadModel(into arView: ARView) {
    downloadUsdz.downloadModel(model: selectedModel, loadOnFace: {
        var cancellable: AnyCancellable? = nil
        // `loadModelAsync` is an `Entity` class method; call it on `Entity`
        // rather than `AnchorEntity` to avoid implying anchor-specific loading.
        cancellable = Entity.loadModelAsync(contentsOf: URL(fileURLWithPath: model.usdzPath))
            .sink(receiveCompletion: { [self] completion in
                cancellable?.cancel()
                // Retry only on an actual failure. The original retried on
                // `.finished` as well, which loops forever whenever the loaded
                // entity ends up parented somewhere other than `anchorEnt`.
                if case .failure = completion, anchorEnt.children.isEmpty {
                    loadModel(into: arView)
                }
            }, receiveValue: { [self] entity in
                // NOTE(review): nothing in this snippet adds `entity` to
                // `anchorEnt` — presumably `retryModelLoading` parents it.
                // Confirm; otherwise `visualBounds(relativeTo: anchorEnt)`
                // measures across disconnected coordinate spaces.
                entity.availableAnimations.forEach { animation in
                    entity.playAnimation(animation)
                }

                // `extents.x` is by definition `max.x - min.x`, so the two
                // values printed by the original were always identical.
                // Weakly capture the entity and invalidate the timer once it
                // leaves the scene, so the repeating timer does not leak.
                Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak entity] timer in
                    guard let entity, entity.parent != nil else {
                        timer.invalidate()
                        return
                    }
                    let bounds = entity.visualBounds(relativeTo: self.anchorEnt)
                    print("Current Width: \(bounds.extents.x)")
                }

                retryModelLoading(entity: entity, arview: arView)
                // Do NOT cancel here: cancelling inside `receiveValue`
                // suppresses the `.finished` completion, leaving the
                // subscription's completion-side logic unreachable on success.
            })
    })
}