iOS 17 Object Capture breaks RealityKit occlusion


I believe that ObjectCaptureView (new in iOS 17) is causing an issue that breaks RealityKit scene-understanding occlusion in an ARView.

The full code listing at the end of this post is a minimal reproducible example. To reproduce:

  1. Launch the app and notice that the yellow box is occluded by the real environment as you move your phone around.
  2. Press the Switch button in the center of the screen to go to the ObjectCaptureView.
  3. The ObjectCaptureView opens (notice the square in the middle of the screen).
  4. Press the Switch button again to go back to the RealityKit view.
  5. Notice that the box is no longer occluded by the environment as you move around.

I cannot understand why this is happening. I do nothing except start() and cancel() the ObjectCaptureSession, yet the issue still presents even if both the start() and cancel() calls are removed. It seems to occur whenever ObjectCaptureView is created anywhere in the code, as in the sketch below.
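
For illustration, with start() and cancel() removed, DeepView reduces to roughly the following. The MinimalDeepView name is mine and the directory setup is stripped out as well, so treat it as illustrative rather than the exact code I tested:

import SwiftUI
import RealityKit

// Sketch only: no start(), no cancel(), no image directories.
// Simply constructing an ObjectCaptureSession and presenting an
// ObjectCaptureView appears to be enough to trigger the problem.
struct MinimalDeepView: View {
    @State var session: ObjectCaptureSession

    @MainActor
    init() {
        self.session = ObjectCaptureSession()
    }

    var body: some View {
        ObjectCaptureView(session: session)
    }
}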

I have logged a bug with Apple, but I would really appreciate any feedback - thanks.


import SwiftUI
import RealityKit
import ARKit
import Combine

func getTempObjectCaptureFolder() -> URL {
    let fileManager = FileManager.default
    let paths = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
    let documentsDirectory = paths[0]
    let objectCaptureTempPath = documentsDirectory.appendingPathComponent("ObjectCaptureTemp")
    
    let dateFormatter = DateFormatter()
    dateFormatter.dateFormat = "yyyyMMdd_HHmmss"
    let dateStr = dateFormatter.string(from: Date())
    
    let subfolderURL = objectCaptureTempPath.appendingPathComponent(dateStr)
    
    do {
        try fileManager.createDirectory(at: subfolderURL, withIntermediateDirectories: true, attributes: nil)
    } catch {
        print("Failed to create capture folder: \(error)")
    }
    
    return subfolderURL
}


struct DeepView: View {
    @Binding var switched:Bool
    @State var session: ObjectCaptureSession
    @State var loaded: Bool = false
    
    @MainActor
    init(switched: Binding<Bool>) {
        self._switched = switched
        self.session = ObjectCaptureSession()
    }
    
    var body: some View {
   
        if (!loaded){
            Text("loading camera")
                .onAppear(){
                    Task{
                        do{
                            let baseDir = getTempObjectCaptureFolder()
                            
                            let snapshotsDir = baseDir.appendingPathComponent("Snapshots")
                            let imagesDir = baseDir.appendingPathComponent("Images")
                            try FileManager.default.createDirectory(at: snapshotsDir, withIntermediateDirectories: true, attributes: nil)
                            try FileManager.default.createDirectory(at: imagesDir, withIntermediateDirectories: true, attributes: nil)
                            session.start(imagesDirectory: imagesDir, configuration: ObjectCaptureSession.Configuration())
                            loaded = true
                            
                        }
                        catch{
                            print("bad error \(error)")
                        }
                    }
                }
        }
        else {
            ObjectCaptureView(session: session)
                .onDisappear() {
                    session.cancel()
                    print("disappearing")
                }
        }
        
    }
}

class Controller {
    weak var view: ARView?
    var configuration: ARWorldTrackingConfiguration?
    
    // Pauses and immediately re-runs the session with the stored configuration,
    // then force-toggles scene-understanding occlusion.
    // Note: control() is not invoked anywhere in this repro.
    func control() {
        guard let configuration = configuration else { return }
        view?.session.pause()
        print("Controllering")
        view?.session.run(configuration)
        view?.environment.sceneUnderstanding.options.remove(.occlusion)
        view?.environment.sceneUnderstanding.options.insert(.occlusion)
    }
}

struct ContentView: View {
    @State var switched:Bool = false
    var controller = Controller()
    
    var body: some View {
        ZStack{
            if (switched) {
                DeepView(switched: $switched)
                
            }
            else{
                ARFeedView(controller: controller)
            }
            
            VStack{
                Button(action: {
   
                    if(!switched){
                        controller.view?.session.pause()
                    }
                    
                    switched.toggle()
                    
                }) {
                    Text("Switch")
                        .foregroundStyle(.white)
                }
            }
            
        }
        .ignoresSafeArea()
        
    }
    
}


struct ARFeedView: UIViewRepresentable {
    func updateUIView(_ uiView: ARView, context: Context) {
        
    }
    
    var controller: Controller
    
    let configuration: ARWorldTrackingConfiguration = {
        let config = ARWorldTrackingConfiguration()
        
        if (ARWorldTrackingConfiguration.supportsSceneReconstruction(ARWorldTrackingConfiguration.SceneReconstruction.mesh)) {
            config.sceneReconstruction = .mesh
        }
        
        config.planeDetection = [.vertical, .horizontal]
        
        let sortedVideoFormats = ARWorldTrackingConfiguration.supportedVideoFormats.sorted {
            ($0.imageResolution.width * $0.imageResolution.height) > ($1.imageResolution.width * $1.imageResolution.height)
        }
        
        if let highestResolutionFormat = sortedVideoFormats.first {
            config.videoFormat = highestResolutionFormat
        }
        
        return config
    }()
    
    
    
    func makeUIView(context: Context) -> ARView {
        let arView = ARView()

        arView.session.run(configuration)
     
        arView.renderOptions = [.disableGroundingShadows]
        
        let boxMesh = MeshResource.generateBox(size: 0.50)
        var mat = PhysicallyBasedMaterial()
        mat.baseColor = .init(tint: .blue)
        mat.sheen = .init(tint: .black)
        // The strong yellow emission is why the box appears yellow on screen (step 1 above).
        mat.emissiveColor = .init(color: .yellow)
        mat.emissiveIntensity = 2
        
        let boxModel = ModelEntity(mesh: boxMesh, materials: [mat])
        
        let anchor = AnchorEntity()
        anchor.addChild(boxModel)
        boxModel.name = "Blue Box"
        boxModel.isEnabled = true
        arView.scene.addAnchor(anchor)

        
        // Set up coordinator
        let coordinator = context.coordinator
        coordinator.arView = arView
        coordinator.boxEntity = boxModel
        coordinator.setupUpdateSubscription()
        
        controller.view = arView
        controller.configuration = configuration
        return arView
    }

    
    func makeCoordinator() -> Coordinator {
        Coordinator()
    }
    
    static func dismantleUIView(_ uiView: ARView, coordinator: Coordinator) {
        print("dismantleUIView has been called")
    }
    
    class Coordinator: NSObject {
        weak var arView: ARView?
        var boxEntity: ModelEntity?
        var updateSubscription: Cancellable?
        
        func setupUpdateSubscription() {
            guard let currentView = arView else {
                return
            }
            
            updateSubscription = currentView.scene.subscribe(to: SceneEvents.Update.self) { [weak self] event in
                guard let self, let arView = self.arView else { return }
                self.handleUpdate(scene: event.scene, deltaTime: event.deltaTime, arView: arView)
            }
        }
        
        func handleUpdate(scene: RealityKit.Scene, deltaTime: TimeInterval, arView: ARView) {
            
            if let configuration = arView.session.configuration as? ARWorldTrackingConfiguration {
                let isMeshReconstructionEnabled = configuration.sceneReconstruction == .mesh
                print("Is mesh scene reconstruction enabled: \(isMeshReconstructionEnabled)")
            } else {
                print("Current configuration is not ARWorldTrackingConfiguration or scene reconstruction is not set.")
            }
            
            let isOcclusionEnabled = arView.environment.sceneUnderstanding.options.contains(.occlusion)
            
            print("Is occlusion enabled: \(isOcclusionEnabled)")
            
            
            if (!isOcclusionEnabled){
                arView.environment.sceneUnderstanding.options.insert(.occlusion)
            }
            
            guard let raycastQuery = arView.makeRaycastQuery(from: arView.center, allowing: .estimatedPlane, alignment: .any),
                  let raycastResult = arView.session.raycast(raycastQuery).first else {
                return
            }
            
            let newTransform = Transform(matrix: raycastResult.worldTransform)
            
            // Keep the box pinned to whatever surface the center of the screen is looking at.
            if let boxEntity = scene.findEntity(named: "Blue Box") as? ModelEntity {
                boxEntity.position = newTransform.translation
                boxEntity.isEnabled = true
            }
        }
    }
    
}

import SwiftUI

@main
struct BugApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
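
For anyone investigating, a harder session reset when switching back to the ARView, rather than only toggling the occlusion option as Controller.control() does, might be worth trying. The sketch below is untested as a fix, and the resetOcclusion(on:) helper is my own name:

import ARKit
import RealityKit

// Untested sketch: throw away the old tracking state and reconstructed mesh
// when returning from ObjectCaptureView, then re-enable occlusion.
func resetOcclusion(on arView: ARView) {
    guard let configuration = arView.session.configuration as? ARWorldTrackingConfiguration else { return }

    // Re-run the same configuration with explicit reset options
    // (no .removeExistingAnchors, so the box's anchor is left alone).
    arView.session.run(configuration, options: [.resetTracking, .resetSceneReconstruction])

    arView.environment.sceneUnderstanding.options.insert(.occlusion)
}

Calling something like this from the Switch button after returning from DeepView would be the obvious place to hook it in.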
