Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ARCameraStream isDepthOcclusionEnabled option does not function as intended #578

Open
hanssbtn opened this issue Nov 11, 2024 · 0 comments

Comments

@hanssbtn
Copy link

I am experiencing the same problem described in this earlier issue:
#232
Here are some code snippets:

// --- Scene setup (fragment of a @Composable; enclosing function not shown) ---

// Gesture listener starts as null and is assigned later inside onViewCreated,
// so its callbacks can close over the created view (frame / hitTest).
var gestureListener by remember {
        mutableStateOf<io.github.sceneview.gesture.GestureDetector.OnGestureListener?>(null)
}
// Filament engine plus model/material loaders, cached across recompositions.
val engine = rememberEngine()
val modelLoader = rememberModelLoader(engine)
val materialLoader = rememberMaterialLoader(engine)            
// Custom creator so depth occlusion is switched on as soon as the camera
// stream exists.
// NOTE(review): this runs before sessionConfiguration has applied a depth
// mode to the ARCore session — the flag may be set while depth is still
// unavailable; verify the required ordering against the SceneView docs, as
// it may relate to the occlusion problem reported in this issue.
val cameraStream = rememberARCameraStream(materialLoader, creator = {
        ARSceneView.createARCameraStream(materialLoader).apply {
                 Log.d("ARCameraStream", "Created camera stream with textures {${this.cameraTextureIds.contentToString()}}")
                 isDepthOcclusionEnabled = true
        }
})
// NOTE(review): only rememberView(engine) is cached; the .apply block runs on
// every recomposition. Harmless here (idempotent assignment), but it is not
// a one-time initializer.
val view = rememberView(engine).apply {
        isPostProcessingEnabled = true
}

 // Main AR composable. `trackingStatus`, `anchorNodes`, `helmet`, and
 // `childNodes` are defined outside this snippet.
 ARScene(
        modifier = Modifier.fillMaxSize(),
        onGestureListener = gestureListener,
        engine = engine,
        modelLoader = modelLoader,
        materialLoader = materialLoader,
        cameraStream = cameraStream,
        view = view,
        // Configure the ARCore session: depth mode is chosen from device
        // support; when unsupported, occlusion is also turned off on the
        // already-created camera stream.
        sessionConfiguration = { sess, config ->
                config.depthMode =
                        when (sess.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
                                true -> {
                                        Log.d("ARScene", "DepthMode = Automatic")
                                        Config.DepthMode.AUTOMATIC
                                }
                                false -> {
                                        Log.d("ARScene", "DepthMode = Disabled")
                                        // Side effect inside the when branch: disable
                                        // occlusion on the stream when depth is unsupported.
                                        cameraStream.isDepthOcclusionEnabled = false
                                        Config.DepthMode.DISABLED
                                }
                        }
                        // NOTE: these lines are over-indented but still belong to the
                        // sessionConfiguration lambda, not to the when expression.
                        config.updateMode = Config.UpdateMode.LATEST_CAMERA_IMAGE
                        config.instantPlacementMode = Config.InstantPlacementMode.LOCAL_Y_UP
                        config.lightEstimationMode = Config.LightEstimationMode.ENVIRONMENTAL_HDR
                        config.planeFindingMode = Config.PlaneFindingMode.HORIZONTAL_AND_VERTICAL
                        config.semanticMode = Config.SemanticMode.DISABLED
                },
        onSessionCreated = { session ->
                Log.d("ARScene", "Session created ($session)")
        },
        // Mirror tracking-failure state into `trackingStatus` (declared elsewhere).
        onTrackingFailureChanged = { reason ->
                trackingStatus = reason ?: TrackingFailureReason.NONE
        },
        // Log the device orientation whenever the session resumes.
        onSessionResumed = { session ->
                val orientation = when ([email protected]) {
                        Configuration.ORIENTATION_LANDSCAPE -> "Landscape"
                        Configuration.ORIENTATION_PORTRAIT -> "Portrait"
                        Configuration.ORIENTATION_UNDEFINED -> "Undefined"
                        else -> "Unknown"
                }
                Log.d("ARScene", "Session resumed, orientation: $orientation")
        },
        // Receiver is the created ARSceneView, so `frame` and `childNodes`
        // below resolve against the view.
        onViewCreated = {
                // Handles a confirmed single tap: hit-test the current frame and,
                // on a plane hit, (re)anchor a helmet model at the hit pose.
                val handleSingleTapConfirmed = { e: MotionEvent, node: Node? ->
                        // First hit result, or null when there is no frame / no hit.
                        val t = frame?.hitTest(e)?.firstOrNull()
                        if (t != null) {
                                if (t.trackable is Plane) {
                                        val plane = t.trackable as Plane
                                        if (plane.isPoseInPolygon(t.hitPose) && (t.distance > 0)) {
                                                val anchor = t.createAnchor()
                                                // Reuse a single AnchorNode: create it on the first tap,
                                                // afterwards detach the old anchor and attach the new one.
                                                if (anchorNodes.value == null) anchorNodes.value = AnchorNode(engine, anchor)
                                                else {
                                                        anchorNodes.value!!.detachAnchor()
                                                        anchorNodes.value!!.anchor = anchor
                                                }
                                                anchorNodes.value!!.apply {
                                       
                                                // NOTE(review): indentation is misleading from here on —
                                                // these statements are still inside the apply { } block.
                                                val modelNode = ModelNode(
                                                        modelInstance = modelLoader.createModelInstance(helmet),
                                                        scaleToUnits = 0.5f
                                                ).apply {
                                                        isEditable = true
                                                        editableScaleRange = 0.2f..0.75f
                                                }
                                                // Debug bounding box matching the model's extents.
                                                val boundingBoxNode = CubeNode(
                                                        engine,
                                                        size = modelNode.extents,
                                                        center = modelNode.center,
                                                        materialInstance = materialLoader.createColorInstance(Color.White.copy(alpha = 1f))
                                                ).apply {
                                                        isVisible = true
                                                }
                                                modelNode.addChildNode(boundingBoxNode)
                                                addChildNode(modelNode)
                                                // NOTE(review): this adds the same AnchorNode to the scene
                                                // (and a fresh ModelNode to it) on every qualifying tap —
                                                // presumably duplicates accumulate; verify intent.
                                                [email protected](this)
                                        }
                                }
                        }
                                // Still inside `if (t != null)`: the two closing braces above
                                // end the Plane branch, not the null check, despite the indent.
                                if (t.trackable is Point) {
                                        Log.d("HitTest", "Hit point with orientation ${(t.trackable as Point).orientationMode}")
                                }
                                if (t.trackable is DepthPoint) {
                                        Log.d("HitTest", "Hit depth point at depth ${t.hitPose}")
                                }
                        }
                }
                // Assign the listener last so it captures handleSingleTapConfirmed.
                gestureListener = object : io.github.sceneview.gesture.GestureDetector.OnGestureListener {
                 
                        // Method definitions... 
                
                        override fun onSingleTapConfirmed(e: MotionEvent, node: Node) {
                                handleSingleTapConfirmed(e, node)
                        }
                
                        // Other methods....
                }
        }
)

Depth occlusion works between virtual objects, but real-world objects do not occlude the virtual ones. Does anyone know how to fix this issue?

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

1 participant