diff --git a/Sample/Sample/ViewController.swift b/Sample/Sample/ViewController.swift
index 75e15d1..416f518 100644
--- a/Sample/Sample/ViewController.swift
+++ b/Sample/Sample/ViewController.swift
@@ -59,6 +59,16 @@ class ViewController: UIViewController {
         facialExpressionDetectorViewController.didMove(toParent: self)
     }
 
+    private func changeFacialExpressionMinimumValidCoefficient() {
+        let changingAnalyzer = FacialExpressionAnalyzer(facialExpression: FacialExpression.mouthSmileLeft, blendShapeLocation: .mouthSmileLeft, minimumValidCoefficient: 0.2)
+
+        guard let index = facialExpressionDetectorViewController.analyzers.firstIndex(where: { $0.facialExpression == FacialExpression.mouthSmileLeft }) else {
+            return
+        }
+
+        facialExpressionDetectorViewController.analyzers[index] = changingAnalyzer
+    }
+
     private func addMoreFacialExpressionsToBeDetected() {
         facialExpressionDetectorViewController.analyzers.append(FacialExpressionAnalyzer(facialExpression: FacialExpression.eyeWideLeft, blendShapeLocation: .eyeWideLeft, minimumValidCoefficient: 0.6))
     }
diff --git a/Sources/Wink/FacialExpression.swift b/Sources/Wink/FacialExpression.swift
index d599de9..33515ff 100644
--- a/Sources/Wink/FacialExpression.swift
+++ b/Sources/Wink/FacialExpression.swift
@@ -31,8 +31,8 @@ extension FacialExpression {
     static let test = FacialExpression(rawValue: "")
 }
 
-public struct FacialExpressionAnalyzer {
-    let facialExpression: FacialExpression
+public struct FacialExpressionAnalyzer: Equatable {
+    public let facialExpression: FacialExpression
     let blendShapeLocation: ARFaceAnchor.BlendShapeLocation
     let minimumValidCoefficient: Decimal
 
diff --git a/Sources/Wink/FacialExpressionDetectorViewController.swift b/Sources/Wink/FacialExpressionDetectorViewController.swift
index 78f6ca3..0413831 100644
--- a/Sources/Wink/FacialExpressionDetectorViewController.swift
+++ b/Sources/Wink/FacialExpressionDetectorViewController.swift
@@ -10,12 +10,16 @@ import UIKit
 import ARKit
 import Combine
 
-public class FacialExpressionDetectorViewController: UIViewController, ARSCNViewDelegate {
+public class FacialExpressionDetectorViewController: UIViewController {
     var sceneView: ARSCNView!
 
+    /// Array of `FacialExpressionAnalyzer` instances responsible for detecting expressions
     public var analyzers = DefaultFacialExpressionAnalyzersProvider().defaultFacialExpressionAnalyzers()
-
+    /// When enabled, debug elements such as the face-mesh wireframe and statistics are shown in the camera view. Default is `false`
+    public var debugMode = false
+    /// This publisher is updated with a new array of `FacialExpression` each time they are detected
     lazy public var facialExpressionPublisher: AnyPublisher<[FacialExpression], Never> = facialExpressionSubject.eraseToAnyPublisher()
+
     private let facialExpressionSubject: PassthroughSubject<[FacialExpression], Never> = PassthroughSubject<[FacialExpression], Never>()
 
     public override func viewDidLoad() {
@@ -29,7 +33,10 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
         adjustSceneViewConstraints()
 
         sceneView.delegate = self
-        sceneView.showsStatistics = true
+
+        if debugMode {
+            sceneView.showsStatistics = true
+        }
     }
 
     private func checkFaceTrackingSupport() {
@@ -59,19 +66,7 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
         sceneView.session.pause()
     }
 
-    public func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
-        let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
-        let node = SCNNode(geometry: faceMesh)
-        node.geometry?.firstMaterial?.fillMode = .lines
-        return node
-    }
 
-    public func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
-        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
-            faceGeometry.update(from: faceAnchor.geometry)
-            detectFacialExpression(from: faceAnchor)
-        }
-    }
 
     func detectFacialExpression(from anchor: ARFaceAnchor) {
         let facialExpressions: [FacialExpression] = analyzers.compactMap {
@@ -83,3 +78,23 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
         facialExpressionSubject.send(facialExpressions)
     }
 }
+
+extension FacialExpressionDetectorViewController: ARSCNViewDelegate {
+    public func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
+        let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
+        let node = SCNNode(geometry: faceMesh)
+
+        if debugMode {
+            node.geometry?.firstMaterial?.fillMode = .lines
+        }
+
+        return node
+    }
+
+    public func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
+        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
+            faceGeometry.update(from: faceAnchor.geometry)
+            detectFacialExpression(from: faceAnchor)
+        }
+    }
+}
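
For reviewers, a minimal sketch of how a host app could exercise the API surface this diff touches: the new `debugMode` flag, the now-replaceable `analyzers` array (enabled by the `Equatable` conformance and the `public` `facialExpression` property), and the unchanged `facialExpressionPublisher`. `HostViewController` and the subscription details are illustrative assumptions, not part of this change.

// Hypothetical consumer of Wink; not part of this diff.
import UIKit
import ARKit
import Combine
import Wink

final class HostViewController: UIViewController {
    private let detector = FacialExpressionDetectorViewController()
    private var cancellables = Set<AnyCancellable>()

    override func viewDidLoad() {
        super.viewDidLoad()

        // New in this PR: opt in to the wireframe mesh and statistics overlay.
        detector.debugMode = true

        // Analyzers can now be located and swapped out, e.g. to lower the
        // smile threshold so detection is more sensitive.
        if let index = detector.analyzers.firstIndex(where: { $0.facialExpression == FacialExpression.mouthSmileLeft }) {
            detector.analyzers[index] = FacialExpressionAnalyzer(
                facialExpression: .mouthSmileLeft,
                blendShapeLocation: .mouthSmileLeft,
                minimumValidCoefficient: 0.2
            )
        }

        // Embed the detector, mirroring the Sample app.
        addChild(detector)
        view.addSubview(detector.view)
        detector.view.frame = view.bounds
        detector.didMove(toParent: self)

        // Unchanged API: react to each batch of detected expressions.
        detector.facialExpressionPublisher
            .receive(on: DispatchQueue.main)
            .sink { expressions in
                print("Detected:", expressions)
            }
            .store(in: &cancellables)
    }
}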