Skip to content

Commit

Permalink
Better format, add debug mode
Browse files Browse the repository at this point in the history
  • Loading branch information
Cesar Vargas Casaseca committed Dec 13, 2020
1 parent f129ff2 commit dce3682
Show file tree
Hide file tree
Showing 3 changed files with 43 additions and 17 deletions.
10 changes: 10 additions & 0 deletions Sample/Sample/ViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,16 @@ class ViewController: UIViewController {
facialExpressionDetectorViewController.didMove(toParent: self)
}

/// Replaces the left-smile analyzer with one whose minimum valid coefficient is
/// lowered to 0.2, making the smile detection more sensitive.
private func changeFacialExpressionMinimumValidCoefficient() {
    let targetExpression = FacialExpression.mouthSmileLeft
    let replacement = FacialExpressionAnalyzer(
        facialExpression: targetExpression,
        blendShapeLocation: .mouthSmileLeft,
        minimumValidCoefficient: 0.2
    )

    // Swap in the new analyzer only if one for this expression is registered.
    if let position = facialExpressionDetectorViewController.analyzers
        .firstIndex(where: { $0.facialExpression == targetExpression }) {
        facialExpressionDetectorViewController.analyzers[position] = replacement
    }
}

/// Registers an additional analyzer so that a wide-open left eye is also detected.
private func addMoreFacialExpressionsToBeDetected() {
    let wideLeftEyeAnalyzer = FacialExpressionAnalyzer(
        facialExpression: FacialExpression.eyeWideLeft,
        blendShapeLocation: .eyeWideLeft,
        minimumValidCoefficient: 0.6
    )
    facialExpressionDetectorViewController.analyzers.append(wideLeftEyeAnalyzer)
}
Expand Down
4 changes: 2 additions & 2 deletions Sources/Wink/FacialExpression.swift
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ extension FacialExpression {
static let test = FacialExpression(rawValue: "")
}

public struct FacialExpressionAnalyzer {
let facialExpression: FacialExpression
public struct FacialExpressionAnalyzer: Equatable {
public let facialExpression: FacialExpression
let blendShapeLocation: ARFaceAnchor.BlendShapeLocation
let minimumValidCoefficient: Decimal

Expand Down
46 changes: 31 additions & 15 deletions Sources/Wink/FacialExpressionDetectorViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,16 @@ import UIKit
import ARKit
import Combine

public class FacialExpressionDetectorViewController: UIViewController, ARSCNViewDelegate {
public class FacialExpressionDetectorViewController: UIViewController {
var sceneView: ARSCNView!

/// Array of `FacialExpressionAnalyzer` responsible for detecting the new expressions
public var analyzers = DefaultFacialExpressionAnalyzersProvider().defaultFacialExpressionAnalyzers()

/// When enabled, debug elements are shown in the camera view such as node lines and statistics. Default is `false`
public var debugMode = false
/// This publisher is updated with a new array of `FacialExpression` each time they are retrieved
lazy public var facialExpressionPublisher: AnyPublisher<[FacialExpression], Never> = facialExpressionSubject.eraseToAnyPublisher()

private let facialExpressionSubject: PassthroughSubject<[FacialExpression], Never> = PassthroughSubject<[FacialExpression], Never>()

public override func viewDidLoad() {
Expand All @@ -29,7 +33,10 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
adjustSceneViewConstraints()

sceneView.delegate = self
sceneView.showsStatistics = true

if debugMode {
sceneView.showsStatistics = true
}
}

private func checkFaceTrackingSupport() {
Expand Down Expand Up @@ -59,19 +66,7 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
sceneView.session.pause()
}

public func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
let node = SCNNode(geometry: faceMesh)
node.geometry?.firstMaterial?.fillMode = .lines
return node
}

public func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
faceGeometry.update(from: faceAnchor.geometry)
detectFacialExpression(from: faceAnchor)
}
}

func detectFacialExpression(from anchor: ARFaceAnchor) {
let facialExpressions: [FacialExpression] = analyzers.compactMap {
Expand All @@ -83,3 +78,24 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
facialExpressionSubject.send(facialExpressions)
}
}

// MARK: - ARSCNViewDelegate
extension FacialExpressionDetectorViewController: ARSCNViewDelegate {
    /// Supplies the node rendered for each newly detected anchor.
    ///
    /// - Returns: A node backed by `ARSCNFaceGeometry`, or `nil` when the scene
    ///   view has no Metal device available, instead of crashing on a force unwrap.
    public func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        // `ARSCNView.device` is optional; avoid the `!` so unsupported hardware
        // degrades to "no face node" rather than a crash.
        guard let device = sceneView.device else { return nil }

        let faceMesh = ARSCNFaceGeometry(device: device)
        let node = SCNNode(geometry: faceMesh)

        if debugMode {
            // Render the face mesh as a wireframe so it is visible while debugging.
            node.geometry?.firstMaterial?.fillMode = .lines
        }

        return node
    }

    /// Keeps the rendered face geometry in sync with the tracked face anchor and
    /// forwards the anchor to `detectFacialExpression(from:)` for analysis.
    public func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
            faceGeometry.update(from: faceAnchor.geometry)
            detectFacialExpression(from: faceAnchor)
        }
    }
}

0 comments on commit dce3682

Please sign in to comment.