diff --git a/Sources/FaceLiveness/AV/LivenessCaptureSession.swift b/Sources/FaceLiveness/AV/LivenessCaptureSession.swift
index 9cd8eccf..988e0c97 100644
--- a/Sources/FaceLiveness/AV/LivenessCaptureSession.swift
+++ b/Sources/FaceLiveness/AV/LivenessCaptureSession.swift
@@ -11,8 +11,12 @@ import AVFoundation
 class LivenessCaptureSession {
     let captureDevice: LivenessCaptureDevice
     private let captureQueue = DispatchQueue(label: "com.amazonaws.faceliveness.cameracapturequeue")
+    private let configurationQueue = DispatchQueue(label: "com.amazonaws.faceliveness.sessionconfiguration", qos: .userInitiated)
     let outputDelegate: AVCaptureVideoDataOutputSampleBufferDelegate
     var captureSession: AVCaptureSession?
+    private var deviceInput: AVCaptureDeviceInput?
+    private var videoOutput: AVCaptureVideoDataOutput?
+    private var previewLayer: AVCaptureVideoPreviewLayer?
 
     var outputSampleBufferCapturer: OutputSampleBufferCapturer? {
         return outputDelegate as? OutputSampleBufferCapturer
@@ -34,81 +38,80 @@ class LivenessCaptureSession {
             frame: frame,
             for: captureSession
         )
-
+        self.previewLayer = previewLayer
         return previewLayer
     }
 
     func startSession() throws {
+        teardownCurrentSession()
         guard let camera = captureDevice.avCaptureDevice
         else { throw LivenessCaptureSessionError.cameraUnavailable }
-
-        let cameraInput = try AVCaptureDeviceInput(device: camera)
-
-        teardownExistingSession(input: cameraInput)
         captureSession = AVCaptureSession()
+        deviceInput = try AVCaptureDeviceInput(device: camera)
+        videoOutput = AVCaptureVideoDataOutput()
 
         guard let captureSession = captureSession else {
            throw LivenessCaptureSessionError.captureSessionUnavailable
        }
-
-        try setupInput(cameraInput, for: captureSession)
-        captureSession.sessionPreset = captureDevice.preset
-
-        let videoOutput = AVCaptureVideoDataOutput()
-        try setupOutput(videoOutput, for: captureSession)
-
+        guard let input = deviceInput, captureSession.canAddInput(input) else {
+            throw LivenessCaptureSessionError.captureSessionInputUnavailable
+        }
+        guard let output = videoOutput, captureSession.canAddOutput(output) else {
+            throw LivenessCaptureSessionError.captureSessionOutputUnavailable
+        }
         try captureDevice.configure()
-
-        DispatchQueue.global().async {
+
+        configureOutput(output)
+
+        configurationQueue.async {
+            captureSession.beginConfiguration()
+            captureSession.sessionPreset = self.captureDevice.preset
+            captureSession.addInput(input)
+            captureSession.addOutput(output)
+            captureSession.commitConfiguration()
             captureSession.startRunning()
         }
-
-        videoOutput.setSampleBufferDelegate(
-            outputDelegate,
-            queue: captureQueue
-        )
     }
 
     func stopRunning() {
-        if captureSession?.isRunning == true {
-            captureSession?.stopRunning()
-        }
-    }
-
-    private func teardownExistingSession(input: AVCaptureDeviceInput) {
-        stopRunning()
-        captureSession?.removeInput(input)
-    }
-
-    private func setupInput(
-        _ input: AVCaptureDeviceInput,
-        for captureSession: AVCaptureSession
-    ) throws {
-        if captureSession.canAddInput(input) {
-            captureSession.addInput(input)
-        } else {
-            throw LivenessCaptureSessionError.captureSessionInputUnavailable
-        }
+        teardownCurrentSession()
     }
-
-    private func setupOutput(
-        _ output: AVCaptureVideoDataOutput,
-        for captureSession: AVCaptureSession
-    ) throws {
-        if captureSession.canAddOutput(output) {
-            captureSession.addOutput(output)
-        } else {
-            throw LivenessCaptureSessionError.captureSessionOutputUnavailable
-        }
+
+    private func configureOutput(_ output: AVCaptureVideoDataOutput) {
         output.videoSettings = [
             kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
         ]
-
+
         output.connections
             .filter(\.isVideoOrientationSupported)
             .forEach {
                 $0.videoOrientation = .portrait
+            }
+
+        output.setSampleBufferDelegate(
+            outputDelegate,
+            queue: captureQueue
+        )
+    }
+
+    private func teardownCurrentSession() {
+        if captureSession?.isRunning == true {
+            captureSession?.stopRunning()
+        }
+
+        if let output = videoOutput {
+            captureSession?.removeOutput(output)
+            videoOutput = nil
+        }
+        if let input = deviceInput {
+            captureSession?.removeInput(input)
+            deviceInput = nil
         }
+
+        previewLayer?.removeFromSuperlayer()
+        previewLayer?.session = nil
+        previewLayer = nil
+        captureSession = nil
     }
 
     private func previewLayer(
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift
index 64098e5f..320da58e 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift
@@ -172,6 +172,9 @@ public struct FaceLivenessDetectorView: View {
                     UIScreen.main.brightness = 1.0
                 }
             }
+            .onDisappear() {
+                viewModel.stopRecording()
+            }
             .onReceive(viewModel.$livenessState) { output in
                 switch output.state {
                 case .completed:
diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
index 0189c3c8..5e5111a7 100644
--- a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
+++ b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
@@ -37,6 +37,12 @@ final class _LivenessViewController: UIViewController {
             }
         }
     }
+
+    deinit {
+        self.previewLayer.removeFromSuperlayer()
+        (self.previewLayer as? AVCaptureVideoPreviewLayer)?.session = nil
+        self.previewLayer = nil
+    }
 
     override func viewDidLoad() {
         super.viewDidLoad()
@@ -110,6 +116,7 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter {
         imageView.frame = self.previewLayer.frame
         self.view.addSubview(imageView)
         self.previewLayer.removeFromSuperlayer()
+        (self.previewLayer as? AVCaptureVideoPreviewLayer)?.session = nil
         self.viewModel.stopRecording()
     }
 }